/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */

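/* As a simplified, hypothetical illustration of the overall effect (not a
   testcase from the GCC tree), consider

     struct pair { int x; int y; };

     int
     sum_pair (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   After SRA the two fields live in independent scalar replacements, named
   along the lines of p$x and p$y (see make_fancy_name below), roughly

     int
     sum_pair (void)
     {
       int p$x;
       int p$y;
       p$x = 1;
       p$y = 2;
       return p$x + p$y;
     }

   which the scalar optimizers can then reduce to a constant.  */
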
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "toplev.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "cgraph.h"
83 #include "tree-flow.h"
84 #include "ipa-prop.h"
85 #include "tree-pretty-print.h"
86 #include "statistics.h"
87 #include "tree-dump.h"
88 #include "timevar.h"
89 #include "params.h"
90 #include "target.h"
91 #include "flags.h"
92 #include "dbgcnt.h"
93 #include "tree-inline.h"
94 #include "gimple-pretty-print.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater than or equal to the offset of the root and offset+size is smaller
   than or equal to the offset+size of the root.  Children of an access are
   sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */

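/* As a made-up illustration (assuming 32-bit ints), for a variable of type

     struct s { struct { int a; int b; } inner; int c; };

   that is accessed both as a whole and through its fields, the group
   representatives could form the following tree, with offsets and sizes in
   bits:

     <s, offset 0, size 96>
       <s.inner, offset 0, size 64>
         <s.inner.a, offset 0, size 32>
         <s.inner.b, offset 32, size 32>
       <s.c, offset 64, size 32>

   Every child lies entirely within its parent and siblings are sorted by
   increasing offset.  */
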
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to a scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
             "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
             "grp_covered = %d, grp_unscalarizable_region = %d, "
             "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
             "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_write, access->total_scalarization,
             access->grp_read, access->grp_hint, access->grp_assignment_read,
             access->grp_covered, access->grp_unscalarizable_region,
             access->grp_unscalarized_data, access->grp_partial_lhs,
             access->grp_to_be_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld)
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
                || !host_integerp (DECL_SIZE (fld), 1))
              return true;

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
        return type_internals_preclude_sra_p (et);
      else
        return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field or has a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;

        last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}

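/* For example (an illustrative type, not from any testcase),

     struct ok { struct { int i; double d; } inner; long l; };

   consists only of gimple-register fields and a nested record and therefore
   qualifies, whereas a record containing a bit-field, or one whose last
   field has zero size, does not.  */
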
/* Create total_scalarization accesses for all scalar type fields in DECL,
   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must
   be the offset of DECL within BASE.  REF must be the memory reference
   expression for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}

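/* For instance (a hypothetical input), representatives with <offset, size>
   pairs <0, 64>, <32, 32> and <0, 32> end up ordered as <0, 64>, <0, 32>,
   <32, 32>: equal offsets put the bigger access first, so an enclosing
   access always precedes the accesses nested within it, which is what
   build_access_subtree below relies on.  */
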
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}

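/* As a rough illustration (hypothetical, assuming 32-bit ints): for BASE `a'
   of type `struct s { int x; int y; }', OFFSET 32 and EXP_TYPE `int', the
   resulting reference would print as something like

     MEM[(int *)&a + 4B]

   i.e. the bit offset is converted to bytes and becomes the constant offset
   operand of the MEM_REF.  */
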
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type;

      offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
                              TREE_OPERAND (model->expr, 1), NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = tree_low_cst (tr_size, 1);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx, 0);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}

/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables whose types are suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
          || needs_to_live_in_memory (var)
          || TREE_THIS_VOLATILE (var)
          || !COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || type_internals_preclude_sra_p (type)
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
             we also want to schedule it rather late.  Thus we ignore it in
             the early pass.  */
          || (sra_mode == SRA_MODE_EARLY_INTRA
              && is_va_list_type (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            grp_write = true;
          else
            {
              if (grp_read)
                multiple_reads = true;
              else
                grp_read = true;
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement_decl.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
          || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
         as DECL_DEBUG_EXPR isn't considered when looking for still
         used SSA_NAMEs and thus they could be freed.  All debug info
         generation cares about is whether something is constant or variable
         and that get_ref_base_and_extent works properly on the
         expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
        switch (TREE_CODE (d))
          {
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
            if (TREE_OPERAND (d, 1)
                && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
              TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
            if (TREE_OPERAND (d, 3)
                && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
              TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
            /* FALLTHRU */
          case COMPONENT_REF:
            if (TREE_OPERAND (d, 2)
                && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
              TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
            break;
          default:
            break;
          }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
        TREE_NO_WARNING (repl) = 1;
      else
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}

/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };

1834 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1835 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
1836 sorts of access flags appropriately along the way, notably always set
1837 grp_read and grp_assign_read according to MARK_READ and grp_write when
1838 MARK_WRITE is true. */
1840 static bool
1841 analyze_access_subtree (struct access *root, bool allow_replacements,
1842 enum mark_read_status mark_read, bool mark_write)
1844 struct access *child;
1845 HOST_WIDE_INT limit = root->offset + root->size;
1846 HOST_WIDE_INT covered_to = root->offset;
1847 bool scalar = is_gimple_reg_type (root->type);
1848 bool hole = false, sth_created = false;
1849 bool direct_read = root->grp_read;
1851 if (mark_read == SRA_MR_ASSIGN_READ)
1853 root->grp_read = 1;
1854 root->grp_assignment_read = 1;
1856 if (mark_read == SRA_MR_READ)
1857 root->grp_read = 1;
1858 else if (root->grp_assignment_read)
1859 mark_read = SRA_MR_ASSIGN_READ;
1860 else if (root->grp_read)
1861 mark_read = SRA_MR_READ;
1863 if (mark_write)
1864 root->grp_write = true;
1865 else if (root->grp_write)
1866 mark_write = true;
1868 if (root->grp_unscalarizable_region)
1869 allow_replacements = false;
1871 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1872 allow_replacements = false;
1874 for (child = root->first_child; child; child = child->next_sibling)
1876 if (!hole && child->offset < covered_to)
1877 hole = true;
1878 else
1879 covered_to += child->size;
1881 sth_created |= analyze_access_subtree (child,
1882 allow_replacements && !scalar,
1883 mark_read, mark_write);
1885 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1886 hole |= !child->grp_covered;
1889 if (allow_replacements && scalar && !root->first_child
1890 && (root->grp_hint
1891 || (root->grp_write && (direct_read || root->grp_assignment_read))))
1893 if (dump_file && (dump_flags & TDF_DETAILS))
1895 fprintf (dump_file, "Marking ");
1896 print_generic_expr (dump_file, root->base, 0);
1897 fprintf (dump_file, " offset: %u, size: %u: ",
1898 (unsigned) root->offset, (unsigned) root->size);
1899 fprintf (dump_file, " to be replaced.\n");
1902 root->grp_to_be_replaced = 1;
1903 sth_created = true;
1904 hole = false;
1906 else if (covered_to < limit)
1907 hole = true;
1909 if (sth_created && !hole)
1911 root->grp_covered = 1;
1912 return true;
1914 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1915 root->grp_unscalarized_data = 1; /* not covered and written to */
1916 if (sth_created)
1917 return true;
1918 return false;
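/* An illustrative sketch (hypothetical code, assuming 32-bit ints): given

     struct S { int a; int b; } s;

   accessed both as a whole and through s.a, the root access <0, 64> has a
   single child <0, 32>.  The child is scalar, read and written, so it is
   marked grp_to_be_replaced above.  The children of the root then cover
   only bits 0..31, covered_to < limit records a hole, and since the root
   is written, grp_unscalarized_data is set so that later stages keep the
   aggregate itself up to date as well.  */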
1921 /* Analyze all access trees linked by next_grp by the means of
1922 analyze_access_subtree. */
1923 static bool
1924 analyze_access_trees (struct access *access)
1926 bool ret = false;
1928 while (access)
1930 if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
1931 ret = true;
1932 access = access->next_grp;
1935 return ret;
1938 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
1939 size SIZE would conflict with an already existing one. If exactly such a
1940 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
1942 static bool
1943 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1944 HOST_WIDE_INT size, struct access **exact_match)
1946 struct access *child;
1948 for (child = lacc->first_child; child; child = child->next_sibling)
1950 if (child->offset == norm_offset && child->size == size)
1952 *exact_match = child;
1953 return true;
1956 if (child->offset < norm_offset + size
1957 && child->offset + child->size > norm_offset)
1958 return true;
1961 return false;
1964 /* Create a new child access of PARENT, with all properties just like MODEL
1965 except for its offset and with its grp_write false and grp_read true.
1966 Return the new access or NULL if it cannot be created. Note that this access
1967 is created long after all splicing and sorting; it's not located in any
1968 access vector and is automatically a representative of its group. */
1970 static struct access *
1971 create_artificial_child_access (struct access *parent, struct access *model,
1972 HOST_WIDE_INT new_offset)
1974 struct access *access;
1975 struct access **child;
1976 tree expr = parent->base;
1978 gcc_assert (!model->grp_unscalarizable_region);
1980 access = (struct access *) pool_alloc (access_pool);
1981 memset (access, 0, sizeof (struct access));
1982 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1983 model->type))
1985 access->grp_no_warning = true;
1986 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
1987 new_offset, model, NULL, false);
1990 access->base = parent->base;
1991 access->expr = expr;
1992 access->offset = new_offset;
1993 access->size = model->size;
1994 access->type = model->type;
1995 access->grp_write = true;
1996 access->grp_read = false;
1998 child = &parent->first_child;
1999 while (*child && (*child)->offset < new_offset)
2000 child = &(*child)->next_sibling;
2002 access->next_sibling = *child;
2003 *child = access;
2005 return access;
2009 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2010 true if any new subaccess was created. Additionally, if RACC is a scalar
2011 access but LACC is not, change the type of the latter, if possible. */
2013 static bool
2014 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2016 struct access *rchild;
2017 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2018 bool ret = false;
2020 if (is_gimple_reg_type (lacc->type)
2021 || lacc->grp_unscalarizable_region
2022 || racc->grp_unscalarizable_region)
2023 return false;
2025 if (!lacc->first_child && !racc->first_child
2026 && is_gimple_reg_type (racc->type))
2028 tree t = lacc->base;
2030 lacc->type = racc->type;
2031 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2032 racc->type))
2033 lacc->expr = t;
2034 else
2036 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2037 lacc->base, lacc->offset,
2038 racc, NULL, false);
2039 lacc->grp_no_warning = true;
2041 return false;
2044 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2046 struct access *new_acc = NULL;
2047 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2049 if (rchild->grp_unscalarizable_region)
2050 continue;
2052 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2053 &new_acc))
2055 if (new_acc)
2057 rchild->grp_hint = 1;
2058 new_acc->grp_hint |= new_acc->grp_read;
2059 if (rchild->first_child)
2060 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2062 continue;
2065 rchild->grp_hint = 1;
2066 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2067 if (new_acc)
2069 ret = true;
2070 if (racc->first_child)
2071 propagate_subaccesses_across_link (new_acc, rchild);
2075 return ret;
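/* An illustrative sketch (hypothetical code): for two candidate
   aggregates of the same struct type and an assignment

     d = s;

   where only s.f has a scalar subaccess, the assign_link from the access
   for s (RACC here) to the one for d (LACC) lets the loop above create an
   artificial child of d at the offset of s.f.  A later read of d.f can
   then be satisfied from a scalar replacement rather than from the
   aggregate copy.  */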
2078 /* Propagate all subaccesses across assignment links. */
2080 static void
2081 propagate_all_subaccesses (void)
2083 while (work_queue_head)
2085 struct access *racc = pop_access_from_work_queue ();
2086 struct assign_link *link;
2088 gcc_assert (racc->first_link);
2090 for (link = racc->first_link; link; link = link->next)
2092 struct access *lacc = link->lacc;
2094 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2095 continue;
2096 lacc = lacc->group_representative;
2097 if (propagate_subaccesses_across_link (lacc, racc)
2098 && lacc->first_link)
2099 add_access_to_work_queue (lacc);
2104 /* Go through all accesses collected throughout the (intraprocedural) analysis
2105 stage, exclude overlapping ones, identify representatives and build trees
2106 out of them, making decisions about scalarization on the way. Return true
2107 iff there are any to-be-scalarized variables after this stage. */
2109 static bool
2110 analyze_all_variable_accesses (void)
2112 int res = 0;
2113 bitmap tmp = BITMAP_ALLOC (NULL);
2114 bitmap_iterator bi;
2115 unsigned i, max_total_scalarization_size;
2117 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2118 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2120 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2121 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2122 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2124 tree var = referenced_var (i);
2126 if (TREE_CODE (var) == VAR_DECL
2127 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2128 <= max_total_scalarization_size)
2129 && type_consists_of_records_p (TREE_TYPE (var)))
2131 completely_scalarize_record (var, var, 0, var);
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2134 fprintf (dump_file, "Will attempt to totally scalarize ");
2135 print_generic_expr (dump_file, var, 0);
2136 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2141 bitmap_copy (tmp, candidate_bitmap);
2142 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2144 tree var = referenced_var (i);
2145 struct access *access;
2147 access = sort_and_splice_var_accesses (var);
2148 if (!access || !build_access_trees (access))
2149 disqualify_candidate (var,
2150 "No or inhibitingly overlapping accesses.");
2153 propagate_all_subaccesses ();
2155 bitmap_copy (tmp, candidate_bitmap);
2156 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2158 tree var = referenced_var (i);
2159 struct access *access = get_first_repr_for_decl (var);
2161 if (analyze_access_trees (access))
2163 res++;
2164 if (dump_file && (dump_flags & TDF_DETAILS))
2166 fprintf (dump_file, "\nAccess trees for ");
2167 print_generic_expr (dump_file, var, 0);
2168 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2169 dump_access_tree (dump_file, access);
2170 fprintf (dump_file, "\n");
2173 else
2174 disqualify_candidate (var, "No scalar replacements to be created.");
2177 BITMAP_FREE (tmp);
2179 if (res)
2181 statistics_counter_event (cfun, "Scalarized aggregates", res);
2182 return true;
2184 else
2185 return false;
2188 /* Generate statements copying scalar replacements of accesses within a subtree
2189 into or out of AGG. ACCESS, all its children, siblings and their children
2190 are to be processed. AGG is an aggregate type expression (can be a
2191 declaration but does not have to be; it can for example also be a MEM_REF or
2192 a series of handled components). TOP_OFFSET is the offset of the processed
2193 subtree which has to be subtracted from offsets of individual accesses to
2194 get corresponding offsets for AGG. If CHUNK_SIZE is nonzero, copy only
2195 replacements in the interval <start_offset, start_offset + chunk_size>,
2196 otherwise copy all. GSI is a statement iterator used to place the new
2197 statements. WRITE should be true when the statements should write from AGG
2198 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2199 statements will be added after the current statement in GSI; otherwise they
2200 will be added before it. */
2202 static void
2203 generate_subtree_copies (struct access *access, tree agg,
2204 HOST_WIDE_INT top_offset,
2205 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2206 gimple_stmt_iterator *gsi, bool write,
2207 bool insert_after, location_t loc)
2211 if (chunk_size && access->offset >= start_offset + chunk_size)
2212 return;
2214 if (access->grp_to_be_replaced
2215 && (chunk_size == 0
2216 || access->offset + access->size > start_offset))
2218 tree expr, repl = get_access_replacement (access);
2219 gimple stmt;
2221 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2222 access, gsi, insert_after);
2224 if (write)
2226 if (access->grp_partial_lhs)
2227 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2228 !insert_after,
2229 insert_after ? GSI_NEW_STMT
2230 : GSI_SAME_STMT);
2231 stmt = gimple_build_assign (repl, expr);
2233 else
2235 TREE_NO_WARNING (repl) = 1;
2236 if (access->grp_partial_lhs)
2237 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2238 !insert_after,
2239 insert_after ? GSI_NEW_STMT
2240 : GSI_SAME_STMT);
2241 stmt = gimple_build_assign (expr, repl);
2243 gimple_set_location (stmt, loc);
2245 if (insert_after)
2246 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2247 else
2248 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2249 update_stmt (stmt);
2250 sra_stats.subtree_copies++;
2253 if (access->first_child)
2254 generate_subtree_copies (access->first_child, agg, top_offset,
2255 start_offset, chunk_size, gsi,
2256 write, insert_after, loc);
2258 access = access->next_sibling;
2260 while (access);
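/* An illustrative sketch; the replacement names s$a and s$b below are
   hypothetical ones for fields a and b of an aggregate s.  Flushing the
   subtree of s into another aggregate d with WRITE false emits

     d.a = s$a;
     d.b = s$b;

   before or after the statement at GSI as requested, whereas WRITE true
   emits the converse loads s$a = d.a etc., used e.g. to reload the
   replacements after the aggregate has been modified as a whole.  */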
2263 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2264 root of the subtree to be processed. GSI is the statement iterator used
2265 for inserting statements which are added after the current statement if
2266 INSERT_AFTER is true or before it otherwise. */
2268 static void
2269 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2270 bool insert_after, location_t loc)
2273 struct access *child;
2275 if (access->grp_to_be_replaced)
2277 gimple stmt;
2279 stmt = gimple_build_assign (get_access_replacement (access),
2280 fold_convert (access->type,
2281 integer_zero_node));
2282 if (insert_after)
2283 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2284 else
2285 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2286 update_stmt (stmt);
2287 gimple_set_location (stmt, loc);
2290 for (child = access->first_child; child; child = child->next_sibling)
2291 init_subtree_with_zero (child, gsi, insert_after, loc);
2294 /* Search for an access representative for the given expression EXPR and
2295 return it or NULL if it cannot be found. */
2297 static struct access *
2298 get_access_for_expr (tree expr)
2300 HOST_WIDE_INT offset, size, max_size;
2301 tree base;
2303 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2304 a different size than the size of its argument and we need the latter
2305 one. */
2306 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2307 expr = TREE_OPERAND (expr, 0);
2309 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2310 if (max_size == -1 || !DECL_P (base))
2311 return NULL;
2313 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2314 return NULL;
2316 return get_var_base_offset_size_access (base, offset, max_size);
2319 /* Replace the expression EXPR with a scalar replacement if there is one and
2320 generate other statements to do type conversion or subtree copying if
2321 necessary. GSI is used to place newly created statements, WRITE is true if
2322 the expression is being written to (it is on a LHS of a statement or output
2323 in an assembly statement). */
2325 static bool
2326 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2328 location_t loc;
2329 struct access *access;
2330 tree type, bfr;
2332 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2334 bfr = *expr;
2335 expr = &TREE_OPERAND (*expr, 0);
2337 else
2338 bfr = NULL_TREE;
2340 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2341 expr = &TREE_OPERAND (*expr, 0);
2342 access = get_access_for_expr (*expr);
2343 if (!access)
2344 return false;
2345 type = TREE_TYPE (*expr);
2347 loc = gimple_location (gsi_stmt (*gsi));
2348 if (access->grp_to_be_replaced)
2350 tree repl = get_access_replacement (access);
2351 /* If we replace a non-register typed access simply use the original
2352 access expression to extract the scalar component afterwards.
2353 This happens if scalarizing a function return value or parameter
2354 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2355 gcc.c-torture/compile/20011217-1.c.
2357 We also want to use this when accessing a complex or vector which can
2358 be accessed as a different type too, potentially creating a need for
2359 type conversion (see PR42196) and when scalarized unions are involved
2360 in assembler statements (see PR42398). */
2361 if (!useless_type_conversion_p (type, access->type))
2363 tree ref;
2365 ref = build_ref_for_model (loc, access->base, access->offset, access,
2366 NULL, false);
2368 if (write)
2370 gimple stmt;
2372 if (access->grp_partial_lhs)
2373 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2374 false, GSI_NEW_STMT);
2375 stmt = gimple_build_assign (repl, ref);
2376 gimple_set_location (stmt, loc);
2377 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2379 else
2381 gimple stmt;
2383 if (access->grp_partial_lhs)
2384 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2385 true, GSI_SAME_STMT);
2386 stmt = gimple_build_assign (ref, repl);
2387 gimple_set_location (stmt, loc);
2388 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2391 else
2392 *expr = repl;
2393 sra_stats.exprs++;
2396 if (access->first_child)
2398 HOST_WIDE_INT start_offset, chunk_size;
2399 if (bfr
2400 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2401 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2403 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2404 start_offset = access->offset
2405 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2407 else
2408 start_offset = chunk_size = 0;
2410 generate_subtree_copies (access->first_child, access->base, 0,
2411 start_offset, chunk_size, gsi, write, write,
2412 loc);
2414 return true;
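/* An illustrative sketch (hypothetical code): if s.a has a scalar
   replacement s$a, a matching-type use

     x = s.a;

   is rewritten above simply by substituting the replacement, giving

     x = s$a;

   When the types do not match (e.g. a union member read through a
   differently-typed scalarized field), the original reference is kept and
   the replacement is instead flushed to it before the statement for
   reads, or reloaded from it afterwards for writes.  */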
2417 /* Where scalar replacements of the RHS have been written to when a replacement
2418 of the LHS of an assignment cannot be directly loaded from a replacement of
2419 the RHS. */
2420 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2421 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2422 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2424 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2425 base aggregate if there are unscalarized data or directly to the LHS of the
2426 statement that is pointed to by GSI otherwise. */
2428 static enum unscalarized_data_handling
2429 handle_unscalarized_data_in_subtree (struct access *top_racc,
2430 gimple_stmt_iterator *gsi)
2432 if (top_racc->grp_unscalarized_data)
2434 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2435 gsi, false, false,
2436 gimple_location (gsi_stmt (*gsi)));
2437 return SRA_UDH_RIGHT;
2439 else
2441 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2442 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2443 0, 0, gsi, false, false,
2444 gimple_location (gsi_stmt (*gsi)));
2445 return SRA_UDH_LEFT;
2450 /* Try to generate statements to load all sub-replacements in an access subtree
2451 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2452 If that is not possible, refresh the TOP_RACC base aggregate and load the
2453 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2454 copied. NEW_GSI is a stmt iterator used for statement insertions after the
2455 original assignment, OLD_GSI is used to insert statements before the
2456 assignment. *REFRESHED keeps track of whether we have needed to
2457 refresh replacements of the LHS and from which side of the assignment this
2458 takes place. */
2460 static void
2461 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2462 HOST_WIDE_INT left_offset,
2463 gimple_stmt_iterator *old_gsi,
2464 gimple_stmt_iterator *new_gsi,
2465 enum unscalarized_data_handling *refreshed)
2467 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2468 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2470 if (lacc->grp_to_be_replaced)
2472 struct access *racc;
2473 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2474 gimple stmt;
2475 tree rhs;
2477 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2478 if (racc && racc->grp_to_be_replaced)
2480 rhs = get_access_replacement (racc);
2481 if (!useless_type_conversion_p (lacc->type, racc->type))
2482 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2484 else
2486 /* No suitable access on the right hand side, need to load from
2487 the aggregate. See if we have to update it first... */
2488 if (*refreshed == SRA_UDH_NONE)
2489 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2490 old_gsi);
2492 if (*refreshed == SRA_UDH_LEFT)
2493 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2494 new_gsi, true);
2495 else
2496 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2497 new_gsi, true);
2500 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2501 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2502 gimple_set_location (stmt, loc);
2503 update_stmt (stmt);
2504 sra_stats.subreplacements++;
2506 else if (*refreshed == SRA_UDH_NONE
2507 && lacc->grp_read && !lacc->grp_covered)
2508 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2509 old_gsi);
2511 if (lacc->first_child)
2512 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2513 old_gsi, new_gsi, refreshed);
2517 /* Result code for SRA assignment modification. */
2518 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2519 SRA_AM_MODIFIED, /* stmt changed but not
2520 removed */
2521 SRA_AM_REMOVED }; /* stmt eliminated */
2523 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2524 to the assignment and GSI is the statement iterator pointing at it. Returns
2525 the same values as sra_modify_assign. */
2527 static enum assignment_mod_result
2528 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2530 tree lhs = gimple_assign_lhs (*stmt);
2531 struct access *acc;
2532 location_t loc;
2534 acc = get_access_for_expr (lhs);
2535 if (!acc)
2536 return SRA_AM_NONE;
2538 loc = gimple_location (*stmt);
2539 if (VEC_length (constructor_elt,
2540 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2542 /* I have never seen this code path trigger but if it can happen the
2543 following should handle it gracefully. */
2544 if (access_has_children_p (acc))
2545 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2546 true, true, loc);
2547 return SRA_AM_MODIFIED;
2550 if (acc->grp_covered)
2552 init_subtree_with_zero (acc, gsi, false, loc);
2553 unlink_stmt_vdef (*stmt);
2554 gsi_remove (gsi, true);
2555 return SRA_AM_REMOVED;
2557 else
2559 init_subtree_with_zero (acc, gsi, true, loc);
2560 return SRA_AM_MODIFIED;
2564 /* Create and return a new suitable default definition SSA_NAME for RACC which
2565 is an access describing an uninitialized part of an aggregate that is being
2566 loaded. */
2568 static tree
2569 get_repl_default_def_ssa_name (struct access *racc)
2571 tree repl, decl;
2573 decl = get_unrenamed_access_replacement (racc);
2575 repl = gimple_default_def (cfun, decl);
2576 if (!repl)
2578 repl = make_ssa_name (decl, gimple_build_nop ());
2579 set_default_def (decl, repl);
2582 return repl;
2585 /* Examine both sides of the assignment statement pointed to by STMT, replace
2586 them with a scalar replacement if there is one and generate copying of
2587 replacements if scalarized aggregates have been used in the assignment. GSI
2588 is used to hold generated statements for type conversions and subtree
2589 copying. */
2591 static enum assignment_mod_result
2592 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2594 struct access *lacc, *racc;
2595 tree lhs, rhs;
2596 bool modify_this_stmt = false;
2597 bool force_gimple_rhs = false;
2598 location_t loc;
2599 gimple_stmt_iterator orig_gsi = *gsi;
2601 if (!gimple_assign_single_p (*stmt))
2602 return SRA_AM_NONE;
2603 lhs = gimple_assign_lhs (*stmt);
2604 rhs = gimple_assign_rhs1 (*stmt);
2606 if (TREE_CODE (rhs) == CONSTRUCTOR)
2607 return sra_modify_constructor_assign (stmt, gsi);
2609 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2610 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2611 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2613 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2614 gsi, false);
2615 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2616 gsi, true);
2617 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2620 lacc = get_access_for_expr (lhs);
2621 racc = get_access_for_expr (rhs);
2622 if (!lacc && !racc)
2623 return SRA_AM_NONE;
2625 loc = gimple_location (*stmt);
2626 if (lacc && lacc->grp_to_be_replaced)
2628 lhs = get_access_replacement (lacc);
2629 gimple_assign_set_lhs (*stmt, lhs);
2630 modify_this_stmt = true;
2631 if (lacc->grp_partial_lhs)
2632 force_gimple_rhs = true;
2633 sra_stats.exprs++;
2636 if (racc && racc->grp_to_be_replaced)
2638 rhs = get_access_replacement (racc);
2639 modify_this_stmt = true;
2640 if (racc->grp_partial_lhs)
2641 force_gimple_rhs = true;
2642 sra_stats.exprs++;
2645 if (modify_this_stmt)
2647 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2649 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2650 ??? This should move to fold_stmt which we simply should
2651 call after building a VIEW_CONVERT_EXPR here. */
2652 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2653 && !access_has_children_p (lacc))
2655 lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
2656 gsi, false);
2657 gimple_assign_set_lhs (*stmt, lhs);
2659 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2660 && !contains_view_convert_expr_p (rhs)
2661 && !access_has_children_p (racc))
2662 rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
2663 gsi, false);
2665 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2667 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2668 rhs);
2669 if (is_gimple_reg_type (TREE_TYPE (lhs))
2670 && TREE_CODE (lhs) != SSA_NAME)
2671 force_gimple_rhs = true;
2676 /* From this point on, the function deals with assignments in between
2677 aggregates when at least one has scalar reductions of some of its
2678 components. There are three possible scenarios: 1) both the LHS and RHS have
2679 to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
2681 In the first case, we would like to load the LHS components from RHS
2682 components whenever possible. If that is not possible, we would like to
2683 read it directly from the RHS (after updating it by storing in it its own
2684 components). If there are some necessary unscalarized data in the LHS,
2685 those will be loaded by the original assignment too. If neither of these
2686 cases happen, the original statement can be removed. Most of this is done
2687 by load_assign_lhs_subreplacements.
2689 In the second case, we would like to store all RHS scalarized components
2690 directly into LHS and if they cover the aggregate completely, remove the
2691 statement too. In the third case, we want the LHS components to be loaded
2692 directly from the RHS (DSE will remove the original statement if it
2693 becomes redundant).
2695 This is a bit complex but manageable when types match and when unions do
2696 not cause confusion in a way that we cannot really load a component of LHS
2697 from the RHS or vice versa (the access representing this level can have
2698 subaccesses that are accessible only through a different union field at a
2699 higher level - different from the one used in the examined expression).
2700 Unions are fun.
2702 Therefore, I specially handle a fourth case, happening when there is a
2703 specific type cast or it is impossible to locate a scalarized subaccess on
2704 the other side of the expression. If that happens, I simply "refresh" the
2705 RHS by storing its scalarized components into it, leave the original statement
2706 there to do the copying, and then load the scalar replacements of the LHS.
2707 This is what the first branch does. */
2709 if (gimple_has_volatile_ops (*stmt)
2710 || contains_view_convert_expr_p (rhs)
2711 || contains_view_convert_expr_p (lhs))
2713 if (access_has_children_p (racc))
2714 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2715 gsi, false, false, loc);
2716 if (access_has_children_p (lacc))
2717 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2718 gsi, true, true, loc);
2719 sra_stats.separate_lhs_rhs_handling++;
2721 else
2723 if (access_has_children_p (lacc) && access_has_children_p (racc))
2725 gimple_stmt_iterator orig_gsi = *gsi;
2726 enum unscalarized_data_handling refreshed;
2728 if (lacc->grp_read && !lacc->grp_covered)
2729 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2730 else
2731 refreshed = SRA_UDH_NONE;
2733 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2734 &orig_gsi, gsi, &refreshed);
2735 if (refreshed != SRA_UDH_RIGHT)
2737 gsi_next (gsi);
2738 unlink_stmt_vdef (*stmt);
2739 gsi_remove (&orig_gsi, true);
2740 sra_stats.deleted++;
2741 return SRA_AM_REMOVED;
2744 else
2746 if (racc)
2748 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2750 if (dump_file)
2752 fprintf (dump_file, "Removing load: ");
2753 print_gimple_stmt (dump_file, *stmt, 0, 0);
2756 if (TREE_CODE (lhs) == SSA_NAME)
2758 rhs = get_repl_default_def_ssa_name (racc);
2759 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2760 TREE_TYPE (rhs)))
2761 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2762 TREE_TYPE (lhs), rhs);
2764 else
2766 if (racc->first_child)
2767 generate_subtree_copies (racc->first_child, lhs,
2768 racc->offset, 0, 0, gsi,
2769 false, false, loc);
2771 gcc_assert (*stmt == gsi_stmt (*gsi));
2772 unlink_stmt_vdef (*stmt);
2773 gsi_remove (gsi, true);
2774 sra_stats.deleted++;
2775 return SRA_AM_REMOVED;
2778 else if (racc->first_child)
2779 generate_subtree_copies (racc->first_child, lhs, racc->offset,
2780 0, 0, gsi, false, true, loc);
2782 if (access_has_children_p (lacc))
2783 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2784 0, 0, gsi, true, true, loc);
2788 /* This gimplification must be done after generate_subtree_copies, lest we
2789 insert the subtree copies in the middle of the gimplified sequence. */
2790 if (force_gimple_rhs)
2791 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2792 true, GSI_SAME_STMT);
2793 if (gimple_assign_rhs1 (*stmt) != rhs)
2795 modify_this_stmt = true;
2796 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2797 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2800 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
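/* An illustrative sketch (hypothetical code and replacement names): with
   struct S { int a; int b; } scalarized on both sides, an assignment

     d = s;

   is expanded above into the loads d$a = s$a; d$b = s$b; placed after the
   statement, and once the LHS subtree is fully covered the original
   aggregate copy is removed.  If only the RHS is scalarized and carries
   no unscalarized data, the copy can likewise be rewritten or deleted;
   e.g. a load from a never-written aggregate is replaced by a
   default-definition SSA name of the replacement.  */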
2803 /* Traverse the function body, carrying out all modifications decided in
2804 analyze_all_variable_accesses. Return true iff the CFG has been
2805 changed. */
2807 static bool
2808 sra_modify_function_body (void)
2810 bool cfg_changed = false;
2811 basic_block bb;
2813 FOR_EACH_BB (bb)
2815 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2816 while (!gsi_end_p (gsi))
2818 gimple stmt = gsi_stmt (gsi);
2819 enum assignment_mod_result assign_result;
2820 bool modified = false, deleted = false;
2821 tree *t;
2822 unsigned i;
2824 switch (gimple_code (stmt))
2826 case GIMPLE_RETURN:
2827 t = gimple_return_retval_ptr (stmt);
2828 if (*t != NULL_TREE)
2829 modified |= sra_modify_expr (t, &gsi, false);
2830 break;
2832 case GIMPLE_ASSIGN:
2833 assign_result = sra_modify_assign (&stmt, &gsi);
2834 modified |= assign_result == SRA_AM_MODIFIED;
2835 deleted = assign_result == SRA_AM_REMOVED;
2836 break;
2838 case GIMPLE_CALL:
2839 /* Operands must be processed before the lhs. */
2840 for (i = 0; i < gimple_call_num_args (stmt); i++)
2842 t = gimple_call_arg_ptr (stmt, i);
2843 modified |= sra_modify_expr (t, &gsi, false);
2846 if (gimple_call_lhs (stmt))
2848 t = gimple_call_lhs_ptr (stmt);
2849 modified |= sra_modify_expr (t, &gsi, true);
2851 break;
2853 case GIMPLE_ASM:
2854 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2856 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2857 modified |= sra_modify_expr (t, &gsi, false);
2859 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2861 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2862 modified |= sra_modify_expr (t, &gsi, true);
2864 break;
2866 default:
2867 break;
2870 if (modified)
2872 update_stmt (stmt);
2873 if (maybe_clean_eh_stmt (stmt)
2874 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2875 cfg_changed = true;
2877 if (!deleted)
2878 gsi_next (&gsi);
2882 return cfg_changed;
2885 /* Generate statements initializing scalar replacements of parts of function
2886 parameters. */
2888 static void
2889 initialize_parameter_reductions (void)
2891 gimple_stmt_iterator gsi;
2892 gimple_seq seq = NULL;
2893 tree parm;
2895 for (parm = DECL_ARGUMENTS (current_function_decl);
2896 parm;
2897 parm = DECL_CHAIN (parm))
2899 VEC (access_p, heap) *access_vec;
2900 struct access *access;
2902 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2903 continue;
2904 access_vec = get_base_access_vector (parm);
2905 if (!access_vec)
2906 continue;
2908 if (!seq)
2910 seq = gimple_seq_alloc ();
2911 gsi = gsi_start (seq);
2914 for (access = VEC_index (access_p, access_vec, 0);
2915 access;
2916 access = access->next_grp)
2917 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
2918 EXPR_LOCATION (parm));
2921 if (seq)
2922 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2925 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2926 it reveals there are components of some aggregates to be scalarized, it runs
2927 the required transformations. */
2928 static unsigned int
2929 perform_intra_sra (void)
2931 int ret = 0;
2932 sra_initialize ();
2934 if (!find_var_candidates ())
2935 goto out;
2937 if (!scan_function ())
2938 goto out;
2940 if (!analyze_all_variable_accesses ())
2941 goto out;
2943 if (sra_modify_function_body ())
2944 ret = TODO_update_ssa | TODO_cleanup_cfg;
2945 else
2946 ret = TODO_update_ssa;
2947 initialize_parameter_reductions ();
2949 statistics_counter_event (cfun, "Scalar replacements created",
2950 sra_stats.replacements);
2951 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2952 statistics_counter_event (cfun, "Subtree copy stmts",
2953 sra_stats.subtree_copies);
2954 statistics_counter_event (cfun, "Subreplacement stmts",
2955 sra_stats.subreplacements);
2956 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2957 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2958 sra_stats.separate_lhs_rhs_handling);
2960 out:
2961 sra_deinitialize ();
2962 return ret;
2965 /* Perform early intraprocedural SRA. */
2966 static unsigned int
2967 early_intra_sra (void)
2969 sra_mode = SRA_MODE_EARLY_INTRA;
2970 return perform_intra_sra ();
2973 /* Perform "late" intraprocedural SRA. */
2974 static unsigned int
2975 late_intra_sra (void)
2977 sra_mode = SRA_MODE_INTRA;
2978 return perform_intra_sra ();
2982 static bool
2983 gate_intra_sra (void)
2985 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
2989 struct gimple_opt_pass pass_sra_early =
2992 GIMPLE_PASS,
2993 "esra", /* name */
2994 gate_intra_sra, /* gate */
2995 early_intra_sra, /* execute */
2996 NULL, /* sub */
2997 NULL, /* next */
2998 0, /* static_pass_number */
2999 TV_TREE_SRA, /* tv_id */
3000 PROP_cfg | PROP_ssa, /* properties_required */
3001 0, /* properties_provided */
3002 0, /* properties_destroyed */
3003 0, /* todo_flags_start */
3004 TODO_dump_func
3005 | TODO_update_ssa
3006 | TODO_ggc_collect
3007 | TODO_verify_ssa /* todo_flags_finish */
3011 struct gimple_opt_pass pass_sra =
3014 GIMPLE_PASS,
3015 "sra", /* name */
3016 gate_intra_sra, /* gate */
3017 late_intra_sra, /* execute */
3018 NULL, /* sub */
3019 NULL, /* next */
3020 0, /* static_pass_number */
3021 TV_TREE_SRA, /* tv_id */
3022 PROP_cfg | PROP_ssa, /* properties_required */
3023 0, /* properties_provided */
3024 0, /* properties_destroyed */
3025 TODO_update_address_taken, /* todo_flags_start */
3026 TODO_dump_func
3027 | TODO_update_ssa
3028 | TODO_ggc_collect
3029 | TODO_verify_ssa /* todo_flags_finish */
3034 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3035 parameter. */
3037 static bool
3038 is_unused_scalar_param (tree parm)
3040 tree name;
3041 return (is_gimple_reg (parm)
3042 && (!(name = gimple_default_def (cfun, parm))
3043 || has_zero_uses (name)));
3046 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3047 examine whether there are any direct or otherwise infeasible ones. If so,
3048 return true, otherwise return false. PARM must be a gimple register with a
3049 non-NULL default definition. */
3051 static bool
3052 ptr_parm_has_direct_uses (tree parm)
3054 imm_use_iterator ui;
3055 gimple stmt;
3056 tree name = gimple_default_def (cfun, parm);
3057 bool ret = false;
3059 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3061 int uses_ok = 0;
3062 use_operand_p use_p;
3064 if (is_gimple_debug (stmt))
3065 continue;
3067 /* Valid uses include dereferences on the lhs and the rhs. */
3068 if (gimple_has_lhs (stmt))
3070 tree lhs = gimple_get_lhs (stmt);
3071 while (handled_component_p (lhs))
3072 lhs = TREE_OPERAND (lhs, 0);
3073 if (TREE_CODE (lhs) == MEM_REF
3074 && TREE_OPERAND (lhs, 0) == name
3075 && integer_zerop (TREE_OPERAND (lhs, 1))
3076 && types_compatible_p (TREE_TYPE (lhs),
3077 TREE_TYPE (TREE_TYPE (name))))
3078 uses_ok++;
3080 if (gimple_assign_single_p (stmt))
3082 tree rhs = gimple_assign_rhs1 (stmt);
3083 while (handled_component_p (rhs))
3084 rhs = TREE_OPERAND (rhs, 0);
3085 if (TREE_CODE (rhs) == MEM_REF
3086 && TREE_OPERAND (rhs, 0) == name
3087 && integer_zerop (TREE_OPERAND (rhs, 1))
3088 && types_compatible_p (TREE_TYPE (rhs),
3089 TREE_TYPE (TREE_TYPE (name))))
3090 uses_ok++;
3092 else if (is_gimple_call (stmt))
3094 unsigned i;
3095 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3097 tree arg = gimple_call_arg (stmt, i);
3098 while (handled_component_p (arg))
3099 arg = TREE_OPERAND (arg, 0);
3100 if (TREE_CODE (arg) == MEM_REF
3101 && TREE_OPERAND (arg, 0) == name
3102 && integer_zerop (TREE_OPERAND (arg, 1))
3103 && types_compatible_p (TREE_TYPE (arg),
3104 TREE_TYPE (TREE_TYPE (name))))
3105 uses_ok++;
3109 /* If the number of valid uses does not match the number of
3110 uses in this stmt there is an unhandled use. */
3111 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3112 --uses_ok;
3114 if (uses_ok != 0)
3115 ret = true;
3117 if (ret)
3118 BREAK_FROM_IMM_USE_STMT (ui);
3121 return ret;
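/* An illustrative sketch (hypothetical code): in

     int f (int *p) { return *p; }

   the only use of p's default definition is a dereference, i.e. a MEM_REF
   with a zero offset and a compatible type, so uses_ok matches the use
   count and false is returned, keeping p an IPA-SRA candidate.  In

     int g (int *p) { return p == 0; }

   the pointer value itself is used, uses_ok falls short of the use count
   and true is returned, disqualifying p.  */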
3124 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3125 them in candidate_bitmap. Note that these do not necessarily include
3126 parameters which are unused and thus can be removed. Return true iff any
3127 such candidate has been found. */
3129 static bool
3130 find_param_candidates (void)
3132 tree parm;
3133 int count = 0;
3134 bool ret = false;
3136 for (parm = DECL_ARGUMENTS (current_function_decl);
3137 parm;
3138 parm = DECL_CHAIN (parm))
3140 tree type = TREE_TYPE (parm);
3142 count++;
3144 if (TREE_THIS_VOLATILE (parm)
3145 || TREE_ADDRESSABLE (parm)
3146 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3147 continue;
3149 if (is_unused_scalar_param (parm))
3151 ret = true;
3152 continue;
3155 if (POINTER_TYPE_P (type))
3157 type = TREE_TYPE (type);
3159 if (TREE_CODE (type) == FUNCTION_TYPE
3160 || TYPE_VOLATILE (type)
3161 || (TREE_CODE (type) == ARRAY_TYPE
3162 && TYPE_NONALIASED_COMPONENT (type))
3163 || !is_gimple_reg (parm)
3164 || is_va_list_type (type)
3165 || ptr_parm_has_direct_uses (parm))
3166 continue;
3168 else if (!AGGREGATE_TYPE_P (type))
3169 continue;
3171 if (!COMPLETE_TYPE_P (type)
3172 || !host_integerp (TYPE_SIZE (type), 1)
3173 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3174 || (AGGREGATE_TYPE_P (type)
3175 && type_internals_preclude_sra_p (type)))
3176 continue;
3178 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3179 ret = true;
3180 if (dump_file && (dump_flags & TDF_DETAILS))
3182 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3183 print_generic_expr (dump_file, parm, 0);
3184 fprintf (dump_file, "\n");
3188 func_param_count = count;
3189 return ret;
3192 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3193 maybe_modified. */
3195 static bool
3196 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3197 void *data)
3199 struct access *repr = (struct access *) data;
3201 repr->grp_maybe_modified = 1;
3202 return true;
3205 /* Analyze what representatives (in linked lists accessible from
3206 REPRESENTATIVES) can be modified by side effects of statements in the
3207 current function. */
3209 static void
3210 analyze_modified_params (VEC (access_p, heap) *representatives)
3212 int i;
3214 for (i = 0; i < func_param_count; i++)
3216 struct access *repr;
3218 for (repr = VEC_index (access_p, representatives, i);
3219 repr;
3220 repr = repr->next_grp)
3222 struct access *access;
3223 bitmap visited;
3224 ao_ref ar;
3226 if (no_accesses_p (repr))
3227 continue;
3228 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3229 || repr->grp_maybe_modified)
3230 continue;
3232 ao_ref_init (&ar, repr->expr);
3233 visited = BITMAP_ALLOC (NULL);
3234 for (access = repr; access; access = access->next_sibling)
3236 /* All accesses are read ones, otherwise grp_maybe_modified would
3237 be trivially set. */
3238 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3239 mark_maybe_modified, repr, &visited);
3240 if (repr->grp_maybe_modified)
3241 break;
3243 BITMAP_FREE (visited);
3248 /* Propagate distances in bb_dereferences in the opposite direction to the
3249 control flow edges, in each step storing the maximum of the current value
3250 and the minimum of all successors. These steps are repeated until the table
3251 stabilizes. Note that BBs which might terminate the function (according to
3252 the final_bbs bitmap) are never updated in this way. */
3254 static void
3255 propagate_dereference_distances (void)
3257 VEC (basic_block, heap) *queue;
3258 basic_block bb;
3260 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3261 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3262 FOR_EACH_BB (bb)
3264 VEC_quick_push (basic_block, queue, bb);
3265 bb->aux = bb;
3268 while (!VEC_empty (basic_block, queue))
3270 edge_iterator ei;
3271 edge e;
3272 bool change = false;
3273 int i;
3275 bb = VEC_pop (basic_block, queue);
3276 bb->aux = NULL;
3278 if (bitmap_bit_p (final_bbs, bb->index))
3279 continue;
3281 for (i = 0; i < func_param_count; i++)
3283 int idx = bb->index * func_param_count + i;
3284 bool first = true;
3285 HOST_WIDE_INT inh = 0;
3287 FOR_EACH_EDGE (e, ei, bb->succs)
3289 int succ_idx = e->dest->index * func_param_count + i;
3291 if (e->src == EXIT_BLOCK_PTR)
3292 continue;
3294 if (first)
3296 first = false;
3297 inh = bb_dereferences [succ_idx];
3299 else if (bb_dereferences [succ_idx] < inh)
3300 inh = bb_dereferences [succ_idx];
3303 if (!first && bb_dereferences[idx] < inh)
3305 bb_dereferences[idx] = inh;
3306 change = true;
3310 if (change && !bitmap_bit_p (final_bbs, bb->index))
3311 FOR_EACH_EDGE (e, ei, bb->preds)
3313 if (e->src->aux)
3314 continue;
3316 e->src->aux = e->src;
3317 VEC_quick_push (basic_block, queue, e->src);
3321 VEC_free (basic_block, heap, queue);
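/* An illustrative sketch: if a pointer parameter p is dereferenced only
   in the then-branch of

     if (cond) x = *p;

   then the minimum over the successors of the condition block is 0
   because of the dereference-free else path, so a zero distance is
   propagated up to the entry block and the dereference is not considered
   certain.  Only if every path from the entry dereferences *p does a
   positive distance reach the entry block.  */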
3324 /* Dump a dereferences TABLE with heading STR to file F. */
3326 static void
3327 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3329 basic_block bb;
3331 fprintf (f, "%s", str);
3332 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3334 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3335 if (bb != EXIT_BLOCK_PTR)
3337 int i;
3338 for (i = 0; i < func_param_count; i++)
3340 int idx = bb->index * func_param_count + i;
3341 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3344 fprintf (f, "\n");
3346 fprintf (f, "\n");
3349 /* Determine which (parts of) parameters passed by reference and not assigned
3350 to are not certainly dereferenced in this function, in which case the
3351 dereferencing cannot be safely moved to the caller without potentially
3352 introducing a segfault. Mark such REPRESENTATIVES as
3353 grp_not_necessarilly_dereferenced.
3355 The maximum dereference "distance," i.e. the offset + size of the accessed
3356 part, is calculated for each pointer parameter rather than a simple
3357 boolean, to handle cases when only a fraction of the whole aggregate is
3358 allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3359 an example).
3361 The maximum dereference distances for each pointer parameter and BB are
3362 already stored in bb_dereferences. This routine simply propagates these
3363 values upwards by propagate_dereference_distances and then compares the
3364 distances of individual parameters in the ENTRY BB to the equivalent
3365 distances of each representative of a (fraction of a) parameter. */
3367 static void
3368 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3370 int i;
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3373 dump_dereferences_table (dump_file,
3374 "Dereference table before propagation:\n",
3375 bb_dereferences);
3377 propagate_dereference_distances ();
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3380 dump_dereferences_table (dump_file,
3381 "Dereference table after propagation:\n",
3382 bb_dereferences);
3384 for (i = 0; i < func_param_count; i++)
3386 struct access *repr = VEC_index (access_p, representatives, i);
3387 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3389 if (!repr || no_accesses_p (repr))
3390 continue;
3394 if ((repr->offset + repr->size) > bb_dereferences[idx])
3395 repr->grp_not_necessarilly_dereferenced = 1;
3396 repr = repr->next_grp;
3398 while (repr);
3402 /* Return the representative access for the parameter declaration PARM if it is
3403 a scalar passed by reference which is not written to and the pointer value
3404 is not used directly. Thus, if it is legal to dereference it in the caller
3405 and we can rule out modifications through aliases, such a parameter should be
3406 turned into one passed by value. Return NULL otherwise. */
3408 static struct access *
3409 unmodified_by_ref_scalar_representative (tree parm)
3411 int i, access_count;
3412 struct access *repr;
3413 VEC (access_p, heap) *access_vec;
3415 access_vec = get_base_access_vector (parm);
3416 gcc_assert (access_vec);
3417 repr = VEC_index (access_p, access_vec, 0);
3418 if (repr->write)
3419 return NULL;
3420 repr->group_representative = repr;
3422 access_count = VEC_length (access_p, access_vec);
3423 for (i = 1; i < access_count; i++)
3425 struct access *access = VEC_index (access_p, access_vec, i);
3426 if (access->write)
3427 return NULL;
3428 access->group_representative = repr;
3429 access->next_sibling = repr->next_sibling;
3430 repr->next_sibling = access;
3433 repr->grp_read = 1;
3434 repr->grp_scalar_ptr = 1;
3435 return repr;
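/* An illustrative sketch (hypothetical code): a parameter used as in

     int f (int *p) { return *p + *p; }

   has only read accesses through the pointer, so they are all chained
   behind a single representative with grp_scalar_ptr set.  If the later
   modification and dereference analyses also pass, IPA-SRA can turn the
   parameter into one passed by value, i.e. effectively int f (int p),
   with the callers adjusted accordingly.  */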
3438 /* Return true iff this access precludes IPA-SRA of the parameter it is
3439 associated with. */
3441 static bool
3442 access_precludes_ipa_sra_p (struct access *access)
3444 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3445 is an incompatible assignment in a call statement (and possibly even in asm
3446 statements). This can be relaxed by using a new temporary but only for
3447 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3448 intraprocedural SRA we deal with this by keeping the old aggregate around,
3449 something we cannot do in IPA-SRA.) */
3450 if (access->write
3451 && (is_gimple_call (access->stmt)
3452 || gimple_code (access->stmt) == GIMPLE_ASM))
3453 return true;
3455 return false;
3459 /* Sort collected accesses for parameter PARM, identify representatives for
3460 each accessed region and link them together. Return NULL if there are
3461 different but overlapping accesses, the special pointer value meaning
3462 there are no accesses for this parameter if that is the case, and the
3463 first representative otherwise. Set *RO_GRP if there is a group of accesses
3464 with only read (i.e. no write) accesses. */
3466 static struct access *
3467 splice_param_accesses (tree parm, bool *ro_grp)
3469 int i, j, access_count, group_count;
3470 int agg_size, total_size = 0;
3471 struct access *access, *res, **prev_acc_ptr = &res;
3472 VEC (access_p, heap) *access_vec;
3474 access_vec = get_base_access_vector (parm);
3475 if (!access_vec)
3476 return &no_accesses_representant;
3477 access_count = VEC_length (access_p, access_vec);
3479 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3480 compare_access_positions);
3482 i = 0;
3483 total_size = 0;
3484 group_count = 0;
3485 while (i < access_count)
3487 bool modification;
3488 access = VEC_index (access_p, access_vec, i);
3489 modification = access->write;
3490 if (access_precludes_ipa_sra_p (access))
3491 return NULL;
3493 /* Access is about to become group representative unless we find some
3494 nasty overlap which would preclude us from breaking this parameter
3495 apart. */
3497 j = i + 1;
3498 while (j < access_count)
3500 struct access *ac2 = VEC_index (access_p, access_vec, j);
3501 if (ac2->offset != access->offset)
3503 /* All or nothing law for parameters. */
3504 if (access->offset + access->size > ac2->offset)
3505 return NULL;
3506 else
3507 break;
3509 else if (ac2->size != access->size)
3510 return NULL;
3512 if (access_precludes_ipa_sra_p (ac2))
3513 return NULL;
3515 modification |= ac2->write;
3516 ac2->group_representative = access;
3517 ac2->next_sibling = access->next_sibling;
3518 access->next_sibling = ac2;
3519 j++;
3522 group_count++;
3523 access->grp_maybe_modified = modification;
3524 if (!modification)
3525 *ro_grp = true;
3526 *prev_acc_ptr = access;
3527 prev_acc_ptr = &access->next_grp;
3528 total_size += access->size;
3529 i = j;
3532 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3533 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3534 else
3535 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3536 if (total_size >= agg_size)
3537 return NULL;
3539 gcc_assert (group_count > 0);
3540 return res;
3543 /* Decide whether parameters with representative accesses given by REPR should
3544 be reduced into components. */
3546 static int
3547 decide_one_param_reduction (struct access *repr)
3549 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3550 bool by_ref;
3551 tree parm;
3553 parm = repr->base;
3554 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3555 gcc_assert (cur_parm_size > 0);
3557 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3559 by_ref = true;
3560 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3562 else
3564 by_ref = false;
3565 agg_size = cur_parm_size;
3568 if (dump_file)
3570 struct access *acc;
3571 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3572 print_generic_expr (dump_file, parm, 0);
3573 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3574 for (acc = repr; acc; acc = acc->next_grp)
3575 dump_access (dump_file, acc, true);
3578 total_size = 0;
3579 new_param_count = 0;
3581 for (; repr; repr = repr->next_grp)
3583 gcc_assert (parm == repr->base);
3584 new_param_count++;
3586 if (!by_ref || (!repr->grp_maybe_modified
3587 && !repr->grp_not_necessarilly_dereferenced))
3588 total_size += repr->size;
3589 else
3590 total_size += cur_parm_size;
3593 gcc_assert (new_param_count > 0);
3595 if (optimize_function_for_size_p (cfun))
3596 parm_size_limit = cur_parm_size;
3597 else
3598 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3599 * cur_parm_size);
3601 if (total_size < agg_size
3602 && total_size <= parm_size_limit)
3604 if (dump_file)
3605 fprintf (dump_file, " ....will be split into %i components\n",
3606 new_param_count);
3607 return new_param_count;
3609 else
3610 return 0;
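/* An illustrative sketch (hypothetical code, 32-bit ints and pointers
   assumed): for

     struct S { int a; int b; int c; };
     int f (struct S *p) { return p->a + p->b; }

   cur_parm_size is 32 (the pointer), agg_size is 96 and total_size is 64.
   With the default ipa-sra-ptr-growth-factor of 2 the limit is 64, so
   both total_size < agg_size and total_size <= parm_size_limit hold and
   the function above returns 2, splitting the parameter into two scalar
   ones.  */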
3613 /* The order of the following enums is important; we need to do extra work for
3614 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3615 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3616 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3618 /* Identify representatives of all accesses to all candidate parameters for
3619 IPA-SRA. Return result based on what representatives have been found. */
3621 static enum ipa_splicing_result
3622 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3624 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3625 tree parm;
3626 struct access *repr;
3628 *representatives = VEC_alloc (access_p, heap, func_param_count);
3630 for (parm = DECL_ARGUMENTS (current_function_decl);
3631 parm;
3632 parm = DECL_CHAIN (parm))
3634 if (is_unused_scalar_param (parm))
3636 VEC_quick_push (access_p, *representatives,
3637 &no_accesses_representant);
3638 if (result == NO_GOOD_ACCESS)
3639 result = UNUSED_PARAMS;
3641 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3642 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3643 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3645 repr = unmodified_by_ref_scalar_representative (parm);
3646 VEC_quick_push (access_p, *representatives, repr);
3647 if (repr)
3648 result = UNMODIF_BY_REF_ACCESSES;
3650 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3652 bool ro_grp = false;
3653 repr = splice_param_accesses (parm, &ro_grp);
3654 VEC_quick_push (access_p, *representatives, repr);
3656 if (repr && !no_accesses_p (repr))
3658 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3660 if (ro_grp)
3661 result = UNMODIF_BY_REF_ACCESSES;
3662 else if (result < MODIF_BY_REF_ACCESSES)
3663 result = MODIF_BY_REF_ACCESSES;
3665 else if (result < BY_VAL_ACCESSES)
3666 result = BY_VAL_ACCESSES;
3668 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3669 result = UNUSED_PARAMS;
3671 else
3672 VEC_quick_push (access_p, *representatives, NULL);
3675 if (result == NO_GOOD_ACCESS)
3677 VEC_free (access_p, heap, *representatives);
3678 *representatives = NULL;
3679 return NO_GOOD_ACCESS;
3682 return result;
3685 /* Return the index of BASE in PARMS. Abort if it is not found. */
3687 static inline int
3688 get_param_index (tree base, VEC(tree, heap) *parms)
3690 int i, len;
3692 len = VEC_length (tree, parms);
3693 for (i = 0; i < len; i++)
3694 if (VEC_index (tree, parms, i) == base)
3695 return i;
3696 gcc_unreachable ();
3699 /* Convert the decisions made at the representative level into compact
3700 parameter adjustments. REPRESENTATIVES are pointers to first
3701 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
3702 final number of adjustments. */
3704 static ipa_parm_adjustment_vec
3705 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3706 int adjustments_count)
3708 VEC (tree, heap) *parms;
3709 ipa_parm_adjustment_vec adjustments;
3710 tree parm;
3711 int i;
3713 gcc_assert (adjustments_count > 0);
3714 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3715 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3716 parm = DECL_ARGUMENTS (current_function_decl);
3717 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3719 struct access *repr = VEC_index (access_p, representatives, i);
3721 if (!repr || no_accesses_p (repr))
3723 struct ipa_parm_adjustment *adj;
3725 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3726 memset (adj, 0, sizeof (*adj));
3727 adj->base_index = get_param_index (parm, parms);
3728 adj->base = parm;
3729 if (!repr)
3730 adj->copy_param = 1;
3731 else
3732 adj->remove_param = 1;
3734 else
3736 struct ipa_parm_adjustment *adj;
3737 int index = get_param_index (parm, parms);
3739 for (; repr; repr = repr->next_grp)
3741 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3742 memset (adj, 0, sizeof (*adj));
3743 gcc_assert (repr->base == parm);
3744 adj->base_index = index;
3745 adj->base = repr->base;
3746 adj->type = repr->type;
3747 adj->offset = repr->offset;
3748 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3749 && (repr->grp_maybe_modified
3750 || repr->grp_not_necessarilly_dereferenced));
3755 VEC_free (tree, heap, parms);
3756 return adjustments;
3759 /* Analyze the collected accesses and produce a plan for what to do with the
3760 parameters in the form of adjustments, NULL meaning nothing. */
3762 static ipa_parm_adjustment_vec
3763 analyze_all_param_acesses (void)
3765 enum ipa_splicing_result repr_state;
3766 bool proceed = false;
3767 int i, adjustments_count = 0;
3768 VEC (access_p, heap) *representatives;
3769 ipa_parm_adjustment_vec adjustments;
3771 repr_state = splice_all_param_accesses (&representatives);
3772 if (repr_state == NO_GOOD_ACCESS)
3773 return NULL;
3775 /* If there are any parameters passed by reference which are not modified
3776 directly, we need to check whether they can be modified indirectly. */
3777 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3779 analyze_caller_dereference_legality (representatives);
3780 analyze_modified_params (representatives);
3783 for (i = 0; i < func_param_count; i++)
3785 struct access *repr = VEC_index (access_p, representatives, i);
3787 if (repr && !no_accesses_p (repr))
3789 if (repr->grp_scalar_ptr)
3791 adjustments_count++;
3792 if (repr->grp_not_necessarilly_dereferenced
3793 || repr->grp_maybe_modified)
3794 VEC_replace (access_p, representatives, i, NULL);
3795 else
3797 proceed = true;
3798 sra_stats.scalar_by_ref_to_by_val++;
3801 else
3803 int new_components = decide_one_param_reduction (repr);
3805 if (new_components == 0)
3807 VEC_replace (access_p, representatives, i, NULL);
3808 adjustments_count++;
3810 else
3812 adjustments_count += new_components;
3813 sra_stats.aggregate_params_reduced++;
3814 sra_stats.param_reductions_created += new_components;
3815 proceed = true;
3819 else
3821 if (no_accesses_p (repr))
3823 proceed = true;
3824 sra_stats.deleted_unused_parameters++;
3826 adjustments_count++;
3830 if (!proceed && dump_file)
3831 fprintf (dump_file, "NOT proceeding to change params.\n");
3833 if (proceed)
3834 adjustments = turn_representatives_into_adjustments (representatives,
3835 adjustments_count);
3836 else
3837 adjustments = NULL;
3839 VEC_free (access_p, heap, representatives);
3840 return adjustments;
3843 /* If a parameter replacement identified by ADJ does not yet exist in the form
3844 of a declaration, create it and record it; otherwise return the previously
3845 created one. */
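/* The substitute is created lazily, on the first SSA name that needs
   rebasing, and cached in ADJ->new_ssa_base so that all SSA names of the
   same removed parameter end up sharing one underlying VAR_DECL.  */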
static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      get_var_ann (repl);
      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */
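/* Adjustments marked copy_param are deliberately ignored here: they describe
   parameters that survive unchanged, whose SSA names need no rebasing.  */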
static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}

/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a newly created VAR_DECL, together with all of its uses, and
   return true.  ADJUSTMENTS is a pointer to an adjustments vector.  */
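/* A hypothetical example: if parameter P is being removed, a definition such
   as

     p_3 = PHI <p_1(2), p_5(4)>

   has its result (and consequently all uses of it) rebased onto a fresh SSA
   name of the substitute VAR_DECL, since the PARM_DECL itself is about to
   disappear.  */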
static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}

/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */
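/* A hypothetical example of the rewrite: given an adjustment for the
   component of *P at bit offset 32 whose new declaration is REPL, an
   occurrence of P->field at that offset becomes plain REPL, or *REPL when
   the component is still passed by reference, possibly wrapped in a
   VIEW_CONVERT_EXPR if CONVERT is set and the types differ.  */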
static bool
sra_ipa_modify_expr (tree *expr, bool convert,
                     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base
          && (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}

/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */
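/* Both sides are rewritten with CONVERT set to false, so any type mismatch
   the rewrite introduces is resolved here: normally by forcing the right
   hand side through a VIEW_CONVERT_EXPR gimplified before the statement,
   with CONSTRUCTORs and reference-class right hand sides getting the special
   treatment described in the body below.  */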
static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}

/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */
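/* PHI nodes are processed before ordinary statements because SSA names of
   removed parameters may be defined by them.  The only way this function can
   change the CFG is by purging EH edges that became dead when a statement
   was modified.  */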
static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */
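/* Resetting the bound values makes the debugger report such parameters as
   optimized out instead of presenting stale or wrong values.  */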
static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
        continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
        {
          /* All other users must have been removed by
             ipa_sra_modify_function_body.  */
          gcc_assert (is_gimple_debug (stmt));
          gimple_debug_bind_reset_value (stmt);
          update_stmt (stmt);
        }
    }
}

/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */
static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
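/* Direct call sites are found through the callers list of NODE; recursive
   calls inside the new body still refer to the old declaration at this point
   and are therefore located by scanning the body and redirected
   explicitly.  */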
static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
      compute_inline_parameters (cs->caller);
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call\n");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */
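/* The sequence is: snapshot the current callers, create an "isra" clone of
   NODE with those callers redirected to it, rewrite the clone's formal
   parameters and body according to ADJUSTMENTS, fix up all call sites, and
   finally make the clone local so later passes can rely on the new
   signature.  */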
static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  struct cgraph_edge *cs;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) *redirect_callers;
  int node_callers;

  node_callers = 0;
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    node_callers++;
  redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

  rebuild_cgraph_edges ();
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
                                         NULL, NULL, "isra");
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */
static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */
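/* The driver mirrors the structure of the intraprocedural passes:
   preliminary checks, candidate discovery, a scan of the function body,
   analysis yielding the adjustment vector, and only then the actual
   modification of the function and of its callers.  */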
static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_accesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

/* Return true iff early IPA-SRA shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph     /* todo_flags_finish */
 }
};