/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
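
/* For illustration only (not part of the pass): given a function such as

     struct point { int x; int y; };

     int
     foo (void)
     {
       struct point p;
       p.x = 3;
       p.y = 4;
       return p.x + p.y;
     }

   intraprocedural SRA replaces the two fields of P with independent scalar
   variables (p$x and p$y, say), after which P itself is dead and the scalar
   optimizers can operate on the replacements directly.  The variable names
   here are hypothetical; see make_fancy_name below for how replacement names
   are actually built.  */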
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "expr.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "diagnostic.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater than or equal to the offset of the root and offset+size is smaller
   than or equal to offset+size of the root.  Children of an access are sorted
   by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};
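
/* Example (illustrative only): for a variable of type

     struct outer { struct inner { int a; int b; } in; int c; };

   the representatives could form the tree

     <0, 96>   outer
       <0, 64>   in
	 <0, 32>    in.a
	 <32, 32>   in.b
       <64, 32>  c

   where every child lies within its parent and children are sorted by offset.
   The <offset, size> pairs are in bits and assume a 32-bit int with no
   padding.  */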

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used, and need to be used, as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Statistics gathered during the pass.  */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
	     "grp_read = %d, grp_hint = %d, "
	     "grp_covered = %d, grp_unscalarizable_region = %d, "
	     "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
	     "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_write, access->total_scalarization,
	     access->grp_read, access->grp_hint,
	     access->grp_covered, access->grp_unscalarizable_region,
	     access->grp_unscalarized_data, access->grp_partial_lhs,
	     access->grp_to_be_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
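
/* Illustrative use of the work queue, which is really a stack (LIFO): after

     add_access_to_work_queue (a);
     add_access_to_work_queue (b);

   pop_access_from_work_queue () returns B first and A second.  Popping
   assumes the queue is non-empty; callers such as propagate_all_subaccesses
   below test work_queue_head for NULL first.  */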

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld)
		|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
		|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
		|| !host_integerp (DECL_SIZE (fld), 1))
	      return true;

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
	return type_internals_preclude_sra_p (et);
      else
	return false;

    default:
      return false;
    }
}
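
/* Examples (illustrative only):

     struct ok  { int a; struct { short b; short c; } in; };   (allowed)
     struct bad { volatile int a; int b; };			(precluded)

   For struct bad, the TREE_THIS_VOLATILE check on the first field fires and
   the whole aggregate is excluded from scalarization.  */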

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = TREE_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
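
/* Illustrative arithmetic: in a function with three parameters
   (func_param_count == 3), a dereference of the second parameter
   (parm_index == 1) in basic block 5 is recorded at

     idx = 5 * 3 + 1 = 16

   i.e. bb_dereferences is a flattened basic-block x parameter matrix of
   maximum dereferenced distances.  */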

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);
  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
	{
	  disqualify_candidate (base,
				"Encountered an access not aligned to a byte.");
	  return NULL;
	}

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}
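
/* Illustrative values (assuming a 32-bit int and the usual layout): for

     struct S { int a; int b; } s;
     ... = s.b;

   get_ref_base_and_extent returns S as the base and sets OFFSET to 32 and
   both SIZE and MAX_SIZE to 32, so the resulting access covers bits [32, 64)
   of S.  A variable array index in EXPR would make SIZE differ from MAX_SIZE
   and mark the region unscalarizable.  */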

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;

	last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}
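
/* Examples (illustrative only):

     struct a { int i; struct { float f; } in; };  (qualifies)
     struct b { int i; int arr[2]; };		    (does not; array field)

   A record whose last field has zero size, such as a GNU C empty struct
   member, is rejected by the last_fld_has_zero_size check above.  */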

/* Create total_scalarization accesses for all scalar type fields in DECL,
   whose type must be a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must be
   the offset of DECL within BASE.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;
	    tree expr;
	    bool ok;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    expr = base;
	    ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
				       ft, false);
	    gcc_assert (ok);

	    access = create_access_1 (base, pos, size);
	    access->expr = expr;
	    access->type = ft;
	    access->total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos);
      }
}
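
/* Illustrative effect: for

     struct p { int x; struct q { short a; short b; } in; } v;

   completely_scalarize_record creates scalar accesses at bit offsets 0 (x),
   32 (in.a) and 48 (in.b), recursing into IN because it is itself a record.
   The offsets assume a 32-bit int and 16-bit short.  */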

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (INDIRECT_REF_P (t))
	t = TREE_OPERAND (t, 0);
      t = get_ssa_base_param (t);
    }

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  tree expr = *expr_ptr;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its operand
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      if (sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Callback of scan_function.  Scan expression EXPR and create access
   structures for all accesses to candidates for scalarization.  Return true if
   any access has been inserted.  */

static bool
build_access_from_expr (tree *expr_ptr,
			gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
			void *data ATTRIBUTE_UNUSED)
{
  struct access *access;

  access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Result code for scan_assign callback for scan_function.  */
enum scan_assign_result { SRA_SA_NONE,	     /* nothing done for the stmt */
			  SRA_SA_PROCESSED,  /* stmt analyzed/changed */
			  SRA_SA_REMOVED };  /* stmt redundant and eliminated */

/* Callback of scan_function.  Scan expressions occurring in the statement
   pointed to by STMT_PTR, create access structures for all accesses to
   candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static enum scan_assign_result
build_accesses_from_assign (gimple *stmt_ptr,
			    gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
			    void *data ATTRIBUTE_UNUSED)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_ptr, *rhs_ptr;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return SRA_SA_NONE;

  lhs_ptr = gimple_assign_lhs_ptr (stmt);
  rhs_ptr = gimple_assign_rhs1_ptr (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
    return SRA_SA_NONE;

  racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
  lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);

  if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
      && racc && !is_gimple_reg_type (racc->type))
    bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
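
/* Illustrative trigger (hypothetical source):

     struct S s;
     asm volatile ("" : : "m" (s));

   The "m" constraint makes the asm take the address of S, visit_addr is
   called on it and S is disqualified, since a memory operand requires the
   aggregate to stay in memory.  */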

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan the function and look for interesting statements.  Return true if any
   has been found or processed, as indicated by callbacks.  SCAN_EXPR is a
   callback called on all expressions within statements except assign
   statements and those deemed entirely unsuitable for some reason (all
   operands in such statements and expressions are removed from
   candidate_bitmap).  SCAN_ASSIGN is a callback called on all assign
   statements, HANDLE_SSA_DEFS is a callback called on assign statements and
   those call statements which have an lhs; it can be NULL.  ANALYSIS_STAGE is
   true when running in the analysis stage of a pass and thus no statement is
   being modified.  DATA is a pointer passed to all callbacks.  If any single
   callback returns true, this function also returns true, otherwise it returns
   false.  */

static bool
scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
	       enum scan_assign_result (*scan_assign) (gimple *,
						       gimple_stmt_iterator *,
						       void *),
	       bool (*handle_ssa_defs)(gimple, void *),
	       bool analysis_stage, void *data)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  unsigned i;
  tree *t;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      bool bb_changed = false;

      if (handle_ssa_defs)
	for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  ret |= handle_ssa_defs (gsi_stmt (gsi), data);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum scan_assign_result assign_result;
	  bool any = false, deleted = false;

	  if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		any |= scan_expr (t, &gsi, false, data);
	      if (analysis_stage && final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = scan_assign (&stmt, &gsi, data);
	      any |= assign_result == SRA_SA_PROCESSED;
	      deleted = assign_result == SRA_SA_REMOVED;
	      if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
		any |= handle_ssa_defs (stmt, data);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  tree *argp = gimple_call_arg_ptr (stmt, i);
		  any |= scan_expr (argp, &gsi, false, data);
		}

	      if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      if (gimple_call_lhs (stmt))
		{
		  tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
		  if (!analysis_stage
		      || !disqualify_ops_if_throwing_stmt (stmt,
							   *lhs_ptr, NULL))
		    {
		      any |= scan_expr (lhs_ptr, &gsi, true, data);
		      if (handle_ssa_defs)
			any |= handle_ssa_defs (stmt, data);
		    }
		}
	      break;

	    case GIMPLE_ASM:
	      if (analysis_stage)
		{
		  walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
						 asm_visit_addr);
		  if (final_bbs)
		    bitmap_set_bit (final_bbs, bb->index);
		}
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  any |= scan_expr (op, &gsi, false, data);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  any |= scan_expr (op, &gsi, true, data);
		}
	      break;

	    default:
	      break;
	    }

	  if (any)
	    {
	      ret = true;

	      if (!analysis_stage)
		{
		  bb_changed = true;
		  update_stmt (stmt);
		  maybe_clean_eh_stmt (stmt);
		}
	    }
	  if (deleted)
	    bb_changed = true;
	  else
	    {
	      gsi_next (&gsi);
	      ret = true;
	    }
	}
      if (!analysis_stage && bb_changed && sra_mode == SRA_MODE_EARLY_IPA)
	gimple_purge_dead_eh_edges (bb);
    }

  return ret;
}

/* Helper of qsort.  There are pointers to accesses in the array.  An access is
   considered smaller than another if it has a smaller offset or if the offsets
   are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
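
/* Illustrative ordering: accesses with <offset, size> pairs
   <0, 64>, <0, 32> and <32, 32> sort as

     <0, 64>  before  <0, 32>  before  <32, 32>

   i.e. by ascending offset, and for equal offsets the bigger access comes
   first, so an enclosing access always precedes the accesses it contains.  */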

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));

      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for a replacement variable based on EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
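
/* Illustrative output (the source names are made up): for the expression
   rec.in.arr[3] this produces "rec$in$arr$3", i.e. components are joined with
   '$' and constant array indices are printed directly.  The result becomes
   the DECL_NAME of the replacement variable in create_access_replacement.  */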

/* Helper function for build_ref_for_offset.  */

static bool
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
			tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      if (res)
		{
		  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
				 NULL_TREE);
		  expr_ptr = &expr;
		}
	      else
		expr_ptr = NULL;
	      if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
					  offset - pos, exp_type))
		{
		  if (res)
		    *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  if (res)
	    {
	      index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	      if (!integer_zerop (minidx))
		index = int_const_binop (PLUS_EXPR, index, minidx, 0);
	      *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			     NULL_TREE, NULL_TREE);
	    }
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}

/* Construct an expression that would reference a part of aggregate *EXPR of
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
   function only determines whether it can build such a reference without
   actually doing it, otherwise, the tree it points to is unshared first and
   then used as a base for further sub-references.

   FIXME: Eventually this should be replaced with
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
   minor rewrite of fold_stmt.  */

bool
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
		      tree exp_type, bool allow_ptr)
{
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;

  if (expr)
    *expr = unshare_expr (*expr);

  if (allow_ptr && POINTER_TYPE_P (type))
    {
      type = TREE_TYPE (type);
      if (expr)
	*expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
    }

  return build_ref_for_offset_1 (expr, type, offset, exp_type);
}
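
/* Illustrative result (assuming a 32-bit int): for a variable

     struct S { int a; int b[4]; } s;

   asking for OFFSET 96 with EXP_TYPE being int and *EXPR initially the
   declaration of S descends through field B and constant index 2, producing
   the equivalent of s.b[2], since bit 96 falls 64 bits into B.  */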

/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
	  || needs_to_live_in_memory (var)
	  || TREE_THIS_VOLATILE (var)
	  || !COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || type_internals_preclude_sra_p (type)
	  /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  || (sra_mode == SRA_MODE_EARLY_INTRA
	      && is_va_list_type (type)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
	 compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    grp_write = true;
	  else
	    {
	      if (grp_read)
		multiple_reads = true;
	      else
		grp_read = true;
	    }
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration; it is stored in
   ACCESS->replacement_decl by get_access_replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
	  || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (repl, access->expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  */

static void
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      build_access_subtree (access);
    }
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  */

static void
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      build_access_subtree (&access);
      root->next_grp = access;
    }
}

/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read when MARK_READ is true and grp_write when MARK_WRITE is true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
			bool mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read)
    root->grp_read = true;
  else if (root->grp_read)
    mark_read = true;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
	hole = true;
      else
	covered_to += child->size;

      sth_created |= analyze_access_subtree (child, allow_replacements,
					     mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || (direct_read && root->grp_write))
      /* We must not ICE later on when trying to build an access to the
	 original data within the aggregate even when it is impossible to do in
	 a defined way like in the PR 42703 testcase.  Therefore we check
	 pre-emptively here that we will be able to do that.  */
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
			       root->type, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced.\n");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}

/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, false, false))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}

/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
   size SIZE would conflict with an already existing one.  If exactly such a
   child already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}

/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
			     model->type, false))
    return NULL;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
1936 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1937 true if any new subaccess was created. Additionally, if RACC is a scalar
1938 access but LACC is not, change the type of the latter, if possible. */
1940 static bool
1941 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1943 struct access *rchild;
1944 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1945 bool ret = false;
1947 if (is_gimple_reg_type (lacc->type)
1948 || lacc->grp_unscalarizable_region
1949 || racc->grp_unscalarizable_region)
1950 return false;
1952 if (!lacc->first_child && !racc->first_child
1953 && is_gimple_reg_type (racc->type))
1955 tree t = lacc->base;
1957 if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1958 false))
1960 lacc->expr = t;
1961 lacc->type = racc->type;
1963 return false;
1966 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1968 struct access *new_acc = NULL;
1969 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1971 if (rchild->grp_unscalarizable_region)
1972 continue;
1974 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1975 &new_acc))
1977 if (new_acc)
1979 rchild->grp_hint = 1;
1980 new_acc->grp_hint |= new_acc->grp_read;
1981 if (rchild->first_child)
1982 ret |= propagate_subaccesses_across_link (new_acc, rchild);
1984 continue;
1987 /* If (a part of) a union field is on the RHS of an assignment, it can
1988 have sub-accesses which do not make sense on the LHS (PR 40351).
1989 Check that this is not the case. */
1990 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
1991 rchild->type, false))
1992 continue;
1994 rchild->grp_hint = 1;
1995 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
1996 if (new_acc)
1998 ret = true;
1999 if (racc->first_child)
2000 propagate_subaccesses_across_link (new_acc, rchild);
2004 return ret;
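/* A sketch of what this propagation buys us (illustrative only,
   use_float being a hypothetical consumer):

     struct s { int i; float f; };
     extern void use_float (float);

     struct s
     g (struct s y)
     {
       struct s x;
       use_float (y.f);
       x = y;
       return x;
     }

   The scalar access to y.f on the right hand side of the aggregate
   assignment is propagated across the assign link to x, giving x an
   artificial child access at the same offset, so that the aggregate
   copy can later be rewritten as copies of individual scalar
   replacements.  */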
2007 /* Propagate all subaccesses across assignment links. */
2009 static void
2010 propagate_all_subaccesses (void)
2012 while (work_queue_head)
2014 struct access *racc = pop_access_from_work_queue ();
2015 struct assign_link *link;
2017 gcc_assert (racc->first_link);
2019 for (link = racc->first_link; link; link = link->next)
2021 struct access *lacc = link->lacc;
2023 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2024 continue;
2025 lacc = lacc->group_representative;
2026 if (propagate_subaccesses_across_link (lacc, racc)
2027 && lacc->first_link)
2028 add_access_to_work_queue (lacc);
2033 /* Go through all accesses collected throughout the (intraprocedural) analysis
2034 stage, exclude overlapping ones, identify representatives and build trees
2035 out of them, making decisions about scalarization on the way. Return true
2036 iff there are any to-be-scalarized variables after this stage. */
2038 static bool
2039 analyze_all_variable_accesses (void)
2041 int res = 0;
2042 bitmap tmp = BITMAP_ALLOC (NULL);
2043 bitmap_iterator bi;
2044 unsigned i, max_total_scalarization_size;
2046 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2047 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2049 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2050 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2051 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2053 tree var = referenced_var (i);
2055 if (TREE_CODE (var) == VAR_DECL
2056 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2057 <= max_total_scalarization_size)
2058 && type_consists_of_records_p (TREE_TYPE (var)))
2060 completely_scalarize_record (var, var, 0);
2061 if (dump_file && (dump_flags & TDF_DETAILS))
2063 fprintf (dump_file, "Will attempt to totally scalarize ");
2064 print_generic_expr (dump_file, var, 0);
2065 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2070 bitmap_copy (tmp, candidate_bitmap);
2071 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2073 tree var = referenced_var (i);
2074 struct access *access;
2076 access = sort_and_splice_var_accesses (var);
2077 if (access)
2078 build_access_trees (access);
2079 else
2080 disqualify_candidate (var,
2081 "No or inhibitingly overlapping accesses.");
2084 propagate_all_subaccesses ();
2086 bitmap_copy (tmp, candidate_bitmap);
2087 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2089 tree var = referenced_var (i);
2090 struct access *access = get_first_repr_for_decl (var);
2092 if (analyze_access_trees (access))
2094 res++;
2095 if (dump_file && (dump_flags & TDF_DETAILS))
2097 fprintf (dump_file, "\nAccess trees for ");
2098 print_generic_expr (dump_file, var, 0);
2099 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2100 dump_access_tree (dump_file, access);
2101 fprintf (dump_file, "\n");
2104 else
2105 disqualify_candidate (var, "No scalar replacements to be created.");
2108 BITMAP_FREE (tmp);
2110 if (res)
2112 statistics_counter_event (cfun, "Scalarized aggregates", res);
2113 return true;
2115 else
2116 return false;
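/* For example (a sketch; the exact threshold depends on the target's
   MOVE_RATIO and word size), a variable such as

     struct rgb { unsigned char r, g, b; };
     struct pixel { struct rgb color; unsigned char alpha; };
     struct pixel px;

   consists only of records with scalar fields and is well under the size
   limit computed above, so it is eligible for total scalarization
   provided it is also set in should_scalarize_away_bitmap and not in
   cannot_scalarize_away_bitmap.  */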
2119 /* Return true iff a reference statement into aggregate AGG can be built for
2120 every single to-be-replaced access that is a child of ACCESS, its sibling
2121 or a child of its sibling. TOP_OFFSET is the offset of the processed
2122 access subtree which has to be subtracted from the offset of each access. */
2124 static bool
2125 ref_expr_for_all_replacements_p (struct access *access, tree agg,
2126 HOST_WIDE_INT top_offset)
2130 if (access->grp_to_be_replaced
2131 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2132 access->offset - top_offset,
2133 access->type, false))
2134 return false;
2136 if (access->first_child
2137 && !ref_expr_for_all_replacements_p (access->first_child, agg,
2138 top_offset))
2139 return false;
2141 access = access->next_sibling;
2143 while (access);
2145 return true;
2148 /* Generate statements copying scalar replacements of accesses within a subtree
2149 into or out of AGG. ACCESS is the first child of the root of the subtree to
2150 be processed. AGG is an aggregate type expression (can be a declaration but
2151 does not have to be, it can for example also be an indirect_ref).
2152 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2153 from offsets of individual accesses to get corresponding offsets for AGG.
2154 If CHUNK_SIZE is non-zero, copy only replacements in the interval
2155 <START_OFFSET, START_OFFSET + CHUNK_SIZE>, otherwise copy all. GSI is a
2156 statement iterator used to place the new statements. WRITE should be true
2157 when the statements should write from AGG to the replacement and false
2158 vice versa. If INSERT_AFTER is true, new statements will be added after the
2159 current statement in GSI, otherwise they will be added before the
2160 statement. */
2162 static void
2163 generate_subtree_copies (struct access *access, tree agg,
2164 HOST_WIDE_INT top_offset,
2165 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2166 gimple_stmt_iterator *gsi, bool write,
2167 bool insert_after)
2171 tree expr = agg;
2173 if (chunk_size && access->offset >= start_offset + chunk_size)
2174 return;
2176 if (access->grp_to_be_replaced
2177 && (chunk_size == 0
2178 || access->offset + access->size > start_offset))
2180 tree repl = get_access_replacement (access);
2181 bool ref_found;
2182 gimple stmt;
2184 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2185 access->offset - top_offset,
2186 access->type, false);
2187 gcc_assert (ref_found);
2189 if (write)
2191 if (access->grp_partial_lhs)
2192 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2193 !insert_after,
2194 insert_after ? GSI_NEW_STMT
2195 : GSI_SAME_STMT);
2196 stmt = gimple_build_assign (repl, expr);
2198 else
2200 TREE_NO_WARNING (repl) = 1;
2201 if (access->grp_partial_lhs)
2202 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2203 !insert_after,
2204 insert_after ? GSI_NEW_STMT
2205 : GSI_SAME_STMT);
2206 stmt = gimple_build_assign (expr, repl);
2209 if (insert_after)
2210 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2211 else
2212 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2213 update_stmt (stmt);
2214 sra_stats.subtree_copies++;
2217 if (access->first_child)
2218 generate_subtree_copies (access->first_child, agg, top_offset,
2219 start_offset, chunk_size, gsi,
2220 write, insert_after);
2222 access = access->next_sibling;
2224 while (access);
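/* A sketch of the statements this generates (the replacement names below
   are only illustrative of the fancy names appearing in dumps): for

     struct pair { int first; int second; } p;

   re-loading the replacements from the aggregate (WRITE true) yields

     p$first = p.first;
     p$second = p.second;

   while flushing them back into the aggregate (WRITE false) yields

     p.first = p$first;
     p.second = p$second;  */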
2227 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2228 the root of the subtree to be processed. GSI is the statement iterator used
2229 for inserting statements which are added after the current statement if
2230 INSERT_AFTER is true or before it otherwise. */
2232 static void
2233 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2234 bool insert_after)
2237 struct access *child;
2239 if (access->grp_to_be_replaced)
2241 gimple stmt;
2243 stmt = gimple_build_assign (get_access_replacement (access),
2244 fold_convert (access->type,
2245 integer_zero_node));
2246 if (insert_after)
2247 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2248 else
2249 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2250 update_stmt (stmt);
2253 for (child = access->first_child; child; child = child->next_sibling)
2254 init_subtree_with_zero (child, gsi, insert_after);
2257 /* Search for an access representative for the given expression EXPR and
2258 return it or NULL if it cannot be found. */
2260 static struct access *
2261 get_access_for_expr (tree expr)
2263 HOST_WIDE_INT offset, size, max_size;
2264 tree base;
2266 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2267 a different size than the size of its argument and we need the latter
2268 one. */
2269 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2270 expr = TREE_OPERAND (expr, 0);
2272 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2273 if (max_size == -1 || !DECL_P (base))
2274 return NULL;
2276 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2277 return NULL;
2279 return get_var_base_offset_size_access (base, offset, max_size);
2282 /* Callback for scan_function. Replace the expression EXPR with a scalar
2283 replacement if there is one and generate other statements to do type
2284 conversion or subtree copying if necessary. GSI is used to place newly
2285 created statements, WRITE is true if the expression is being written to (it
2286 is on the LHS of a statement or an output in an assembly statement). */
2288 static bool
2289 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
2290 void *data ATTRIBUTE_UNUSED)
2292 struct access *access;
2293 tree type, bfr;
2295 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2297 bfr = *expr;
2298 expr = &TREE_OPERAND (*expr, 0);
2300 else
2301 bfr = NULL_TREE;
2303 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2304 expr = &TREE_OPERAND (*expr, 0);
2305 access = get_access_for_expr (*expr);
2306 if (!access)
2307 return false;
2308 type = TREE_TYPE (*expr);
2310 if (access->grp_to_be_replaced)
2312 tree repl = get_access_replacement (access);
2313 /* If we replace a non-register typed access simply use the original
2314 access expression to extract the scalar component afterwards.
2315 This happens if scalarizing a function return value or parameter
2316 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2317 gcc.c-torture/compile/20011217-1.c.
2319 We also want to use this when accessing a complex or vector which can
2320 be accessed as a different type too, potentially creating a need for
2321 type conversion (see PR42196) and when scalarized unions are involved
2322 in assembler statements (see PR42398). */
2323 if (!useless_type_conversion_p (type, access->type))
2325 tree ref = access->base;
2326 bool ok;
2328 ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2329 access->offset, access->type, false);
2330 gcc_assert (ok);
2332 if (write)
2334 gimple stmt;
2336 if (access->grp_partial_lhs)
2337 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2338 false, GSI_NEW_STMT);
2339 stmt = gimple_build_assign (repl, ref);
2340 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2342 else
2344 gimple stmt;
2346 if (access->grp_partial_lhs)
2347 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2348 true, GSI_SAME_STMT);
2349 stmt = gimple_build_assign (ref, repl);
2350 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2353 else
2354 *expr = repl;
2355 sra_stats.exprs++;
2358 if (access->first_child)
2360 HOST_WIDE_INT start_offset, chunk_size;
2361 if (bfr
2362 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2363 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2365 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2366 start_offset = access->offset
2367 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2369 else
2370 start_offset = chunk_size = 0;
2372 generate_subtree_copies (access->first_child, access->base, 0,
2373 start_offset, chunk_size, gsi, write, write);
2375 return true;
2378 /* Where scalar replacements of the RHS have been written to when a replacement
2379 of the LHS of an assignment cannot be directly loaded from a replacement of
2380 the RHS. */
2381 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2382 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2383 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2385 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2386 base aggregate if there are unscalarized data or directly to LHS
2387 otherwise. */
2389 static enum unscalarized_data_handling
2390 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2391 gimple_stmt_iterator *gsi)
2393 if (top_racc->grp_unscalarized_data)
2395 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2396 gsi, false, false);
2397 return SRA_UDH_RIGHT;
2399 else
2401 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2402 0, 0, gsi, false, false);
2403 return SRA_UDH_LEFT;
2408 /* Try to generate statements to load all sub-replacements in an access
2409 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2410 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
2411 load the accesses from it. LEFT_OFFSET is the offset of the left whole
2412 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2413 GSI is the stmt iterator used for statement insertions. *REFRESHED records
2414 whether the RHS top aggregate has already been refreshed by contents of its
2415 scalar replacements; this function updates it if it has to do the refreshing. */
2417 static void
2418 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2419 HOST_WIDE_INT left_offset,
2420 HOST_WIDE_INT right_offset,
2421 gimple_stmt_iterator *old_gsi,
2422 gimple_stmt_iterator *new_gsi,
2423 enum unscalarized_data_handling *refreshed,
2424 tree lhs)
2426 location_t loc = EXPR_LOCATION (lacc->expr);
2429 if (lacc->grp_to_be_replaced)
2431 struct access *racc;
2432 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2433 gimple stmt;
2434 tree rhs;
2436 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2437 if (racc && racc->grp_to_be_replaced)
2439 rhs = get_access_replacement (racc);
2440 if (!useless_type_conversion_p (lacc->type, racc->type))
2441 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2443 else
2445 /* No suitable access on the right hand side, need to load from
2446 the aggregate. See if we have to update it first... */
2447 if (*refreshed == SRA_UDH_NONE)
2448 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2449 lhs, old_gsi);
2451 if (*refreshed == SRA_UDH_LEFT)
2453 bool repl_found;
2455 rhs = lacc->base;
2456 repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2457 lacc->offset, lacc->type,
2458 false);
2459 gcc_assert (repl_found);
2461 else
2463 bool repl_found;
2465 rhs = top_racc->base;
2466 repl_found = build_ref_for_offset (&rhs,
2467 TREE_TYPE (top_racc->base),
2468 offset, lacc->type, false);
2469 gcc_assert (repl_found);
2473 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2474 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2475 update_stmt (stmt);
2476 sra_stats.subreplacements++;
2478 else if (*refreshed == SRA_UDH_NONE
2479 && lacc->grp_read && !lacc->grp_covered)
2480 *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2481 old_gsi);
2483 if (lacc->first_child)
2484 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2485 left_offset, right_offset,
2486 old_gsi, new_gsi, refreshed, lhs);
2487 lacc = lacc->next_sibling;
2489 while (lacc);
2492 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2493 to the assignment and GSI is the statement iterator pointing at it. Returns
2494 the same values as sra_modify_assign. */
2496 static enum scan_assign_result
2497 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2499 tree lhs = gimple_assign_lhs (*stmt);
2500 struct access *acc;
2502 acc = get_access_for_expr (lhs);
2503 if (!acc)
2504 return SRA_SA_NONE;
2506 if (VEC_length (constructor_elt,
2507 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2509 /* I have never seen this code path trigger but if it can happen the
2510 following should handle it gracefully. */
2511 if (access_has_children_p (acc))
2512 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2513 true, true);
2514 return SRA_SA_PROCESSED;
2517 if (acc->grp_covered)
2519 init_subtree_with_zero (acc, gsi, false);
2520 unlink_stmt_vdef (*stmt);
2521 gsi_remove (gsi, true);
2522 return SRA_SA_REMOVED;
2524 else
2526 init_subtree_with_zero (acc, gsi, true);
2527 return SRA_SA_PROCESSED;
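/* For instance (a sketch with illustrative replacement names), a
   statement that reaches GIMPLE as an assignment of an empty
   CONSTRUCTOR, such as the gimplified form of

     struct s { int i; float f; } x;
     x = (struct s) { 0 };

   is typically rewritten into

     x$i = 0;
     x$f = 0.0;

   and, when the replacements cover all of x, the original statement is
   removed.  */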
2532 /* Callback of scan_function to process assign statements. It examines both
2533 sides of the statement, replaces them with a scalar replacement if there is
2534 one and generates copies of replacements if scalarized aggregates have been
2535 used in the assignment. STMT is a pointer to the assign statement, GSI is
2536 used to hold generated statements for type conversions and subtree
2537 copying. */
2539 static enum scan_assign_result
2540 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2541 void *data ATTRIBUTE_UNUSED)
2543 struct access *lacc, *racc;
2544 tree lhs, rhs;
2545 bool modify_this_stmt = false;
2546 bool force_gimple_rhs = false;
2547 location_t loc = gimple_location (*stmt);
2548 gimple_stmt_iterator orig_gsi = *gsi;
2550 if (!gimple_assign_single_p (*stmt))
2551 return SRA_SA_NONE;
2552 lhs = gimple_assign_lhs (*stmt);
2553 rhs = gimple_assign_rhs1 (*stmt);
2555 if (TREE_CODE (rhs) == CONSTRUCTOR)
2556 return sra_modify_constructor_assign (stmt, gsi);
2558 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2559 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2560 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2562 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2563 gsi, false, data);
2564 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2565 gsi, true, data);
2566 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2569 lacc = get_access_for_expr (lhs);
2570 racc = get_access_for_expr (rhs);
2571 if (!lacc && !racc)
2572 return SRA_SA_NONE;
2574 if (lacc && lacc->grp_to_be_replaced)
2576 lhs = get_access_replacement (lacc);
2577 gimple_assign_set_lhs (*stmt, lhs);
2578 modify_this_stmt = true;
2579 if (lacc->grp_partial_lhs)
2580 force_gimple_rhs = true;
2581 sra_stats.exprs++;
2584 if (racc && racc->grp_to_be_replaced)
2586 rhs = get_access_replacement (racc);
2587 modify_this_stmt = true;
2588 if (racc->grp_partial_lhs)
2589 force_gimple_rhs = true;
2590 sra_stats.exprs++;
2593 if (modify_this_stmt)
2595 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2597 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2598 ??? This should move to fold_stmt which we simply should
2599 call after building a VIEW_CONVERT_EXPR here. */
2600 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2601 && !access_has_children_p (lacc))
2603 tree expr = lhs;
2604 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2605 TREE_TYPE (rhs), false))
2607 lhs = expr;
2608 gimple_assign_set_lhs (*stmt, expr);
2611 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2612 && !access_has_children_p (racc))
2614 tree expr = rhs;
2615 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2616 TREE_TYPE (lhs), false))
2617 rhs = expr;
2619 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2621 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2622 if (is_gimple_reg_type (TREE_TYPE (lhs))
2623 && TREE_CODE (lhs) != SSA_NAME)
2624 force_gimple_rhs = true;
2629 /* From this point on, the function deals with assignments in between
2630 aggregates when at least one has scalar reductions of some of its
2631 components. There are three possible scenarios: 1) both the LHS and the RHS
2632 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2634 In the first case, we would like to load the LHS components from RHS
2635 components whenever possible. If that is not possible, we would like to
2636 read it directly from the RHS (after updating it by storing in it its own
2637 components). If there are some necessary unscalarized data in the LHS,
2638 those will be loaded by the original assignment too. If neither of these
2639 cases happen, the original statement can be removed. Most of this is done
2640 by load_assign_lhs_subreplacements.
2642 In the second case, we would like to store all RHS scalarized components
2643 directly into LHS and if they cover the aggregate completely, remove the
2644 statement too. In the third case, we want the LHS components to be loaded
2645 directly from the RHS (DSE will remove the original statement if it
2646 becomes redundant).
2648 This is a bit complex but manageable when types match and when unions do
2649 not cause confusion in a way that we cannot really load a component of LHS
2650 from the RHS or vice versa (the access representing this level can have
2651 subaccesses that are accessible only through a different union field at a
2652 higher level - different from the one used in the examined expression).
2653 Unions are fun.
2655 Therefore, I specially handle a fourth case, happening when there is a
2656 specific type cast or it is impossible to locate a scalarized subaccess on
2657 the other side of the expression. If that happens, I simply "refresh" the
2658 RHS by storing in it its scalarized components, leave the original statement
2659 there to do the copying and then load the scalar replacements of the LHS.
2660 This is what the first branch does. */
2662 if (gimple_has_volatile_ops (*stmt)
2663 || contains_view_convert_expr_p (rhs)
2664 || contains_view_convert_expr_p (lhs)
2665 || (access_has_children_p (racc)
2666 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2667 || (access_has_children_p (lacc)
2668 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2670 if (access_has_children_p (racc))
2671 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2672 gsi, false, false);
2673 if (access_has_children_p (lacc))
2674 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2675 gsi, true, true);
2676 sra_stats.separate_lhs_rhs_handling++;
2678 else
2680 if (access_has_children_p (lacc) && access_has_children_p (racc))
2682 gimple_stmt_iterator orig_gsi = *gsi;
2683 enum unscalarized_data_handling refreshed;
2685 if (lacc->grp_read && !lacc->grp_covered)
2686 refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2687 else
2688 refreshed = SRA_UDH_NONE;
2690 load_assign_lhs_subreplacements (lacc->first_child, racc,
2691 lacc->offset, racc->offset,
2692 &orig_gsi, gsi, &refreshed, lhs);
2693 if (refreshed != SRA_UDH_RIGHT)
2695 if (*stmt == gsi_stmt (*gsi))
2696 gsi_next (gsi);
2698 unlink_stmt_vdef (*stmt);
2699 gsi_remove (&orig_gsi, true);
2700 sra_stats.deleted++;
2701 return SRA_SA_REMOVED;
2704 else
2706 if (access_has_children_p (racc))
2708 if (!racc->grp_unscalarized_data
2709 /* Do not remove SSA name definitions (PR 42704). */
2710 && TREE_CODE (lhs) != SSA_NAME)
2712 generate_subtree_copies (racc->first_child, lhs,
2713 racc->offset, 0, 0, gsi,
2714 false, false);
2715 gcc_assert (*stmt == gsi_stmt (*gsi));
2716 unlink_stmt_vdef (*stmt);
2717 gsi_remove (gsi, true);
2718 sra_stats.deleted++;
2719 return SRA_SA_REMOVED;
2721 else
2722 generate_subtree_copies (racc->first_child, lhs,
2723 racc->offset, 0, 0, gsi, false, true);
2725 else if (access_has_children_p (lacc))
2726 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2727 0, 0, gsi, true, true);
2731 /* This gimplification must be done after generate_subtree_copies, lest we
2732 insert the subtree copies in the middle of the gimplified sequence. */
2733 if (force_gimple_rhs)
2734 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2735 true, GSI_SAME_STMT);
2736 if (gimple_assign_rhs1 (*stmt) != rhs)
2738 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2739 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2742 return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2745 /* Generate statements initializing scalar replacements of parts of function
2746 parameters. */
2748 static void
2749 initialize_parameter_reductions (void)
2751 gimple_stmt_iterator gsi;
2752 gimple_seq seq = NULL;
2753 tree parm;
2755 for (parm = DECL_ARGUMENTS (current_function_decl);
2756 parm;
2757 parm = TREE_CHAIN (parm))
2759 VEC (access_p, heap) *access_vec;
2760 struct access *access;
2762 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2763 continue;
2764 access_vec = get_base_access_vector (parm);
2765 if (!access_vec)
2766 continue;
2768 if (!seq)
2770 seq = gimple_seq_alloc ();
2771 gsi = gsi_start (seq);
2774 for (access = VEC_index (access_p, access_vec, 0);
2775 access;
2776 access = access->next_grp)
2777 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2780 if (seq)
2781 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2784 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2785 it reveals there are components of some aggregates to be scalarized, it runs
2786 the required transformations. */
2787 static unsigned int
2788 perform_intra_sra (void)
2790 int ret = 0;
2791 sra_initialize ();
2793 if (!find_var_candidates ())
2794 goto out;
2796 if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2797 true, NULL))
2798 goto out;
2800 if (!analyze_all_variable_accesses ())
2801 goto out;
2803 scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
2804 initialize_parameter_reductions ();
2806 statistics_counter_event (cfun, "Scalar replacements created",
2807 sra_stats.replacements);
2808 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2809 statistics_counter_event (cfun, "Subtree copy stmts",
2810 sra_stats.subtree_copies);
2811 statistics_counter_event (cfun, "Subreplacement stmts",
2812 sra_stats.subreplacements);
2813 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2814 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2815 sra_stats.separate_lhs_rhs_handling);
2817 ret = TODO_update_ssa;
2819 out:
2820 sra_deinitialize ();
2821 return ret;
2824 /* Perform early intraprocedural SRA. */
2825 static unsigned int
2826 early_intra_sra (void)
2828 sra_mode = SRA_MODE_EARLY_INTRA;
2829 return perform_intra_sra ();
2832 /* Perform "late" intraprocedural SRA. */
2833 static unsigned int
2834 late_intra_sra (void)
2836 sra_mode = SRA_MODE_INTRA;
2837 return perform_intra_sra ();
2841 static bool
2842 gate_intra_sra (void)
2844 return flag_tree_sra != 0;
2848 struct gimple_opt_pass pass_sra_early =
2851 GIMPLE_PASS,
2852 "esra", /* name */
2853 gate_intra_sra, /* gate */
2854 early_intra_sra, /* execute */
2855 NULL, /* sub */
2856 NULL, /* next */
2857 0, /* static_pass_number */
2858 TV_TREE_SRA, /* tv_id */
2859 PROP_cfg | PROP_ssa, /* properties_required */
2860 0, /* properties_provided */
2861 0, /* properties_destroyed */
2862 0, /* todo_flags_start */
2863 TODO_dump_func
2864 | TODO_update_ssa
2865 | TODO_ggc_collect
2866 | TODO_verify_ssa /* todo_flags_finish */
2870 struct gimple_opt_pass pass_sra =
2873 GIMPLE_PASS,
2874 "sra", /* name */
2875 gate_intra_sra, /* gate */
2876 late_intra_sra, /* execute */
2877 NULL, /* sub */
2878 NULL, /* next */
2879 0, /* static_pass_number */
2880 TV_TREE_SRA, /* tv_id */
2881 PROP_cfg | PROP_ssa, /* properties_required */
2882 0, /* properties_provided */
2883 0, /* properties_destroyed */
2884 TODO_update_address_taken, /* todo_flags_start */
2885 TODO_dump_func
2886 | TODO_update_ssa
2887 | TODO_ggc_collect
2888 | TODO_verify_ssa /* todo_flags_finish */
2893 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
2894 parameter. */
2896 static bool
2897 is_unused_scalar_param (tree parm)
2899 tree name;
2900 return (is_gimple_reg (parm)
2901 && (!(name = gimple_default_def (cfun, parm))
2902 || has_zero_uses (name)));
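/* For example, in

     int
     f (int unused, int x)
     {
       return x;
     }

   the default definition of UNUSED has no uses, so this predicate holds
   and IPA-SRA may later remove the parameter altogether.  */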
2905 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
2906 examine whether there are any direct or otherwise infeasible ones. If so,
2907 return true, otherwise return false. PARM must be a gimple register with a
2908 non-NULL default definition. */
2910 static bool
2911 ptr_parm_has_direct_uses (tree parm)
2913 imm_use_iterator ui;
2914 gimple stmt;
2915 tree name = gimple_default_def (cfun, parm);
2916 bool ret = false;
2918 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2920 int uses_ok = 0;
2921 use_operand_p use_p;
2923 if (is_gimple_debug (stmt))
2924 continue;
2926 /* Valid uses include dereferences on the lhs and the rhs. */
2927 if (gimple_has_lhs (stmt))
2929 tree lhs = gimple_get_lhs (stmt);
2930 while (handled_component_p (lhs))
2931 lhs = TREE_OPERAND (lhs, 0);
2932 if (INDIRECT_REF_P (lhs)
2933 && TREE_OPERAND (lhs, 0) == name)
2934 uses_ok++;
2936 if (gimple_assign_single_p (stmt))
2938 tree rhs = gimple_assign_rhs1 (stmt);
2939 while (handled_component_p (rhs))
2940 rhs = TREE_OPERAND (rhs, 0);
2941 if (INDIRECT_REF_P (rhs)
2942 && TREE_OPERAND (rhs, 0) == name)
2943 uses_ok++;
2945 else if (is_gimple_call (stmt))
2947 unsigned i;
2948 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2950 tree arg = gimple_call_arg (stmt, i);
2951 while (handled_component_p (arg))
2952 arg = TREE_OPERAND (arg, 0);
2953 if (INDIRECT_REF_P (arg)
2954 && TREE_OPERAND (arg, 0) == name)
2955 uses_ok++;
2959 /* If the number of valid uses does not match the number of
2960 uses in this stmt there is an unhandled use. */
2961 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
2962 --uses_ok;
2964 if (uses_ok != 0)
2965 ret = true;
2967 if (ret)
2968 BREAK_FROM_IMM_USE_STMT (ui);
2971 return ret;
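/* Two illustrative extremes (global_ptr is a hypothetical variable):

     int
     deref_only (int *p)
     {
       return *p;
     }

     int *global_ptr;

     void
     escapes (int *p)
     {
       global_ptr = p;
     }

   In deref_only the only use of p is a dereference, so this function
   returns false; in escapes the pointer value itself is used, so it
   returns true and p cannot become a by-value parameter.  */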
2974 /* Identify candidates for reduction for IPA-SRA based on their type and mark
2975 them in candidate_bitmap. Note that these do not necessarily include
2976 parameters which are unused and thus can be removed. Return true iff any
2977 such candidate has been found. */
2979 static bool
2980 find_param_candidates (void)
2982 tree parm;
2983 int count = 0;
2984 bool ret = false;
2986 for (parm = DECL_ARGUMENTS (current_function_decl);
2987 parm;
2988 parm = TREE_CHAIN (parm))
2990 tree type = TREE_TYPE (parm);
2992 count++;
2994 if (TREE_THIS_VOLATILE (parm)
2995 || TREE_ADDRESSABLE (parm)
2996 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
2997 continue;
2999 if (is_unused_scalar_param (parm))
3001 ret = true;
3002 continue;
3005 if (POINTER_TYPE_P (type))
3007 type = TREE_TYPE (type);
3009 if (TREE_CODE (type) == FUNCTION_TYPE
3010 || TYPE_VOLATILE (type)
3011 || !is_gimple_reg (parm)
3012 || is_va_list_type (type)
3013 || ptr_parm_has_direct_uses (parm))
3014 continue;
3016 else if (!AGGREGATE_TYPE_P (type))
3017 continue;
3019 if (!COMPLETE_TYPE_P (type)
3020 || !host_integerp (TYPE_SIZE (type), 1)
3021 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3022 || (AGGREGATE_TYPE_P (type)
3023 && type_internals_preclude_sra_p (type)))
3024 continue;
3026 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3027 ret = true;
3028 if (dump_file && (dump_flags & TDF_DETAILS))
3030 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3031 print_generic_expr (dump_file, parm, 0);
3032 fprintf (dump_file, "\n");
3036 func_param_count = count;
3037 return ret;
3040 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3041 maybe_modified. */
3043 static bool
3044 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3045 void *data)
3047 struct access *repr = (struct access *) data;
3049 repr->grp_maybe_modified = 1;
3050 return true;
3053 /* Analyze what representatives (in linked lists accessible from
3054 REPRESENTATIVES) can be modified by side effects of statements in the
3055 current function. */
3057 static void
3058 analyze_modified_params (VEC (access_p, heap) *representatives)
3060 int i;
3062 for (i = 0; i < func_param_count; i++)
3064 struct access *repr;
3066 for (repr = VEC_index (access_p, representatives, i);
3067 repr;
3068 repr = repr->next_grp)
3070 struct access *access;
3071 bitmap visited;
3072 ao_ref ar;
3074 if (no_accesses_p (repr))
3075 continue;
3076 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3077 || repr->grp_maybe_modified)
3078 continue;
3080 ao_ref_init (&ar, repr->expr);
3081 visited = BITMAP_ALLOC (NULL);
3082 for (access = repr; access; access = access->next_sibling)
3084 /* All accesses are read ones, otherwise grp_maybe_modified would
3085 be trivially set. */
3086 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3087 mark_maybe_modified, repr, &visited);
3088 if (repr->grp_maybe_modified)
3089 break;
3091 BITMAP_FREE (visited);
3096 /* Propagate distances in bb_dereferences in the opposite direction than the
3097 control flow edges, in each step storing the maximum of the current value
3098 and the minimum of all successors. These steps are repeated until the table
3099 stabilizes. Note that BBs which might terminate the function (according to
3100 the final_bbs bitmap) are never updated in this way. */
3102 static void
3103 propagate_dereference_distances (void)
3105 VEC (basic_block, heap) *queue;
3106 basic_block bb;
3108 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3109 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3110 FOR_EACH_BB (bb)
3112 VEC_quick_push (basic_block, queue, bb);
3113 bb->aux = bb;
3116 while (!VEC_empty (basic_block, queue))
3118 edge_iterator ei;
3119 edge e;
3120 bool change = false;
3121 int i;
3123 bb = VEC_pop (basic_block, queue);
3124 bb->aux = NULL;
3126 if (bitmap_bit_p (final_bbs, bb->index))
3127 continue;
3129 for (i = 0; i < func_param_count; i++)
3131 int idx = bb->index * func_param_count + i;
3132 bool first = true;
3133 HOST_WIDE_INT inh = 0;
3135 FOR_EACH_EDGE (e, ei, bb->succs)
3137 int succ_idx = e->dest->index * func_param_count + i;
3139 if (e->dest == EXIT_BLOCK_PTR)
3140 continue;
3142 if (first)
3144 first = false;
3145 inh = bb_dereferences [succ_idx];
3147 else if (bb_dereferences [succ_idx] < inh)
3148 inh = bb_dereferences [succ_idx];
3151 if (!first && bb_dereferences[idx] < inh)
3153 bb_dereferences[idx] = inh;
3154 change = true;
3158 if (change && !bitmap_bit_p (final_bbs, bb->index))
3159 FOR_EACH_EDGE (e, ei, bb->preds)
3161 if (e->src->aux)
3162 continue;
3164 e->src->aux = e->src;
3165 VEC_quick_push (basic_block, queue, e->src);
3169 VEC_free (basic_block, heap, queue);
3172 /* Dump a dereferences TABLE with heading STR to file F. */
3174 static void
3175 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3177 basic_block bb;
3179 fprintf (f, "%s", str);
3180 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3182 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3183 if (bb != EXIT_BLOCK_PTR)
3185 int i;
3186 for (i = 0; i < func_param_count; i++)
3188 int idx = bb->index * func_param_count + i;
3189 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3192 fprintf (f, "\n");
3194 fprintf (f, "\n");
3197 /* Determine which (parts of) parameters passed by reference that are not
3198 assigned to are not certainly dereferenced in this function and thus the
3199 dereferencing cannot be safely moved to the caller without potentially
3200 introducing a segfault. Mark such REPRESENTATIVES as
3201 grp_not_necessarilly_dereferenced.
3203 For each pointer parameter, the maximum dereferenced "distance," i.e. the
3204 offset + size of the accessed part, is calculated rather than a simple
3205 boolean, in order to handle cases when only a fraction of the whole
3206 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3207 an example).
3209 The maximum dereference distances for each pointer parameter and BB are
3210 already stored in bb_dereferences. This routine simply propagates these
3211 values upwards by propagate_dereference_distances and then compares the
3212 distances of individual parameters in the ENTRY BB to the equivalent
3213 distances of each representative of a (fraction of a) parameter. */
3215 static void
3216 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3218 int i;
3220 if (dump_file && (dump_flags & TDF_DETAILS))
3221 dump_dereferences_table (dump_file,
3222 "Dereference table before propagation:\n",
3223 bb_dereferences);
3225 propagate_dereference_distances ();
3227 if (dump_file && (dump_flags & TDF_DETAILS))
3228 dump_dereferences_table (dump_file,
3229 "Dereference table after propagation:\n",
3230 bb_dereferences);
3232 for (i = 0; i < func_param_count; i++)
3234 struct access *repr = VEC_index (access_p, representatives, i);
3235 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3237 if (!repr || no_accesses_p (repr))
3238 continue;
3242 if ((repr->offset + repr->size) > bb_dereferences[idx])
3243 repr->grp_not_necessarilly_dereferenced = 1;
3244 repr = repr->next_grp;
3246 while (repr);
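/* For example, in

     int
     f (int *p, int flag)
     {
       if (flag)
         return *p;
       return 0;
     }

   *p is not dereferenced on the path where FLAG is false, so the
   dereference distance propagated to the ENTRY block stays at zero and
   the representative of p is marked grp_not_necessarilly_dereferenced;
   hoisting the load into callers could introduce a fault that the
   original program never had.  */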
3250 /* Return the representative access for the parameter declaration PARM if it is
3251 a scalar passed by reference which is not written to and the pointer value
3252 is not used directly. Thus, if it is legal to dereference it in the caller
3253 and we can rule out modifications through aliases, such parameter should be
3254 turned into one passed by value. Return NULL otherwise. */
3256 static struct access *
3257 unmodified_by_ref_scalar_representative (tree parm)
3259 int i, access_count;
3260 struct access *repr;
3261 VEC (access_p, heap) *access_vec;
3263 access_vec = get_base_access_vector (parm);
3264 gcc_assert (access_vec);
3265 repr = VEC_index (access_p, access_vec, 0);
3266 if (repr->write)
3267 return NULL;
3268 repr->group_representative = repr;
3270 access_count = VEC_length (access_p, access_vec);
3271 for (i = 1; i < access_count; i++)
3273 struct access *access = VEC_index (access_p, access_vec, i);
3274 if (access->write)
3275 return NULL;
3276 access->group_representative = repr;
3277 access->next_sibling = repr->next_sibling;
3278 repr->next_sibling = access;
3281 repr->grp_read = 1;
3282 repr->grp_scalar_ptr = 1;
3283 return repr;
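/* E.g. the parameter P in

     extern int use (int);

     int
     f (const int *p)
     {
       return use (*p) + *p;
     }

   is only ever read through the dereference, so if modification through
   aliases can also be ruled out, IPA-SRA may change f to take the int by
   value.  */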
3286 /* Return true iff this access precludes IPA-SRA of the parameter it is
3287 associated with. */
3289 static bool
3290 access_precludes_ipa_sra_p (struct access *access)
3292 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3293 is incompatible assign in a call statement (and possibly even in asm
3294 statements). This can be relaxed by using a new temporary but only for
3295 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3296 intraprocedural SRA we deal with this by keeping the old aggregate around,
3297 something we cannot do in IPA-SRA.) */
3298 if (access->write
3299 && (is_gimple_call (access->stmt)
3300 || gimple_code (access->stmt) == GIMPLE_ASM))
3301 return true;
3303 return false;
3307 /* Sort collected accesses for parameter PARM, identify representatives for
3308 each accessed region and link them together. Return NULL if there are
3309 different but overlapping accesses, the special pointer value meaning
3310 there are no accesses for this parameter if that is the case, and the
3311 first representative otherwise. Set *RO_GRP if there is a group of accesses
3312 with only read (i.e. no write) accesses. */
3314 static struct access *
3315 splice_param_accesses (tree parm, bool *ro_grp)
3317 int i, j, access_count, group_count;
3318 int agg_size, total_size = 0;
3319 struct access *access, *res, **prev_acc_ptr = &res;
3320 VEC (access_p, heap) *access_vec;
3322 access_vec = get_base_access_vector (parm);
3323 if (!access_vec)
3324 return &no_accesses_representant;
3325 access_count = VEC_length (access_p, access_vec);
3327 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3328 compare_access_positions);
3330 i = 0;
3331 total_size = 0;
3332 group_count = 0;
3333 while (i < access_count)
3335 bool modification;
3336 access = VEC_index (access_p, access_vec, i);
3337 modification = access->write;
3338 if (access_precludes_ipa_sra_p (access))
3339 return NULL;
3341 /* Access is about to become group representative unless we find some
3342 nasty overlap which would preclude us from breaking this parameter
3343 apart. */
3345 j = i + 1;
3346 while (j < access_count)
3348 struct access *ac2 = VEC_index (access_p, access_vec, j);
3349 if (ac2->offset != access->offset)
3351 /* All or nothing law for parameters. */
3352 if (access->offset + access->size > ac2->offset)
3353 return NULL;
3354 else
3355 break;
3357 else if (ac2->size != access->size)
3358 return NULL;
3360 if (access_precludes_ipa_sra_p (ac2))
3361 return NULL;
3363 modification |= ac2->write;
3364 ac2->group_representative = access;
3365 ac2->next_sibling = access->next_sibling;
3366 access->next_sibling = ac2;
3367 j++;
3370 group_count++;
3371 access->grp_maybe_modified = modification;
3372 if (!modification)
3373 *ro_grp = true;
3374 *prev_acc_ptr = access;
3375 prev_acc_ptr = &access->next_grp;
3376 total_size += access->size;
3377 i = j;
3380 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3381 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3382 else
3383 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3384 if (total_size >= agg_size)
3385 return NULL;
3387 gcc_assert (group_count > 0);
3388 return res;
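/* The "all or nothing law" checked above means that, for instance, a
   parameter of type

     union u { int i; double d; };

   accessed both as the int I (offset 0, size 32) and as the double D
   (offset 0, size 64) cannot be split: the accesses share an offset but
   differ in size, so this function returns NULL (sizes assume a common
   32/64-bit target).  */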
3391 /* Decide whether parameters with representative accesses given by REPR should
3392 be reduced into components. */
3394 static int
3395 decide_one_param_reduction (struct access *repr)
3397 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3398 bool by_ref;
3399 tree parm;
3401 parm = repr->base;
3402 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3403 gcc_assert (cur_parm_size > 0);
3405 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3407 by_ref = true;
3408 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3410 else
3412 by_ref = false;
3413 agg_size = cur_parm_size;
3416 if (dump_file)
3418 struct access *acc;
3419 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3420 print_generic_expr (dump_file, parm, 0);
3421 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3422 for (acc = repr; acc; acc = acc->next_grp)
3423 dump_access (dump_file, acc, true);
3426 total_size = 0;
3427 new_param_count = 0;
3429 for (; repr; repr = repr->next_grp)
3431 gcc_assert (parm == repr->base);
3432 new_param_count++;
3434 if (!by_ref || (!repr->grp_maybe_modified
3435 && !repr->grp_not_necessarilly_dereferenced))
3436 total_size += repr->size;
3437 else
3438 total_size += cur_parm_size;
3441 gcc_assert (new_param_count > 0);
3443 if (optimize_function_for_size_p (cfun))
3444 parm_size_limit = cur_parm_size;
3445 else
3446 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3447 * cur_parm_size);
3449 if (total_size < agg_size
3450 && total_size <= parm_size_limit)
3452 if (dump_file)
3453 fprintf (dump_file, " ....will be split into %i components\n",
3454 new_param_count);
3455 return new_param_count;
3457 else
3458 return 0;
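/* As an illustration (a sketch assuming 32-bit ints and the default
   ipa-sra-ptr-growth-factor), a function such as

     struct big { int a; int b; int pad[30]; };

     int
     f (struct big s)
     {
       return s.a + s.b;
     }

   accesses only 64 of the 1024 bits of the aggregate, so the parameter
   can profitably be split, roughly as if the signature became

     int f (int a, int b);  */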
3461 /* The order of the following enum values is important; we need to do extra
3462 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3463 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3464 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3466 /* Identify representatives of all accesses to all candidate parameters for
3467 IPA-SRA. Return result based on what representatives have been found. */
3469 static enum ipa_splicing_result
3470 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3472 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3473 tree parm;
3474 struct access *repr;
3476 *representatives = VEC_alloc (access_p, heap, func_param_count);
3478 for (parm = DECL_ARGUMENTS (current_function_decl);
3479 parm;
3480 parm = TREE_CHAIN (parm))
3482 if (is_unused_scalar_param (parm))
3484 VEC_quick_push (access_p, *representatives,
3485 &no_accesses_representant);
3486 if (result == NO_GOOD_ACCESS)
3487 result = UNUSED_PARAMS;
3489 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3490 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3491 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3493 repr = unmodified_by_ref_scalar_representative (parm);
3494 VEC_quick_push (access_p, *representatives, repr);
3495 if (repr)
3496 result = UNMODIF_BY_REF_ACCESSES;
3498 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3500 bool ro_grp = false;
3501 repr = splice_param_accesses (parm, &ro_grp);
3502 VEC_quick_push (access_p, *representatives, repr);
3504 if (repr && !no_accesses_p (repr))
3506 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3508 if (ro_grp)
3509 result = UNMODIF_BY_REF_ACCESSES;
3510 else if (result < MODIF_BY_REF_ACCESSES)
3511 result = MODIF_BY_REF_ACCESSES;
3513 else if (result < BY_VAL_ACCESSES)
3514 result = BY_VAL_ACCESSES;
3516 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3517 result = UNUSED_PARAMS;
3519 else
3520 VEC_quick_push (access_p, *representatives, NULL);
3523 if (result == NO_GOOD_ACCESS)
3525 VEC_free (access_p, heap, *representatives);
3526 *representatives = NULL;
3527 return NO_GOOD_ACCESS;
3530 return result;
3533 /* Return the index of BASE in PARMS. Abort if it is not found. */
3535 static inline int
3536 get_param_index (tree base, VEC(tree, heap) *parms)
3538 int i, len;
3540 len = VEC_length (tree, parms);
3541 for (i = 0; i < len; i++)
3542 if (VEC_index (tree, parms, i) == base)
3543 return i;
3544 gcc_unreachable ();
3547 /* Convert the decisions made at the representative level into compact
3548 parameter adjustments. REPRESENTATIVES are pointers to the first
3549 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
3550 expected final number of adjustments. */
3552 static ipa_parm_adjustment_vec
3553 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3554 int adjustments_count)
3556 VEC (tree, heap) *parms;
3557 ipa_parm_adjustment_vec adjustments;
3558 tree parm;
3559 int i;
3561 gcc_assert (adjustments_count > 0);
3562 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3563 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3564 parm = DECL_ARGUMENTS (current_function_decl);
3565 for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3567 struct access *repr = VEC_index (access_p, representatives, i);
3569 if (!repr || no_accesses_p (repr))
3571 struct ipa_parm_adjustment *adj;
3573 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3574 memset (adj, 0, sizeof (*adj));
3575 adj->base_index = get_param_index (parm, parms);
3576 adj->base = parm;
3577 if (!repr)
3578 adj->copy_param = 1;
3579 else
3580 adj->remove_param = 1;
3582 else
3584 struct ipa_parm_adjustment *adj;
3585 int index = get_param_index (parm, parms);
3587 for (; repr; repr = repr->next_grp)
3589 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3590 memset (adj, 0, sizeof (*adj));
3591 gcc_assert (repr->base == parm);
3592 adj->base_index = index;
3593 adj->base = repr->base;
3594 adj->type = repr->type;
3595 adj->offset = repr->offset;
3596 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3597 && (repr->grp_maybe_modified
3598 || repr->grp_not_necessarilly_dereferenced));
3603 VEC_free (tree, heap, parms);
3604 return adjustments;
3607 /* Analyze the collected accesses and produce a plan for what to do with the
3608 parameters in the form of adjustments, NULL meaning no changes. */
3610 static ipa_parm_adjustment_vec
3611 analyze_all_param_acesses (void)
3613 enum ipa_splicing_result repr_state;
3614 bool proceed = false;
3615 int i, adjustments_count = 0;
3616 VEC (access_p, heap) *representatives;
3617 ipa_parm_adjustment_vec adjustments;
3619 repr_state = splice_all_param_accesses (&representatives);
3620 if (repr_state == NO_GOOD_ACCESS)
3621 return NULL;
3623 /* If there are any parameters passed by reference which are not modified
3624 directly, we need to check whether they can be modified indirectly. */
3625 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3627 analyze_caller_dereference_legality (representatives);
3628 analyze_modified_params (representatives);
3631 for (i = 0; i < func_param_count; i++)
3633 struct access *repr = VEC_index (access_p, representatives, i);
3635 if (repr && !no_accesses_p (repr))
3637 if (repr->grp_scalar_ptr)
3639 adjustments_count++;
3640 if (repr->grp_not_necessarilly_dereferenced
3641 || repr->grp_maybe_modified)
3642 VEC_replace (access_p, representatives, i, NULL);
3643 else
3645 proceed = true;
3646 sra_stats.scalar_by_ref_to_by_val++;
3649 else
3651 int new_components = decide_one_param_reduction (repr);
3653 if (new_components == 0)
3655 VEC_replace (access_p, representatives, i, NULL);
3656 adjustments_count++;
3658 else
3660 adjustments_count += new_components;
3661 sra_stats.aggregate_params_reduced++;
3662 sra_stats.param_reductions_created += new_components;
3663 proceed = true;
3667 else
3669 if (no_accesses_p (repr))
3671 proceed = true;
3672 sra_stats.deleted_unused_parameters++;
3674 adjustments_count++;
3678 if (!proceed && dump_file)
3679 fprintf (dump_file, "NOT proceeding to change params.\n");
3681 if (proceed)
3682 adjustments = turn_representatives_into_adjustments (representatives,
3683 adjustments_count);
3684 else
3685 adjustments = NULL;
3687 VEC_free (access_p, heap, representatives);
3688 return adjustments;
3691 /* If a parameter replacement identified by ADJ does not yet exist in the form
3692 of a declaration, create and record it, otherwise return the previously
3693 created one. */
3695 static tree
3696 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3698 tree repl;
3699 if (!adj->new_ssa_base)
3701 char *pretty_name = make_fancy_name (adj->base);
3703 repl = create_tmp_var (TREE_TYPE (adj->base), "ISR");
3704 if (TREE_CODE (TREE_TYPE (repl)) == COMPLEX_TYPE
3705 || TREE_CODE (TREE_TYPE (repl)) == VECTOR_TYPE)
3706 DECL_GIMPLE_REG_P (repl) = 1;
3707 DECL_NAME (repl) = get_identifier (pretty_name);
3708 obstack_free (&name_obstack, pretty_name);
3710 get_var_ann (repl);
3711 add_referenced_var (repl);
3712 adj->new_ssa_base = repl;
3714 else
3715 repl = adj->new_ssa_base;
3716 return repl;
3719 /* Find the first adjustment for a particular parameter BASE in a vector of
3720 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3721 adjustment. */
3723 static struct ipa_parm_adjustment *
3724 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3726 int i, len;
3728 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3729 for (i = 0; i < len; i++)
3731 struct ipa_parm_adjustment *adj;
3733 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3734 if (!adj->copy_param && adj->base == base)
3735 return adj;
3738 return NULL;
3741 /* Callback for scan_function. If the statement STMT defines an SSA_NAME of a
3742 parameter which is to be removed because its value is not used, replace the
3743 SSA_NAME with one relating to a created VAR_DECL and replace all of its
3744 uses too and return true (update_stmt is then issued for the statement by
3745 the caller). DATA is a pointer to an adjustments vector. */
3747 static bool
3748 replace_removed_params_ssa_names (gimple stmt, void *data)
3750 VEC (ipa_parm_adjustment_t, heap) *adjustments;
3751 struct ipa_parm_adjustment *adj;
3752 tree lhs, decl, repl, name;
3754 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3755 if (gimple_code (stmt) == GIMPLE_PHI)
3756 lhs = gimple_phi_result (stmt);
3757 else if (is_gimple_assign (stmt))
3758 lhs = gimple_assign_lhs (stmt);
3759 else if (is_gimple_call (stmt))
3760 lhs = gimple_call_lhs (stmt);
3761 else
3762 gcc_unreachable ();
3764 if (TREE_CODE (lhs) != SSA_NAME)
3765 return false;
3766 decl = SSA_NAME_VAR (lhs);
3767 if (TREE_CODE (decl) != PARM_DECL)
3768 return false;
3770 adj = get_adjustment_for_base (adjustments, decl);
3771 if (!adj)
3772 return false;
3774 repl = get_replaced_param_substitute (adj);
3775 name = make_ssa_name (repl, stmt);
3777 if (dump_file)
3779 fprintf (dump_file, "replacing an SSA name of a removed param ");
3780 print_generic_expr (dump_file, lhs, 0);
3781 fprintf (dump_file, " with ");
3782 print_generic_expr (dump_file, name, 0);
3783 fprintf (dump_file, "\n");
3786 if (is_gimple_assign (stmt))
3787 gimple_assign_set_lhs (stmt, name);
3788 else if (is_gimple_call (stmt))
3789 gimple_call_set_lhs (stmt, name);
3790 else
3791 gimple_phi_set_result (stmt, name);
3793 replace_uses_by (lhs, name);
3794 return true;
3797 /* Callback for scan_function and helper to sra_ipa_modify_assign. If the
3798 expression *EXPR should be replaced by a reduction of a parameter, do so.
3799 DATA is a pointer to a vector of adjustments. DONT_CONVERT specifies
3800 whether the function should care about type incompatibility between the current and
3801 new expressions. If it is true, the function will leave incompatibility
3802 issues to the caller.
3804 When called directly by scan_function, DONT_CONVERT is true when the EXPR is
3805 a write (LHS) expression. */
3807 static bool
3808 sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
3809 bool dont_convert, void *data)
3811 ipa_parm_adjustment_vec adjustments;
3812 int i, len;
3813 struct ipa_parm_adjustment *adj, *cand = NULL;
3814 HOST_WIDE_INT offset, size, max_size;
3815 tree base, src;
3817 adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3818 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3820 if (TREE_CODE (*expr) == BIT_FIELD_REF
3821 || TREE_CODE (*expr) == IMAGPART_EXPR
3822 || TREE_CODE (*expr) == REALPART_EXPR)
3824 expr = &TREE_OPERAND (*expr, 0);
3825 dont_convert = false;
3828 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3829 if (!base || size == -1 || max_size == -1)
3830 return false;
3832 if (INDIRECT_REF_P (base))
3833 base = TREE_OPERAND (base, 0);
3835 base = get_ssa_base_param (base);
3836 if (!base || TREE_CODE (base) != PARM_DECL)
3837 return false;
3839 for (i = 0; i < len; i++)
3841 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3843 if (adj->base == base
3844 && (adj->offset == offset || adj->remove_param))
3846 cand = adj;
3847 break;
3850 if (!cand || cand->copy_param || cand->remove_param)
3851 return false;
3853 if (cand->by_ref)
3855 tree folded;
3856 src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3857 cand->reduction);
3858 folded = gimple_fold_indirect_ref (src);
3859 if (folded)
3860 src = folded;
3862 else
3863 src = cand->reduction;
3865 if (dump_file && (dump_flags & TDF_DETAILS))
3867 fprintf (dump_file, "About to replace expr ");
3868 print_generic_expr (dump_file, *expr, 0);
3869 fprintf (dump_file, " with ");
3870 print_generic_expr (dump_file, src, 0);
3871 fprintf (dump_file, "\n");
3874 if (!dont_convert
3875 && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3877 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3878 *expr = vce;
3880 else
3881 *expr = src;
3882 return true;
3885 /* Callback for scan_function to process assign statements. Performs
3886 essentially the same function as sra_ipa_modify_expr. */
3888 static enum scan_assign_result
3889 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
3891 gimple stmt = *stmt_ptr;
3892 tree *lhs_p, *rhs_p;
3893 bool any;
3895 if (!gimple_assign_single_p (stmt))
3896 return SRA_SA_NONE;
3898 rhs_p = gimple_assign_rhs1_ptr (stmt);
3899 lhs_p = gimple_assign_lhs_ptr (stmt);
3901 any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
3902 any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
3903 if (any)
3905 tree new_rhs = NULL_TREE;
3907 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
3909 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
3911 /* V_C_Es of constructors can cause trouble (PR 42714). */
3912 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
3913 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
3914 else
3915 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
3917 else
3918 new_rhs = fold_build1_loc (gimple_location (stmt),
3919 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
3920 *rhs_p);
3922 else if (REFERENCE_CLASS_P (*rhs_p)
3923 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
3924 && !is_gimple_reg (*lhs_p))
3925 /* This can happen when an assignment in between two single field
3926 structures is turned into an assignment in between two pointers to
3927 scalars (PR 42237). */
3928 new_rhs = *rhs_p;
3930 if (new_rhs)
3932 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
3933 true, GSI_SAME_STMT);
3935 gimple_assign_set_rhs_from_tree (gsi, tmp);
3938 return SRA_SA_PROCESSED;
3941 return SRA_SA_NONE;
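/* A sketch of the CONSTRUCTOR special case above (hypothetical types):
   after the LHS of

       s = {};

   has been replaced by a scalar of register type, the empty constructor
   cannot simply be view-converted (PR 42714), so the RHS is turned into
   a zero constant of the LHS type instead; for non-register LHS types a
   fresh empty CONSTRUCTOR of the new type is built.  */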
3944 /* Call gimple_debug_bind_reset_value on all debug statements describing
3945 gimple register parameters that are being removed or replaced. */
3947 static void
3948 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
3950 int i, len;
3952 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3953 for (i = 0; i < len; i++)
3955 struct ipa_parm_adjustment *adj;
3956 imm_use_iterator ui;
3957 gimple stmt;
3958 tree name;
3960 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3961 if (adj->copy_param || !is_gimple_reg (adj->base))
3962 continue;
3963 name = gimple_default_def (cfun, adj->base);
3964 if (!name)
3965 continue;
3966 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3968 /* All other users must have been removed by scan_function. */
3969 gcc_assert (is_gimple_debug (stmt));
3970 gimple_debug_bind_reset_value (stmt);
3971 update_stmt (stmt);
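/* For instance (a hypothetical debug statement): after parameter P is
   removed, a leftover

       # DEBUG p => p_1

   has its bound value reset so that it becomes

       # DEBUG p => NULL

   which tells the debugger that the value of P is no longer available.  */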
3976 /* Return true iff all callers have at least as many actual arguments as there
3977 are formal parameters in the current function. */
3979 static bool
3980 all_callers_have_enough_arguments_p (struct cgraph_node *node)
3982 struct cgraph_edge *cs;
3983 for (cs = node->callers; cs; cs = cs->next_caller)
3984 if (!callsite_has_enough_arguments_p (cs->call_stmt))
3985 return false;
3987 return true;
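/* A hedged example of why the check above exists: with an unprototyped
   declaration such as

       extern int foo ();
       ... foo (1); ...

   a call may pass fewer actual arguments than foo's definition declares
   formal parameters, so adjusting foo's parameter list based on such a
   call site would be unsafe.  */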
3991 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
3993 static void
3994 convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
3996 tree old_cur_fndecl = current_function_decl;
3997 struct cgraph_edge *cs;
3998 basic_block this_block;
3999 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4001 for (cs = node->callers; cs; cs = cs->next_caller)
4003 current_function_decl = cs->caller->decl;
4004 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4006 if (dump_file)
4007 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4008 cs->caller->uid, cs->callee->uid,
4009 cgraph_node_name (cs->caller),
4010 cgraph_node_name (cs->callee));
4012 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4014 pop_cfun ();
4017 for (cs = node->callers; cs; cs = cs->next_caller)
4018 if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
4020 compute_inline_parameters (cs->caller);
4021 bitmap_set_bit (recomputed_callers, cs->caller->uid);
4023 BITMAP_FREE (recomputed_callers);
4025 current_function_decl = old_cur_fndecl;
4027 if (!encountered_recursive_call)
4028 return;
4030 FOR_EACH_BB (this_block)
4032 gimple_stmt_iterator gsi;
4034 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4036 gimple stmt = gsi_stmt (gsi);
4037 tree call_fndecl;
4038 if (gimple_code (stmt) != GIMPLE_CALL)
4039 continue;
4040 call_fndecl = gimple_call_fndecl (stmt);
4041 if (call_fndecl && cgraph_get_node (call_fndecl) == node)
4043 if (dump_file)
4044 fprintf (dump_file, "Adjusting recursive call\n");
4045 ipa_modify_call_arguments (NULL, stmt, adjustments);
4050 return;
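/* To illustrate (hypothetical caller code): once the callee's parameter
   list has been rewritten, a call such as

       bar (&s);

   is adjusted by ipa_modify_call_arguments into something like

       bar (s.f);

   and recursive calls found by the basic-block walk above are adjusted
   in the same way.  */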
4053 /* Create an abstract origin declaration for OLD_DECL and make it an abstract
4054 origin of the provided decl so that the original parameters are preserved
4055 for debug information. */
4057 static void
4058 create_abstract_origin (tree old_decl)
4060 if (!DECL_ABSTRACT_ORIGIN (old_decl))
4062 tree new_decl = copy_node (old_decl);
4064 DECL_ABSTRACT (new_decl) = 1;
4065 SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
4066 SET_DECL_RTL (new_decl, NULL);
4067 DECL_STRUCT_FUNCTION (new_decl) = NULL;
4068 DECL_ARTIFICIAL (old_decl) = 1;
4069 DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
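/* In other words (a sketch of the intent): the pristine copy becomes the
   abstract origin, e.g.

       DECL_ABSTRACT_ORIGIN (foo) --> foo'   (unmodified parameter list)

   so that debug information can still describe the signature FOO had
   before its parameters were rewritten.  */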
4073 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4074 as given in ADJUSTMENTS. */
4076 static void
4077 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4079 struct cgraph_node *alias;
4080 for (alias = node->same_body; alias; alias = alias->next)
4081 ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
4082 /* current_function_decl must be handled last, after same_body aliases,
4083 as the calls that follow use what it computes. */
4084 create_abstract_origin (current_function_decl);
4085 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4086 scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
4087 replace_removed_params_ssa_names, false, adjustments);
4088 sra_ipa_reset_debug_stmts (adjustments);
4089 convert_callers (node, adjustments);
4090 cgraph_make_node_local (node);
4091 return;
4094 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4095 its attributes; return true otherwise. NODE is the cgraph node of the current
4096 function. */
4098 static bool
4099 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4101 if (!cgraph_node_can_be_local_p (node))
4103 if (dump_file)
4104 fprintf (dump_file, "Function not local to this compilation unit.\n");
4105 return false;
4108 if (DECL_VIRTUAL_P (current_function_decl))
4110 if (dump_file)
4111 fprintf (dump_file, "Function is a virtual method.\n");
4112 return false;
4115 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4116 && node->global.size >= MAX_INLINE_INSNS_AUTO)
4118 if (dump_file)
4119 fprintf (dump_file, "Function too big to be made truly local.\n");
4120 return false;
4123 if (!node->callers)
4125 if (dump_file)
4126 fprintf (dump_file,
4127 "Function has no callers in this compilation unit.\n");
4128 return false;
4131 if (cfun->stdarg)
4133 if (dump_file)
4134 fprintf (dump_file, "Function uses stdarg.\n");
4135 return false;
4138 return true;
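/* For example (a hypothetical declaration): a variadic function such as

       int log_msg (const char *fmt, ...);

   fails the cfun->stdarg check above, because removing or reordering its
   fixed parameters would break va_start/va_arg processing.  */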
4141 /* Perform early interprocedural SRA. */
4143 static unsigned int
4144 ipa_early_sra (void)
4146 struct cgraph_node *node = cgraph_node (current_function_decl);
4147 ipa_parm_adjustment_vec adjustments;
4148 int ret = 0;
4150 if (!ipa_sra_preliminary_function_checks (node))
4151 return 0;
4153 sra_initialize ();
4154 sra_mode = SRA_MODE_EARLY_IPA;
4156 if (!find_param_candidates ())
4158 if (dump_file)
4159 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4160 goto simple_out;
4163 if (!all_callers_have_enough_arguments_p (node))
4165 if (dump_file)
4166 fprintf (dump_file, "There are callers with an insufficient number of "
4167 "arguments.\n");
4168 goto simple_out;
4171 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4172 func_param_count
4173 * last_basic_block_for_function (cfun));
4174 final_bbs = BITMAP_ALLOC (NULL);
4176 scan_function (build_access_from_expr, build_accesses_from_assign,
4177 NULL, true, NULL);
4178 if (encountered_apply_args)
4180 if (dump_file)
4181 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4182 goto out;
4185 if (encountered_unchangable_recursive_call)
4187 if (dump_file)
4188 fprintf (dump_file, "Function calls itself with an insufficient "
4189 "number of arguments.\n");
4190 goto out;
4193 adjustments = analyze_all_param_acesses ();
4194 if (!adjustments)
4195 goto out;
4196 if (dump_file)
4197 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4199 modify_function (node, adjustments);
4200 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4201 ret = TODO_update_ssa;
4203 statistics_counter_event (cfun, "Unused parameters deleted",
4204 sra_stats.deleted_unused_parameters);
4205 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4206 sra_stats.scalar_by_ref_to_by_val);
4207 statistics_counter_event (cfun, "Aggregate parameters broken up",
4208 sra_stats.aggregate_params_reduced);
4209 statistics_counter_event (cfun, "Aggregate parameter components created",
4210 sra_stats.param_reductions_created);
4212 out:
4213 BITMAP_FREE (final_bbs);
4214 free (bb_dereferences);
4215 simple_out:
4216 sra_deinitialize ();
4217 return ret;
4220 /* Return true if early IPA-SRA should be performed. */
4221 static bool
4222 ipa_early_sra_gate (void)
4224 return flag_ipa_sra;
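/* Usage note: flag_ipa_sra corresponds to the -fipa-sra command-line
   option, so the whole pass can be disabled with -fno-ipa-sra.  */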
4227 struct gimple_opt_pass pass_early_ipa_sra =
4230 GIMPLE_PASS,
4231 "eipa_sra", /* name */
4232 ipa_early_sra_gate, /* gate */
4233 ipa_early_sra, /* execute */
4234 NULL, /* sub */
4235 NULL, /* next */
4236 0, /* static_pass_number */
4237 TV_IPA_SRA, /* tv_id */
4238 0, /* properties_required */
4239 0, /* properties_provided */
4240 0, /* properties_destroyed */
4241 0, /* todo_flags_start */
4242 TODO_dump_func | TODO_dump_cgraph /* todo_flags_finish */