/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.

   A brief illustrative sketch follows this comment.  */
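
/* For illustration only, a sketch of the transformation (variable and
   replacement names are hypothetical; replacements are named along the
   lines of make_fancy_name below):

     struct S { int a; float b; };
     struct S s;
     s.a = 1;
     foo (s.a);

   can become

     s$a = 1;
     foo (s$a);

   with independent scalars s$a and s$b, so that s itself no longer needs
   to be kept in memory.  */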

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "expr.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "diagnostic.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
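
/* For illustration, an assumed example: for a variable of type
   struct { struct { int a; int b; } in; int c; } the representatives
   could form the tree

     (0, 96)   the whole variable
       (0, 64)   in
         (0, 32)   in.a
         (32, 32)  in.b
       (64, 32)  c

   where each node is (offset, size) in bits, children lie within their
   parent and are sorted by offset.  */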

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used, and need to be used, as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
             "grp_read = %d, grp_hint = %d, "
             "grp_covered = %d, grp_unscalarizable_region = %d, "
             "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
             "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_write, access->total_scalarization,
             access->grp_read, access->grp_hint,
             access->grp_covered, access->grp_unscalarizable_region,
             access->grp_unscalarized_data, access->grp_partial_lhs,
             access->grp_to_be_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
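
/* For illustration, assuming a tree with root (0, 96) and children
   (0, 32), (32, 32) and (64, 32): a query for offset 32 and size 32
   skips the (0, 32) child (it ends at the queried offset), descends to
   (32, 32) and returns it; a query for offset 32 and size 64 descends
   the same way, finds no matching subtree and returns NULL.  */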

/* Return the first group representative for BASE or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld)
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
                || !host_integerp (DECL_SIZE (fld), 1))
              return true;

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
        return type_internals_preclude_sra_p (et);
      else
        return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = TREE_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);
  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
        {
          disqualify_candidate (base,
                                "Encountered an access not aligned to a byte.");
          return NULL;
        }

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;

        last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}
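
/* For illustration (hypothetical types): struct { int i; struct { float f; }
   in; } consists only of records and scalar fields and thus qualifies,
   whereas a record with a union member, or one whose last field has zero
   size, does not.  */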

/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;
            tree expr;
            bool ok;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            expr = base;
            ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
                                       ft, false);
            gcc_assert (ok);

            access = create_access_1 (base, pos, size);
            access->expr = expr;
            access->type = ft;
            access->total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos);
      }
}
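
/* For illustration: with the hypothetical type from the previous sketch,
   struct { int i; struct { float f; } in; } x;, this creates the
   total_scalarization accesses (0, 32, int) for x.i and, through the
   recursive call on the field in, (32, 32, float) for x.in.f (bit offsets
   assuming 32-bit int and float).  */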

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (INDIRECT_REF_P (t))
        t = TREE_OPERAND (t, 0);
      t = get_ssa_base_param (t);
    }

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      if (sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
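
/* For illustration: accesses with (offset, size) of (0, 64), (0, 32) and
   (32, 32) are ordered by this comparator as (0, 64), (0, 32), (32, 32):
   smaller offsets come first and, at equal offsets, bigger accesses come
   first, so an enclosing access precedes the accesses it contains.  */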

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));

      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable (); /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
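
/* For illustration (hypothetical expressions): for s.in.f the produced name
   is "s$in$f" and for a[4].x it is "a$4$x"; a non-constant array index is
   simply omitted from the name.  */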

/* Helper function for build_ref_for_offset.  */

static bool
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
                        tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = tree_low_cst (tr_size, 1);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              if (res)
                {
                  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                                 NULL_TREE);
                  expr_ptr = &expr;
                }
              else
                expr_ptr = NULL;
              if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
                                          offset - pos, exp_type))
                {
                  if (res)
                    *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          if (res)
            {
              index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
              if (!integer_zerop (minidx))
                index = int_const_binop (PLUS_EXPR, index, minidx, 0);
              *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                             NULL_TREE, NULL_TREE);
            }
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
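
/* For illustration: given the hypothetical aggregate
   struct { int i; int a[4]; } v and a request for bit offset 64 with an int
   EXP_TYPE, the function descends through the COMPONENT_REF for the field a
   (the remaining offset becomes 32), builds the ARRAY_REF v.a[1], and
   succeeds with offset 0 and matching types.  */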

/* Construct an expression that would reference a part of aggregate *EXPR of
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
   function only determines whether it can build such a reference without
   actually doing it, otherwise, the tree it points to is unshared first and
   then used as a base for further sub-references.

   FIXME: Eventually this should be replaced with
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
   minor rewrite of fold_stmt.  */

bool
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
                      tree exp_type, bool allow_ptr)
{
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;

  if (expr)
    *expr = unshare_expr (*expr);

  if (allow_ptr && POINTER_TYPE_P (type))
    {
      type = TREE_TYPE (type);
      if (expr)
        *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
    }

  return build_ref_for_offset_1 (expr, type, offset, exp_type);
}

/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
          || needs_to_live_in_memory (var)
          || TREE_THIS_VOLATILE (var)
          || !COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || type_internals_preclude_sra_p (type)
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
             we also want to schedule it rather late.  Thus we ignore it in
             the early pass.  */
          || (sra_mode == SRA_MODE_EARLY_INTRA
              && is_va_list_type (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            grp_write = true;
          else
            {
              if (grp_read)
                multiple_reads = true;
              else
                grp_read = true;
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
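
/* For illustration: sorted accesses (0, 64), (0, 64) and (32, 32) of one
   variable yield two representatives, (0, 64) (grouping the two identical
   accesses) and (32, 32), which lies wholly within the first.  Accesses
   (0, 64) and (32, 64), by contrast, overlap only partially, so the
   function returns NULL and the variable is disqualified.  */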

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
          || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (repl, access->expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  */

static void
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      build_access_subtree (access);
    }
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  */

static void
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      build_access_subtree (&access);
      root->next_grp = access;
    }
}

/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };

/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   that seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all sorts
   of access flags appropriately along the way, notably always setting grp_read
   and grp_assignment_read according to MARK_READ and grp_write when MARK_WRITE
   is true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
                        enum mark_read_status mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read == SRA_MR_ASSIGN_READ)
    {
      root->grp_read = 1;
      root->grp_assignment_read = 1;
    }
  if (mark_read == SRA_MR_READ)
    root->grp_read = 1;
  else if (root->grp_assignment_read)
    mark_read = SRA_MR_ASSIGN_READ;
  else if (root->grp_read)
    mark_read = SRA_MR_READ;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
        hole = true;
      else
        covered_to += child->size;

      sth_created |= analyze_access_subtree (child, allow_replacements,
                                             mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
          || (root->grp_write && (direct_read || root->grp_assignment_read)))
      /* We must not ICE later on when trying to build an access to the
         original data within the aggregate even when it is impossible to do in
         a defined way like in the PR 42703 testcase.  Therefore we check
         pre-emptively here that we will be able to do that.  */
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
                               root->type, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Marking ");
          print_generic_expr (dump_file, root->base, 0);
          fprintf (dump_file, " offset: %u, size: %u: ",
                   (unsigned) root->offset, (unsigned) root->size);
          fprintf (dump_file, " to be replaced.\n");
        }

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}

/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
        ret = true;
      access = access->next_grp;
    }

  return ret;
}

/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
   size SIZE would conflict with an already existing one.  If exactly such a
   child already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
                              HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
        {
          *exact_match = child;
          return true;
        }

      if (child->offset < norm_offset + size
          && child->offset + child->size > norm_offset)
        return true;
    }

  return false;
}
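
/* For illustration: if LACC already has a child at (0, 32), a candidate at
   (0, 32) is an exact match, a candidate at (16, 32) conflicts because it
   overlaps that child without matching it, and a candidate at (32, 32)
   causes no conflict.  */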

/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
                                HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
                             model->type, false))
    return NULL;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
1882 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1883 true if any new subaccess was created. Additionally, if RACC is a scalar
1884 access but LACC is not, change the type of the latter, if possible. */
1886 static bool
1887 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1889 struct access *rchild;
1890 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1891 bool ret = false;
1893 if (is_gimple_reg_type (lacc->type)
1894 || lacc->grp_unscalarizable_region
1895 || racc->grp_unscalarizable_region)
1896 return false;
1898 if (!lacc->first_child && !racc->first_child
1899 && is_gimple_reg_type (racc->type))
1901 tree t = lacc->base;
1903 if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1904 false))
1906 lacc->expr = t;
1907 lacc->type = racc->type;
1909 return false;
1912 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1914 struct access *new_acc = NULL;
1915 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1917 if (rchild->grp_unscalarizable_region)
1918 continue;
1920 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1921 &new_acc))
1923 if (new_acc)
1925 rchild->grp_hint = 1;
1926 new_acc->grp_hint |= new_acc->grp_read;
1927 if (rchild->first_child)
1928 ret |= propagate_subaccesses_across_link (new_acc, rchild);
1930 continue;
1933 /* If a (part of) a union field is on the RHS of an assignment, it can
1934 have sub-accesses which do not make sense on the LHS (PR 40351).
1935 Check that this is not the case. */
1936 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
1937 rchild->type, false))
1938 continue;
1940 rchild->grp_hint = 1;
1941 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
1942 if (new_acc)
1944 ret = true;
1945 if (racc->first_child)
1946 propagate_subaccesses_across_link (new_acc, rchild);
1950 return ret;
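/* A hypothetical example of the propagation above: given

     struct S { int i; float f; } l, r;
     ...
     r.i = 1;
     r.f = 2.0f;
     l = r;

   the accesses to r.i and r.f become children of the access representing
   the whole of r.  Propagating them across the assign link created for
   l = r gives l two artificial children with the same offsets and sizes,
   so that the aggregate copy can later be turned into copies of the
   individual scalar replacements.  */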
1953 /* Propagate all subaccesses across assignment links. */
1955 static void
1956 propagate_all_subaccesses (void)
1958 while (work_queue_head)
1960 struct access *racc = pop_access_from_work_queue ();
1961 struct assign_link *link;
1963 gcc_assert (racc->first_link);
1965 for (link = racc->first_link; link; link = link->next)
1967 struct access *lacc = link->lacc;
1969 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
1970 continue;
1971 lacc = lacc->group_representative;
1972 if (propagate_subaccesses_across_link (lacc, racc)
1973 && lacc->first_link)
1974 add_access_to_work_queue (lacc);
1979 /* Go through all accesses collected throughout the (intraprocedural) analysis
1980 stage, exclude overlapping ones, identify representatives and build trees
1981 out of them, making decisions about scalarization on the way. Return true
1982 iff there are any to-be-scalarized variables after this stage. */
1984 static bool
1985 analyze_all_variable_accesses (void)
1987 int res = 0;
1988 bitmap tmp = BITMAP_ALLOC (NULL);
1989 bitmap_iterator bi;
1990 unsigned i, max_total_scalarization_size;
1992 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
1993 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
1995 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
1996 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
1997 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
1999 tree var = referenced_var (i);
2001 if (TREE_CODE (var) == VAR_DECL
2002 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2003 <= max_total_scalarization_size)
2004 && type_consists_of_records_p (TREE_TYPE (var)))
2006 completely_scalarize_record (var, var, 0);
2007 if (dump_file && (dump_flags & TDF_DETAILS))
2009 fprintf (dump_file, "Will attempt to totally scalarize ");
2010 print_generic_expr (dump_file, var, 0);
2011 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2016 bitmap_copy (tmp, candidate_bitmap);
2017 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2019 tree var = referenced_var (i);
2020 struct access *access;
2022 access = sort_and_splice_var_accesses (var);
2023 if (access)
2024 build_access_trees (access);
2025 else
2026 disqualify_candidate (var,
2027 "No or inhibitingly overlapping accesses.");
2030 propagate_all_subaccesses ();
2032 bitmap_copy (tmp, candidate_bitmap);
2033 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2035 tree var = referenced_var (i);
2036 struct access *access = get_first_repr_for_decl (var);
2038 if (analyze_access_trees (access))
2040 res++;
2041 if (dump_file && (dump_flags & TDF_DETAILS))
2043 fprintf (dump_file, "\nAccess trees for ");
2044 print_generic_expr (dump_file, var, 0);
2045 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2046 dump_access_tree (dump_file, access);
2047 fprintf (dump_file, "\n");
2050 else
2051 disqualify_candidate (var, "No scalar replacements to be created.");
2054 BITMAP_FREE (tmp);
2056 if (res)
2058 statistics_counter_event (cfun, "Scalarized aggregates", res);
2059 return true;
2061 else
2062 return false;
2065 /* Return true iff a reference statement into aggregate AGG can be built for
2066 every single to-be-replaced access that is a child of ACCESS, its sibling
2067 or a child of its sibling. TOP_OFFSET is the offset from the processed
2068 access subtree that has to be subtracted from the offset of each access. */
2070 static bool
2071 ref_expr_for_all_replacements_p (struct access *access, tree agg,
2072 HOST_WIDE_INT top_offset)
2076 if (access->grp_to_be_replaced
2077 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2078 access->offset - top_offset,
2079 access->type, false))
2080 return false;
2082 if (access->first_child
2083 && !ref_expr_for_all_replacements_p (access->first_child, agg,
2084 top_offset))
2085 return false;
2087 access = access->next_sibling;
2089 while (access);
2091 return true;
2094 /* Generate statements copying scalar replacements of accesses within a subtree
2095 into or out of AGG. ACCESS is the first child of the root of the subtree to
2096 be processed. AGG is an aggregate type expression (can be a declaration but
2097 does not have to be, it can for example also be an indirect_ref).
2098 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2099 from offsets of individual accesses to get corresponding offsets for AGG.
2100 If CHUNK_SIZE is non-zero, copy only replacements in the interval
2101 <start_offset, start_offset + chunk_size>, otherwise copy all. GSI is a
2102 statement iterator used to place the new statements. WRITE should be true
2103 when the statements should write from AGG to the replacement and false if
2104 vice versa. If INSERT_AFTER is true, new statements will be added after the
2105 current statement in GSI; otherwise they will be added before the
2106 statement. */
2108 static void
2109 generate_subtree_copies (struct access *access, tree agg,
2110 HOST_WIDE_INT top_offset,
2111 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2112 gimple_stmt_iterator *gsi, bool write,
2113 bool insert_after)
2117 tree expr = agg;
2119 if (chunk_size && access->offset >= start_offset + chunk_size)
2120 return;
2122 if (access->grp_to_be_replaced
2123 && (chunk_size == 0
2124 || access->offset + access->size > start_offset))
2126 tree repl = get_access_replacement (access);
2127 bool ref_found;
2128 gimple stmt;
2130 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2131 access->offset - top_offset,
2132 access->type, false);
2133 gcc_assert (ref_found);
2135 if (write)
2137 if (access->grp_partial_lhs)
2138 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2139 !insert_after,
2140 insert_after ? GSI_NEW_STMT
2141 : GSI_SAME_STMT);
2142 stmt = gimple_build_assign (repl, expr);
2144 else
2146 TREE_NO_WARNING (repl) = 1;
2147 if (access->grp_partial_lhs)
2148 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2149 !insert_after,
2150 insert_after ? GSI_NEW_STMT
2151 : GSI_SAME_STMT);
2152 stmt = gimple_build_assign (expr, repl);
2155 if (insert_after)
2156 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2157 else
2158 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2159 update_stmt (stmt);
2160 sra_stats.subtree_copies++;
2163 if (access->first_child)
2164 generate_subtree_copies (access->first_child, agg, top_offset,
2165 start_offset, chunk_size, gsi,
2166 write, insert_after);
2168 access = access->next_sibling;
2170 while (access);
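/* Continuing the hypothetical example above: with WRITE set to true this
   function loads the replacements from the aggregate, emitting statements
   along the lines of

     l$i = l.i;
     l$f = l.f;

   whereas with WRITE set to false the assignments go in the opposite
   direction, storing the replacements back into l.  The names l$i and l$f
   are made up here; the real ones come from get_access_replacement.  */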
2173 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2174 the root of the subtree to be processed. GSI is the statement iterator used
2175 for inserting statements which are added after the current statement if
2176 INSERT_AFTER is true or before it otherwise. */
2178 static void
2179 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2180 bool insert_after)
2183 struct access *child;
2185 if (access->grp_to_be_replaced)
2187 gimple stmt;
2189 stmt = gimple_build_assign (get_access_replacement (access),
2190 fold_convert (access->type,
2191 integer_zero_node));
2192 if (insert_after)
2193 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2194 else
2195 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2196 update_stmt (stmt);
2199 for (child = access->first_child; child; child = child->next_sibling)
2200 init_subtree_with_zero (child, gsi, insert_after);
2203 /* Search for an access representative for the given expression EXPR and
2204 return it or NULL if it cannot be found. */
2206 static struct access *
2207 get_access_for_expr (tree expr)
2209 HOST_WIDE_INT offset, size, max_size;
2210 tree base;
2212 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2213 a different size than the size of its argument and we need the latter
2214 one. */
2215 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2216 expr = TREE_OPERAND (expr, 0);
2218 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2219 if (max_size == -1 || !DECL_P (base))
2220 return NULL;
2222 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2223 return NULL;
2225 return get_var_base_offset_size_access (base, offset, max_size);
2228 /* Replace the expression EXPR with a scalar replacement if there is one and
2229 generate other statements to do type conversion or subtree copying if
2230 necessary. GSI is used to place newly created statements, WRITE is true if
2231 the expression is being written to (it is on a LHS of a statement or output
2232 in an assembly statement). */
2234 static bool
2235 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2237 struct access *access;
2238 tree type, bfr;
2240 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2242 bfr = *expr;
2243 expr = &TREE_OPERAND (*expr, 0);
2245 else
2246 bfr = NULL_TREE;
2248 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2249 expr = &TREE_OPERAND (*expr, 0);
2250 access = get_access_for_expr (*expr);
2251 if (!access)
2252 return false;
2253 type = TREE_TYPE (*expr);
2255 if (access->grp_to_be_replaced)
2257 tree repl = get_access_replacement (access);
2258 /* If we replace a non-register typed access simply use the original
2259 access expression to extract the scalar component afterwards.
2260 This happens if scalarizing a function return value or parameter
2261 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2262 gcc.c-torture/compile/20011217-1.c.
2264 We also want to use this when accessing a complex or vector which can
2265 be accessed as a different type too, potentially creating a need for
2266 type conversion (see PR42196) and when scalarized unions are involved
2267 in assembler statements (see PR42398). */
2268 if (!useless_type_conversion_p (type, access->type))
2270 tree ref = access->base;
2271 bool ok;
2273 ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2274 access->offset, access->type, false);
2275 gcc_assert (ok);
2277 if (write)
2279 gimple stmt;
2281 if (access->grp_partial_lhs)
2282 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2283 false, GSI_NEW_STMT);
2284 stmt = gimple_build_assign (repl, ref);
2285 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2287 else
2289 gimple stmt;
2291 if (access->grp_partial_lhs)
2292 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2293 true, GSI_SAME_STMT);
2294 stmt = gimple_build_assign (ref, repl);
2295 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2298 else
2299 *expr = repl;
2300 sra_stats.exprs++;
2303 if (access->first_child)
2305 HOST_WIDE_INT start_offset, chunk_size;
2306 if (bfr
2307 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2308 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2310 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2311 start_offset = access->offset
2312 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2314 else
2315 start_offset = chunk_size = 0;
2317 generate_subtree_copies (access->first_child, access->base, 0,
2318 start_offset, chunk_size, gsi, write, write);
2320 return true;
2323 /* Where scalar replacements of the RHS have been written to when a replacement
2324 the LHS of an assignment cannot be directly loaded from a replacement of
2325 the RHS. */
2326 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2327 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2328 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2330 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2331 base aggregate if there are unscalarized data or directly to LHS
2332 otherwise. */
2334 static enum unscalarized_data_handling
2335 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2336 gimple_stmt_iterator *gsi)
2338 if (top_racc->grp_unscalarized_data)
2340 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2341 gsi, false, false);
2342 return SRA_UDH_RIGHT;
2344 else
2346 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2347 0, 0, gsi, false, false);
2348 return SRA_UDH_LEFT;
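/* A made-up illustration: for an assignment l = r where r has replacements
   r$i and r$f but also some data that was never scalarized, the first
   branch flushes the replacements back into the aggregate, emitting
   r.i = r$i; r.f = r$f; before the statement, so that the subsequent
   aggregate copy is correct (SRA_UDH_RIGHT).  If r had no unscalarized
   data, the replacements would instead be stored straight into l as
   l.i = r$i; l.f = r$f; (SRA_UDH_LEFT).  The replacement names are
   invented for the example.  */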
2353 /* Try to generate statements to load all sub-replacements in an access
2354 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2355 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
2356 load the accesses from it. LEFT_OFFSET is the offset of the left whole
2357 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2358 GSI is a stmt iterator used for statement insertions. *REFRESHED tells
2359 whether the RHS top aggregate has already been refreshed by contents of its
2360 scalar reductions and is updated if this function has to do so. */
2362 static void
2363 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2364 HOST_WIDE_INT left_offset,
2365 HOST_WIDE_INT right_offset,
2366 gimple_stmt_iterator *old_gsi,
2367 gimple_stmt_iterator *new_gsi,
2368 enum unscalarized_data_handling *refreshed,
2369 tree lhs)
2371 location_t loc = EXPR_LOCATION (lacc->expr);
2374 if (lacc->grp_to_be_replaced)
2376 struct access *racc;
2377 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2378 gimple stmt;
2379 tree rhs;
2381 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2382 if (racc && racc->grp_to_be_replaced)
2384 rhs = get_access_replacement (racc);
2385 if (!useless_type_conversion_p (lacc->type, racc->type))
2386 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2388 else
2390 /* No suitable access on the right hand side, need to load from
2391 the aggregate. See if we have to update it first... */
2392 if (*refreshed == SRA_UDH_NONE)
2393 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2394 lhs, old_gsi);
2396 if (*refreshed == SRA_UDH_LEFT)
2398 bool repl_found;
2400 rhs = lacc->base;
2401 repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2402 lacc->offset, lacc->type,
2403 false);
2404 gcc_assert (repl_found);
2406 else
2408 bool repl_found;
2410 rhs = top_racc->base;
2411 repl_found = build_ref_for_offset (&rhs,
2412 TREE_TYPE (top_racc->base),
2413 offset, lacc->type, false);
2414 gcc_assert (repl_found);
2418 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2419 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2420 update_stmt (stmt);
2421 sra_stats.subreplacements++;
2423 else if (*refreshed == SRA_UDH_NONE
2424 && lacc->grp_read && !lacc->grp_covered)
2425 *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2426 old_gsi);
2428 if (lacc->first_child)
2429 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2430 left_offset, right_offset,
2431 old_gsi, new_gsi, refreshed, lhs);
2432 lacc = lacc->next_sibling;
2434 while (lacc);
2437 /* Result code for SRA assignment modification. */
2438 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2439 SRA_AM_MODIFIED, /* stmt changed but not
2440 removed */
2441 SRA_AM_REMOVED }; /* stmt eliminated */
2443 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2444 to the assignment and GSI is the statement iterator pointing at it. Returns
2445 the same values as sra_modify_assign. */
2447 static enum assignment_mod_result
2448 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2450 tree lhs = gimple_assign_lhs (*stmt);
2451 struct access *acc;
2453 acc = get_access_for_expr (lhs);
2454 if (!acc)
2455 return SRA_AM_NONE;
2457 if (VEC_length (constructor_elt,
2458 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2460 /* I have never seen this code path trigger but if it can happen the
2461 following should handle it gracefully. */
2462 if (access_has_children_p (acc))
2463 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2464 true, true);
2465 return SRA_AM_MODIFIED;
2468 if (acc->grp_covered)
2470 init_subtree_with_zero (acc, gsi, false);
2471 unlink_stmt_vdef (*stmt);
2472 gsi_remove (gsi, true);
2473 return SRA_AM_REMOVED;
2475 else
2477 init_subtree_with_zero (acc, gsi, true);
2478 return SRA_AM_MODIFIED;
2482 /* Create a new suitable default definition SSA_NAME and replace all uses of
2483 SSA with it. */
2485 static void
2486 replace_uses_with_default_def_ssa_name (tree ssa)
2488 tree repl, decl = SSA_NAME_VAR (ssa);
2489 if (TREE_CODE (decl) == PARM_DECL)
2491 tree tmp = create_tmp_reg (TREE_TYPE (decl), "SR");
2493 get_var_ann (tmp);
2494 add_referenced_var (tmp);
2495 repl = make_ssa_name (tmp, gimple_build_nop ());
2496 set_default_def (tmp, repl);
2498 else
2500 repl = gimple_default_def (cfun, decl);
2501 if (!repl)
2503 repl = make_ssa_name (decl, gimple_build_nop ());
2504 set_default_def (decl, repl);
2508 replace_uses_by (ssa, repl);
2511 /* Examine both sides of the assignment statement pointed to by STMT, replace
2512 them with a scalar replacement if there is one and generate copying of
2513 replacements if scalarized aggregates have been used in the assignment. GSI
2514 is used to hold generated statements for type conversions and subtree
2515 copying. */
2517 static enum assignment_mod_result
2518 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2520 struct access *lacc, *racc;
2521 tree lhs, rhs;
2522 bool modify_this_stmt = false;
2523 bool force_gimple_rhs = false;
2524 location_t loc = gimple_location (*stmt);
2525 gimple_stmt_iterator orig_gsi = *gsi;
2527 if (!gimple_assign_single_p (*stmt))
2528 return SRA_AM_NONE;
2529 lhs = gimple_assign_lhs (*stmt);
2530 rhs = gimple_assign_rhs1 (*stmt);
2532 if (TREE_CODE (rhs) == CONSTRUCTOR)
2533 return sra_modify_constructor_assign (stmt, gsi);
2535 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2536 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2537 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2539 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2540 gsi, false);
2541 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2542 gsi, true);
2543 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2546 lacc = get_access_for_expr (lhs);
2547 racc = get_access_for_expr (rhs);
2548 if (!lacc && !racc)
2549 return SRA_AM_NONE;
2551 if (lacc && lacc->grp_to_be_replaced)
2553 lhs = get_access_replacement (lacc);
2554 gimple_assign_set_lhs (*stmt, lhs);
2555 modify_this_stmt = true;
2556 if (lacc->grp_partial_lhs)
2557 force_gimple_rhs = true;
2558 sra_stats.exprs++;
2561 if (racc && racc->grp_to_be_replaced)
2563 rhs = get_access_replacement (racc);
2564 modify_this_stmt = true;
2565 if (racc->grp_partial_lhs)
2566 force_gimple_rhs = true;
2567 sra_stats.exprs++;
2570 if (modify_this_stmt)
2572 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2574 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2575 ??? This should move to fold_stmt which we simply should
2576 call after building a VIEW_CONVERT_EXPR here. */
2577 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2578 && !access_has_children_p (lacc))
2580 tree expr = lhs;
2581 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2582 TREE_TYPE (rhs), false))
2584 lhs = expr;
2585 gimple_assign_set_lhs (*stmt, expr);
2588 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2589 && !access_has_children_p (racc))
2591 tree expr = rhs;
2592 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2593 TREE_TYPE (lhs), false))
2594 rhs = expr;
2596 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2598 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2599 if (is_gimple_reg_type (TREE_TYPE (lhs))
2600 && TREE_CODE (lhs) != SSA_NAME)
2601 force_gimple_rhs = true;
2606 /* From this point on, the function deals with assignments in between
2607 aggregates when at least one has scalar reductions of some of its
2608 components. There are three possible scenarios: 1) both the LHS and RHS have
2609 to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
2611 In the first case, we would like to load the LHS components from RHS
2612 components whenever possible. If that is not possible, we would like to
2613 read it directly from the RHS (after updating it by storing in it its own
2614 components). If there are some necessary unscalarized data in the LHS,
2615 those will be loaded by the original assignment too. If neither of these
2616 cases happen, the original statement can be removed. Most of this is done
2617 by load_assign_lhs_subreplacements.
2619 In the second case, we would like to store all RHS scalarized components
2620 directly into LHS and if they cover the aggregate completely, remove the
2621 statement too. In the third case, we want the LHS components to be loaded
2622 directly from the RHS (DSE will remove the original statement if it
2623 becomes redundant).
2625 This is a bit complex but manageable when types match and when unions do
2626 not cause confusion in a way that we cannot really load a component of LHS
2627 from the RHS or vice versa (the access representing this level can have
2628 subaccesses that are accessible only through a different union field at a
2629 higher level - different from the one used in the examined expression).
2630 Unions are fun.
2632 Therefore, I specially handle a fourth case, happening when there is a
2633 specific type cast or it is impossible to locate a scalarized subaccess on
2634 the other side of the expression. If that happens, I simply "refresh" the
2635 RHS by storing in it its scalarized components, leave the original statement
2636 there to do the copying and then load the scalar replacements of the LHS.
2637 This is what the first branch does. */
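/* As a made-up example of the fourth case: given

     union U { struct A a; struct B b; } u;
     struct A x;
     ...
     x = u.a;

   a scalarized component of u that is only reachable through u.b may have
   no counterpart expressible in terms of x.  The first branch below then
   refreshes u from its scalar replacements, leaves the original aggregate
   assignment in place to do the copying, and re-loads the replacements of
   x afterwards.  */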
2639 if (gimple_has_volatile_ops (*stmt)
2640 || contains_view_convert_expr_p (rhs)
2641 || contains_view_convert_expr_p (lhs)
2642 || (access_has_children_p (racc)
2643 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2644 || (access_has_children_p (lacc)
2645 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2647 if (access_has_children_p (racc))
2648 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2649 gsi, false, false);
2650 if (access_has_children_p (lacc))
2651 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2652 gsi, true, true);
2653 sra_stats.separate_lhs_rhs_handling++;
2655 else
2657 if (access_has_children_p (lacc) && access_has_children_p (racc))
2659 gimple_stmt_iterator orig_gsi = *gsi;
2660 enum unscalarized_data_handling refreshed;
2662 if (lacc->grp_read && !lacc->grp_covered)
2663 refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2664 else
2665 refreshed = SRA_UDH_NONE;
2667 load_assign_lhs_subreplacements (lacc->first_child, racc,
2668 lacc->offset, racc->offset,
2669 &orig_gsi, gsi, &refreshed, lhs);
2670 if (refreshed != SRA_UDH_RIGHT)
2672 if (*stmt == gsi_stmt (*gsi))
2673 gsi_next (gsi);
2675 unlink_stmt_vdef (*stmt);
2676 gsi_remove (&orig_gsi, true);
2677 sra_stats.deleted++;
2678 return SRA_AM_REMOVED;
2681 else
2683 if (racc)
2685 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2687 if (racc->first_child)
2688 generate_subtree_copies (racc->first_child, lhs,
2689 racc->offset, 0, 0, gsi,
2690 false, false);
2691 gcc_assert (*stmt == gsi_stmt (*gsi));
2692 if (TREE_CODE (lhs) == SSA_NAME)
2693 replace_uses_with_default_def_ssa_name (lhs);
2695 unlink_stmt_vdef (*stmt);
2696 gsi_remove (gsi, true);
2697 sra_stats.deleted++;
2698 return SRA_AM_REMOVED;
2700 else if (racc->first_child)
2701 generate_subtree_copies (racc->first_child, lhs,
2702 racc->offset, 0, 0, gsi, false, true);
2704 if (access_has_children_p (lacc))
2705 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2706 0, 0, gsi, true, true);
2710 /* This gimplification must be done after generate_subtree_copies, lest we
2711 insert the subtree copies in the middle of the gimplified sequence. */
2712 if (force_gimple_rhs)
2713 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2714 true, GSI_SAME_STMT);
2715 if (gimple_assign_rhs1 (*stmt) != rhs)
2717 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2718 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2721 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2724 /* Traverse the function body and perform all modifications as decided in
2725 analyze_all_variable_accesses. */
2727 static void
2728 sra_modify_function_body (void)
2730 basic_block bb;
2732 FOR_EACH_BB (bb)
2734 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2735 while (!gsi_end_p (gsi))
2737 gimple stmt = gsi_stmt (gsi);
2738 enum assignment_mod_result assign_result;
2739 bool modified = false, deleted = false;
2740 tree *t;
2741 unsigned i;
2743 switch (gimple_code (stmt))
2745 case GIMPLE_RETURN:
2746 t = gimple_return_retval_ptr (stmt);
2747 if (*t != NULL_TREE)
2748 modified |= sra_modify_expr (t, &gsi, false);
2749 break;
2751 case GIMPLE_ASSIGN:
2752 assign_result = sra_modify_assign (&stmt, &gsi);
2753 modified |= assign_result == SRA_AM_MODIFIED;
2754 deleted = assign_result == SRA_AM_REMOVED;
2755 break;
2757 case GIMPLE_CALL:
2758 /* Operands must be processed before the lhs. */
2759 for (i = 0; i < gimple_call_num_args (stmt); i++)
2761 t = gimple_call_arg_ptr (stmt, i);
2762 modified |= sra_modify_expr (t, &gsi, false);
2765 if (gimple_call_lhs (stmt))
2767 t = gimple_call_lhs_ptr (stmt);
2768 modified |= sra_modify_expr (t, &gsi, true);
2770 break;
2772 case GIMPLE_ASM:
2773 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2775 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2776 modified |= sra_modify_expr (t, &gsi, false);
2778 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2780 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2781 modified |= sra_modify_expr (t, &gsi, true);
2783 break;
2785 default:
2786 break;
2789 if (modified)
2791 update_stmt (stmt);
2792 maybe_clean_eh_stmt (stmt);
2794 if (!deleted)
2795 gsi_next (&gsi);
2800 /* Generate statements initializing scalar replacements of parts of function
2801 parameters. */
2803 static void
2804 initialize_parameter_reductions (void)
2806 gimple_stmt_iterator gsi;
2807 gimple_seq seq = NULL;
2808 tree parm;
2810 for (parm = DECL_ARGUMENTS (current_function_decl);
2811 parm;
2812 parm = TREE_CHAIN (parm))
2814 VEC (access_p, heap) *access_vec;
2815 struct access *access;
2817 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2818 continue;
2819 access_vec = get_base_access_vector (parm);
2820 if (!access_vec)
2821 continue;
2823 if (!seq)
2825 seq = gimple_seq_alloc ();
2826 gsi = gsi_start (seq);
2829 for (access = VEC_index (access_p, access_vec, 0);
2830 access;
2831 access = access->next_grp)
2832 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2835 if (seq)
2836 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
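/* For instance (hypothetical), if a function takes struct S s by value and
   s.i has been given a replacement s$i, the sequence inserted on the single
   successor edge of the entry block contains s$i = s.i; so that the
   replacement holds the incoming parameter value before any user statement
   runs.  The name s$i is illustrative only.  */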
2839 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2840 it reveals there are components of some aggregates to be scalarized, it runs
2841 the required transformations. */
2842 static unsigned int
2843 perform_intra_sra (void)
2845 int ret = 0;
2846 sra_initialize ();
2848 if (!find_var_candidates ())
2849 goto out;
2851 if (!scan_function ())
2852 goto out;
2854 if (!analyze_all_variable_accesses ())
2855 goto out;
2857 sra_modify_function_body ();
2858 initialize_parameter_reductions ();
2860 statistics_counter_event (cfun, "Scalar replacements created",
2861 sra_stats.replacements);
2862 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2863 statistics_counter_event (cfun, "Subtree copy stmts",
2864 sra_stats.subtree_copies);
2865 statistics_counter_event (cfun, "Subreplacement stmts",
2866 sra_stats.subreplacements);
2867 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2868 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2869 sra_stats.separate_lhs_rhs_handling);
2871 ret = TODO_update_ssa;
2873 out:
2874 sra_deinitialize ();
2875 return ret;
2878 /* Perform early intraprocedural SRA. */
2879 static unsigned int
2880 early_intra_sra (void)
2882 sra_mode = SRA_MODE_EARLY_INTRA;
2883 return perform_intra_sra ();
2886 /* Perform "late" intraprocedural SRA. */
2887 static unsigned int
2888 late_intra_sra (void)
2890 sra_mode = SRA_MODE_INTRA;
2891 return perform_intra_sra ();
2895 static bool
2896 gate_intra_sra (void)
2898 return flag_tree_sra != 0;
2902 struct gimple_opt_pass pass_sra_early =
2905 GIMPLE_PASS,
2906 "esra", /* name */
2907 gate_intra_sra, /* gate */
2908 early_intra_sra, /* execute */
2909 NULL, /* sub */
2910 NULL, /* next */
2911 0, /* static_pass_number */
2912 TV_TREE_SRA, /* tv_id */
2913 PROP_cfg | PROP_ssa, /* properties_required */
2914 0, /* properties_provided */
2915 0, /* properties_destroyed */
2916 0, /* todo_flags_start */
2917 TODO_dump_func
2918 | TODO_update_ssa
2919 | TODO_ggc_collect
2920 | TODO_verify_ssa /* todo_flags_finish */
2924 struct gimple_opt_pass pass_sra =
2927 GIMPLE_PASS,
2928 "sra", /* name */
2929 gate_intra_sra, /* gate */
2930 late_intra_sra, /* execute */
2931 NULL, /* sub */
2932 NULL, /* next */
2933 0, /* static_pass_number */
2934 TV_TREE_SRA, /* tv_id */
2935 PROP_cfg | PROP_ssa, /* properties_required */
2936 0, /* properties_provided */
2937 0, /* properties_destroyed */
2938 TODO_update_address_taken, /* todo_flags_start */
2939 TODO_dump_func
2940 | TODO_update_ssa
2941 | TODO_ggc_collect
2942 | TODO_verify_ssa /* todo_flags_finish */
2947 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
2948 parameter. */
2950 static bool
2951 is_unused_scalar_param (tree parm)
2953 tree name;
2954 return (is_gimple_reg (parm)
2955 && (!(name = gimple_default_def (cfun, parm))
2956 || has_zero_uses (name)));
2959 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
2960 examine whether there are any direct or otherwise infeasible ones. If so,
2961 return true, otherwise return false. PARM must be a gimple register with a
2962 non-NULL default definition. */
2964 static bool
2965 ptr_parm_has_direct_uses (tree parm)
2967 imm_use_iterator ui;
2968 gimple stmt;
2969 tree name = gimple_default_def (cfun, parm);
2970 bool ret = false;
2972 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2974 int uses_ok = 0;
2975 use_operand_p use_p;
2977 if (is_gimple_debug (stmt))
2978 continue;
2980 /* Valid uses include dereferences on the lhs and the rhs. */
2981 if (gimple_has_lhs (stmt))
2983 tree lhs = gimple_get_lhs (stmt);
2984 while (handled_component_p (lhs))
2985 lhs = TREE_OPERAND (lhs, 0);
2986 if (INDIRECT_REF_P (lhs)
2987 && TREE_OPERAND (lhs, 0) == name)
2988 uses_ok++;
2990 if (gimple_assign_single_p (stmt))
2992 tree rhs = gimple_assign_rhs1 (stmt);
2993 while (handled_component_p (rhs))
2994 rhs = TREE_OPERAND (rhs, 0);
2995 if (INDIRECT_REF_P (rhs)
2996 && TREE_OPERAND (rhs, 0) == name)
2997 uses_ok++;
2999 else if (is_gimple_call (stmt))
3001 unsigned i;
3002 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3004 tree arg = gimple_call_arg (stmt, i);
3005 while (handled_component_p (arg))
3006 arg = TREE_OPERAND (arg, 0);
3007 if (INDIRECT_REF_P (arg)
3008 && TREE_OPERAND (arg, 0) == name)
3009 uses_ok++;
3013 /* If the number of valid uses does not match the number of
3014 uses in this stmt there is an unhandled use. */
3015 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3016 --uses_ok;
3018 if (uses_ok != 0)
3019 ret = true;
3021 if (ret)
3022 BREAK_FROM_IMM_USE_STMT (ui);
3025 return ret;
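/* Hypothetical examples of the distinction above: for a parameter int *p,
   the statements *p = 0; and x = *p; count as valid dereferences, whereas
   q = p;, a comparison p != 0, or passing p itself (rather than *p) to a
   call leaves uses_ok behind the number of uses in the statement, so the
   function returns true and p is disqualified.  */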
3028 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3029 them in candidate_bitmap. Note that these do not necessarily include
3030 parameters which are unused and thus can be removed. Return true iff any
3031 such candidate has been found. */
3033 static bool
3034 find_param_candidates (void)
3036 tree parm;
3037 int count = 0;
3038 bool ret = false;
3040 for (parm = DECL_ARGUMENTS (current_function_decl);
3041 parm;
3042 parm = TREE_CHAIN (parm))
3044 tree type = TREE_TYPE (parm);
3046 count++;
3048 if (TREE_THIS_VOLATILE (parm)
3049 || TREE_ADDRESSABLE (parm)
3050 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3051 continue;
3053 if (is_unused_scalar_param (parm))
3055 ret = true;
3056 continue;
3059 if (POINTER_TYPE_P (type))
3061 type = TREE_TYPE (type);
3063 if (TREE_CODE (type) == FUNCTION_TYPE
3064 || TYPE_VOLATILE (type)
3065 || !is_gimple_reg (parm)
3066 || is_va_list_type (type)
3067 || ptr_parm_has_direct_uses (parm))
3068 continue;
3070 else if (!AGGREGATE_TYPE_P (type))
3071 continue;
3073 if (!COMPLETE_TYPE_P (type)
3074 || !host_integerp (TYPE_SIZE (type), 1)
3075 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3076 || (AGGREGATE_TYPE_P (type)
3077 && type_internals_preclude_sra_p (type)))
3078 continue;
3080 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3081 ret = true;
3082 if (dump_file && (dump_flags & TDF_DETAILS))
3084 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3085 print_generic_expr (dump_file, parm, 0);
3086 fprintf (dump_file, "\n");
3090 func_param_count = count;
3091 return ret;
3094 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3095 maybe_modified. */
3097 static bool
3098 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3099 void *data)
3101 struct access *repr = (struct access *) data;
3103 repr->grp_maybe_modified = 1;
3104 return true;
3107 /* Analyze what representatives (in linked lists accessible from
3108 REPRESENTATIVES) can be modified by side effects of statements in the
3109 current function. */
3111 static void
3112 analyze_modified_params (VEC (access_p, heap) *representatives)
3114 int i;
3116 for (i = 0; i < func_param_count; i++)
3118 struct access *repr;
3120 for (repr = VEC_index (access_p, representatives, i);
3121 repr;
3122 repr = repr->next_grp)
3124 struct access *access;
3125 bitmap visited;
3126 ao_ref ar;
3128 if (no_accesses_p (repr))
3129 continue;
3130 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3131 || repr->grp_maybe_modified)
3132 continue;
3134 ao_ref_init (&ar, repr->expr);
3135 visited = BITMAP_ALLOC (NULL);
3136 for (access = repr; access; access = access->next_sibling)
3138 /* All accesses are read ones, otherwise grp_maybe_modified would
3139 be trivially set. */
3140 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3141 mark_maybe_modified, repr, &visited);
3142 if (repr->grp_maybe_modified)
3143 break;
3145 BITMAP_FREE (visited);
3150 /* Propagate distances in bb_dereferences in the opposite direction than the
3151 control flow edges, in each step storing the maximum of the current value
3152 and the minimum of all successors. These steps are repeated until the table
3153 stabilizes. Note that BBs which might terminate the function (according to
3154 the final_bbs bitmap) are never updated in this way. */
3156 static void
3157 propagate_dereference_distances (void)
3159 VEC (basic_block, heap) *queue;
3160 basic_block bb;
3162 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3163 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3164 FOR_EACH_BB (bb)
3166 VEC_quick_push (basic_block, queue, bb);
3167 bb->aux = bb;
3170 while (!VEC_empty (basic_block, queue))
3172 edge_iterator ei;
3173 edge e;
3174 bool change = false;
3175 int i;
3177 bb = VEC_pop (basic_block, queue);
3178 bb->aux = NULL;
3180 if (bitmap_bit_p (final_bbs, bb->index))
3181 continue;
3183 for (i = 0; i < func_param_count; i++)
3185 int idx = bb->index * func_param_count + i;
3186 bool first = true;
3187 HOST_WIDE_INT inh = 0;
3189 FOR_EACH_EDGE (e, ei, bb->succs)
3191 int succ_idx = e->dest->index * func_param_count + i;
3193 if (e->src == EXIT_BLOCK_PTR)
3194 continue;
3196 if (first)
3198 first = false;
3199 inh = bb_dereferences [succ_idx];
3201 else if (bb_dereferences [succ_idx] < inh)
3202 inh = bb_dereferences [succ_idx];
3205 if (!first && bb_dereferences[idx] < inh)
3207 bb_dereferences[idx] = inh;
3208 change = true;
3212 if (change && !bitmap_bit_p (final_bbs, bb->index))
3213 FOR_EACH_EDGE (e, ei, bb->preds)
3215 if (e->src->aux)
3216 continue;
3218 e->src->aux = e->src;
3219 VEC_quick_push (basic_block, queue, e->src);
3223 VEC_free (basic_block, heap, queue);
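/* A small made-up example of the propagation: if both successors of a
   block B are known to dereference a pointer parameter up to offset plus
   size 64, while B itself only reaches 32, B's entry in bb_dereferences is
   raised to 64, because the larger dereference is certain to happen on
   every path leaving B.  A block listed in final_bbs never has its value
   raised this way, which limits what its predecessors may inherit.  */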
3226 /* Dump a dereferences TABLE with heading STR to file F. */
3228 static void
3229 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3231 basic_block bb;
3233 fprintf (f, "%s", str);
3234 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3236 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3237 if (bb != EXIT_BLOCK_PTR)
3239 int i;
3240 for (i = 0; i < func_param_count; i++)
3242 int idx = bb->index * func_param_count + i;
3243 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3246 fprintf (f, "\n");
3248 fprintf (f, "\n");
3251 /* Determine which (parts of) parameters passed by reference and not assigned
3252 to are not certainly dereferenced in this function, in which case the
3253 dereferencing cannot be safely moved to the caller without potentially
3254 introducing a segfault. Mark such REPRESENTATIVES as
3255 grp_not_necessarilly_dereferenced.
3257 For each pointer parameter, the maximum dereferenced "distance," i.e. the
3258 offset + size of the accessed part, is calculated rather than a simple
3259 boolean, to handle cases when only a fraction of the whole
3260 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3261 an example).
3263 The maximum dereference distances for each pointer parameter and BB are
3264 already stored in bb_dereferences. This routine simply propagates these
3265 values upwards by propagate_dereference_distances and then compares the
3266 distances of individual parameters in the ENTRY BB to the equivalent
3267 distances of each representative of a (fraction of a) parameter. */
3269 static void
3270 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3272 int i;
3274 if (dump_file && (dump_flags & TDF_DETAILS))
3275 dump_dereferences_table (dump_file,
3276 "Dereference table before propagation:\n",
3277 bb_dereferences);
3279 propagate_dereference_distances ();
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3282 dump_dereferences_table (dump_file,
3283 "Dereference table after propagation:\n",
3284 bb_dereferences);
3286 for (i = 0; i < func_param_count; i++)
3288 struct access *repr = VEC_index (access_p, representatives, i);
3289 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3291 if (!repr || no_accesses_p (repr))
3292 continue;
3296 if ((repr->offset + repr->size) > bb_dereferences[idx])
3297 repr->grp_not_necessarilly_dereferenced = 1;
3298 repr = repr->next_grp;
3300 while (repr);
3304 /* Return the representative access for the parameter declaration PARM if it is
3305 a scalar passed by reference which is not written to and the pointer value
3306 is not used directly. Thus, if it is legal to dereference it in the caller
3307 and we can rule out modifications through aliases, such parameter should be
3308 turned into one passed by value. Return NULL otherwise. */
3310 static struct access *
3311 unmodified_by_ref_scalar_representative (tree parm)
3313 int i, access_count;
3314 struct access *repr;
3315 VEC (access_p, heap) *access_vec;
3317 access_vec = get_base_access_vector (parm);
3318 gcc_assert (access_vec);
3319 repr = VEC_index (access_p, access_vec, 0);
3320 if (repr->write)
3321 return NULL;
3322 repr->group_representative = repr;
3324 access_count = VEC_length (access_p, access_vec);
3325 for (i = 1; i < access_count; i++)
3327 struct access *access = VEC_index (access_p, access_vec, i);
3328 if (access->write)
3329 return NULL;
3330 access->group_representative = repr;
3331 access->next_sibling = repr->next_sibling;
3332 repr->next_sibling = access;
3335 repr->grp_read = 1;
3336 repr->grp_scalar_ptr = 1;
3337 return repr;
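/* For example (hypothetical):

     int foo (int *p) { return *p; }

   Every access to p is a read, so this function returns a representative
   with grp_scalar_ptr set, and p becomes a candidate for being passed by
   value instead, provided the later analyses also rule out modification
   through aliases and prove the dereference is legal in callers.  */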
3340 /* Return true iff this access precludes IPA-SRA of the parameter it is
3341 associated with. */
3343 static bool
3344 access_precludes_ipa_sra_p (struct access *access)
3346 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3347 is an incompatible assignment in a call statement (and possibly even in asm
3348 statements). This can be relaxed by using a new temporary but only for
3349 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3350 intraprocedural SRA we deal with this by keeping the old aggregate around,
3351 something we cannot do in IPA-SRA.) */
3352 if (access->write
3353 && (is_gimple_call (access->stmt)
3354 || gimple_code (access->stmt) == GIMPLE_ASM))
3355 return true;
3357 return false;
3361 /* Sort collected accesses for parameter PARM, identify representatives for
3362 each accessed region and link them together. Return NULL if there are
3363 different but overlapping accesses, return the special ptr value if there
3364 are no accesses for this parameter at all, and return the
3365 first representative otherwise. Set *RO_GRP if there is a group of accesses
3366 with only read (i.e. no write) accesses. */
3368 static struct access *
3369 splice_param_accesses (tree parm, bool *ro_grp)
3371 int i, j, access_count, group_count;
3372 int agg_size, total_size = 0;
3373 struct access *access, *res, **prev_acc_ptr = &res;
3374 VEC (access_p, heap) *access_vec;
3376 access_vec = get_base_access_vector (parm);
3377 if (!access_vec)
3378 return &no_accesses_representant;
3379 access_count = VEC_length (access_p, access_vec);
3381 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3382 compare_access_positions);
3384 i = 0;
3385 total_size = 0;
3386 group_count = 0;
3387 while (i < access_count)
3389 bool modification;
3390 access = VEC_index (access_p, access_vec, i);
3391 modification = access->write;
3392 if (access_precludes_ipa_sra_p (access))
3393 return NULL;
3395 /* Access is about to become group representative unless we find some
3396 nasty overlap which would preclude us from breaking this parameter
3397 apart. */
3399 j = i + 1;
3400 while (j < access_count)
3402 struct access *ac2 = VEC_index (access_p, access_vec, j);
3403 if (ac2->offset != access->offset)
3405 /* All or nothing law for parameters. */
3406 if (access->offset + access->size > ac2->offset)
3407 return NULL;
3408 else
3409 break;
3411 else if (ac2->size != access->size)
3412 return NULL;
3414 if (access_precludes_ipa_sra_p (ac2))
3415 return NULL;
3417 modification |= ac2->write;
3418 ac2->group_representative = access;
3419 ac2->next_sibling = access->next_sibling;
3420 access->next_sibling = ac2;
3421 j++;
3424 group_count++;
3425 access->grp_maybe_modified = modification;
3426 if (!modification)
3427 *ro_grp = true;
3428 *prev_acc_ptr = access;
3429 prev_acc_ptr = &access->next_grp;
3430 total_size += access->size;
3431 i = j;
3434 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3435 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3436 else
3437 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3438 if (total_size >= agg_size)
3439 return NULL;
3441 gcc_assert (group_count > 0);
3442 return res;
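/* An illustration of the all-or-nothing law above (made-up offsets): two
   accesses at offset 0 with sizes 32 and 64 overlap without being
   identical, so NULL is returned and the parameter stays intact.  Two
   accesses at offsets 0 and 32, each of size 32, splice into two separate
   representative groups instead.  */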
3445 /* Decide whether parameters with representative accesses given by REPR should
3446 be reduced into components. */
3448 static int
3449 decide_one_param_reduction (struct access *repr)
3451 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3452 bool by_ref;
3453 tree parm;
3455 parm = repr->base;
3456 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3457 gcc_assert (cur_parm_size > 0);
3459 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3461 by_ref = true;
3462 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3464 else
3466 by_ref = false;
3467 agg_size = cur_parm_size;
3470 if (dump_file)
3472 struct access *acc;
3473 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3474 print_generic_expr (dump_file, parm, 0);
3475 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3476 for (acc = repr; acc; acc = acc->next_grp)
3477 dump_access (dump_file, acc, true);
3480 total_size = 0;
3481 new_param_count = 0;
3483 for (; repr; repr = repr->next_grp)
3485 gcc_assert (parm == repr->base);
3486 new_param_count++;
3488 if (!by_ref || (!repr->grp_maybe_modified
3489 && !repr->grp_not_necessarilly_dereferenced))
3490 total_size += repr->size;
3491 else
3492 total_size += cur_parm_size;
3495 gcc_assert (new_param_count > 0);
3497 if (optimize_function_for_size_p (cfun))
3498 parm_size_limit = cur_parm_size;
3499 else
3500 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3501 * cur_parm_size);
3503 if (total_size < agg_size
3504 && total_size <= parm_size_limit)
3506 if (dump_file)
3507 fprintf (dump_file, " ....will be split into %i components\n",
3508 new_param_count);
3509 return new_param_count;
3511 else
3512 return 0;
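/* A made-up numeric example: suppose a pointer parameter references a
   128-bit aggregate of which two 32-bit fields are read and neither is
   possibly modified.  Then total_size is 64 and agg_size is 128.  With a
   64-bit pointer and PARAM_IPA_SRA_PTR_GROWTH_FACTOR at 2 when optimizing
   for speed, parm_size_limit is 128, both comparisons succeed and the
   function decides to split the parameter into two components.  */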
3515 /* The order of the following enums is important, we need to do extra work for
3516 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3517 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3518 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3520 /* Identify representatives of all accesses to all candidate parameters for
3521 IPA-SRA. Return result based on what representatives have been found. */
3523 static enum ipa_splicing_result
3524 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3526 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3527 tree parm;
3528 struct access *repr;
3530 *representatives = VEC_alloc (access_p, heap, func_param_count);
3532 for (parm = DECL_ARGUMENTS (current_function_decl);
3533 parm;
3534 parm = TREE_CHAIN (parm))
3536 if (is_unused_scalar_param (parm))
3538 VEC_quick_push (access_p, *representatives,
3539 &no_accesses_representant);
3540 if (result == NO_GOOD_ACCESS)
3541 result = UNUSED_PARAMS;
3543 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3544 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3545 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3547 repr = unmodified_by_ref_scalar_representative (parm);
3548 VEC_quick_push (access_p, *representatives, repr);
3549 if (repr)
3550 result = UNMODIF_BY_REF_ACCESSES;
3552 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3554 bool ro_grp = false;
3555 repr = splice_param_accesses (parm, &ro_grp);
3556 VEC_quick_push (access_p, *representatives, repr);
3558 if (repr && !no_accesses_p (repr))
3560 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3562 if (ro_grp)
3563 result = UNMODIF_BY_REF_ACCESSES;
3564 else if (result < MODIF_BY_REF_ACCESSES)
3565 result = MODIF_BY_REF_ACCESSES;
3567 else if (result < BY_VAL_ACCESSES)
3568 result = BY_VAL_ACCESSES;
3570 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3571 result = UNUSED_PARAMS;
3573 else
3574 VEC_quick_push (access_p, *representatives, NULL);
3577 if (result == NO_GOOD_ACCESS)
3579 VEC_free (access_p, heap, *representatives);
3580 *representatives = NULL;
3581 return NO_GOOD_ACCESS;
3584 return result;
3587 /* Return the index of BASE in PARMS. Abort if it is not found. */
3589 static inline int
3590 get_param_index (tree base, VEC(tree, heap) *parms)
3592 int i, len;
3594 len = VEC_length (tree, parms);
3595 for (i = 0; i < len; i++)
3596 if (VEC_index (tree, parms, i) == base)
3597 return i;
3598 gcc_unreachable ();
3601 /* Convert the decisions made at the representative level into compact
3602 parameter adjustments. REPRESENTATIVES are pointers to first
3603 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
3604 final number of adjustments. */
3606 static ipa_parm_adjustment_vec
3607 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3608 int adjustments_count)
3610 VEC (tree, heap) *parms;
3611 ipa_parm_adjustment_vec adjustments;
3612 tree parm;
3613 int i;
3615 gcc_assert (adjustments_count > 0);
3616 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3617 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3618 parm = DECL_ARGUMENTS (current_function_decl);
3619 for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3621 struct access *repr = VEC_index (access_p, representatives, i);
3623 if (!repr || no_accesses_p (repr))
3625 struct ipa_parm_adjustment *adj;
3627 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3628 memset (adj, 0, sizeof (*adj));
3629 adj->base_index = get_param_index (parm, parms);
3630 adj->base = parm;
3631 if (!repr)
3632 adj->copy_param = 1;
3633 else
3634 adj->remove_param = 1;
3636 else
3638 struct ipa_parm_adjustment *adj;
3639 int index = get_param_index (parm, parms);
3641 for (; repr; repr = repr->next_grp)
3643 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3644 memset (adj, 0, sizeof (*adj));
3645 gcc_assert (repr->base == parm);
3646 adj->base_index = index;
3647 adj->base = repr->base;
3648 adj->type = repr->type;
3649 adj->offset = repr->offset;
3650 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3651 && (repr->grp_maybe_modified
3652 || repr->grp_not_necessarilly_dereferenced));
3657 VEC_free (tree, heap, parms);
3658 return adjustments;
3661 /* Analyze the collected accesses and produce a plan for what to do with the
3662 parameters in the form of adjustments, NULL meaning nothing. */
3664 static ipa_parm_adjustment_vec
3665 analyze_all_param_acesses (void)
3667 enum ipa_splicing_result repr_state;
3668 bool proceed = false;
3669 int i, adjustments_count = 0;
3670 VEC (access_p, heap) *representatives;
3671 ipa_parm_adjustment_vec adjustments;
3673 repr_state = splice_all_param_accesses (&representatives);
3674 if (repr_state == NO_GOOD_ACCESS)
3675 return NULL;
3677 /* If there are any parameters passed by reference which are not modified
3678 directly, we need to check whether they can be modified indirectly. */
3679 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3681 analyze_caller_dereference_legality (representatives);
3682 analyze_modified_params (representatives);
3685 for (i = 0; i < func_param_count; i++)
3687 struct access *repr = VEC_index (access_p, representatives, i);
3689 if (repr && !no_accesses_p (repr))
3691 if (repr->grp_scalar_ptr)
3693 adjustments_count++;
3694 if (repr->grp_not_necessarilly_dereferenced
3695 || repr->grp_maybe_modified)
3696 VEC_replace (access_p, representatives, i, NULL);
3697 else
3699 proceed = true;
3700 sra_stats.scalar_by_ref_to_by_val++;
3703 else
3705 int new_components = decide_one_param_reduction (repr);
3707 if (new_components == 0)
3709 VEC_replace (access_p, representatives, i, NULL);
3710 adjustments_count++;
3712 else
3714 adjustments_count += new_components;
3715 sra_stats.aggregate_params_reduced++;
3716 sra_stats.param_reductions_created += new_components;
3717 proceed = true;
3721 else
3723 if (no_accesses_p (repr))
3725 proceed = true;
3726 sra_stats.deleted_unused_parameters++;
3728 adjustments_count++;
3732 if (!proceed && dump_file)
3733 fprintf (dump_file, "NOT proceeding to change params.\n");
3735 if (proceed)
3736 adjustments = turn_representatives_into_adjustments (representatives,
3737 adjustments_count);
3738 else
3739 adjustments = NULL;
3741 VEC_free (access_p, heap, representatives);
3742 return adjustments;
3745 /* If a parameter replacement identified by ADJ does not yet exist in the form
3746 of a declaration, create it and record it, otherwise return the previously
3747 created one. */
3749 static tree
3750 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3752 tree repl;
3753 if (!adj->new_ssa_base)
3755 char *pretty_name = make_fancy_name (adj->base);
3757 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3758 DECL_NAME (repl) = get_identifier (pretty_name);
3759 obstack_free (&name_obstack, pretty_name);
3761 get_var_ann (repl);
3762 add_referenced_var (repl);
3763 adj->new_ssa_base = repl;
3765 else
3766 repl = adj->new_ssa_base;
3767 return repl;
3770 /* Find the first adjustment for a particular parameter BASE in a vector of
3771 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3772 adjustment. */
3774 static struct ipa_parm_adjustment *
3775 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3777 int i, len;
3779 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3780 for (i = 0; i < len; i++)
3782 struct ipa_parm_adjustment *adj;
3784 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3785 if (!adj->copy_param && adj->base == base)
3786 return adj;
3789 return NULL;
3792 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
3793 removed because its value is not used, replace the SSA_NAME, together with
3794 all of its uses, with one relating to a newly created VAR_DECL, and return
3795 true. ADJUSTMENTS is a pointer to an adjustments vector. */
3797 static bool
3798 replace_removed_params_ssa_names (gimple stmt,
3799 ipa_parm_adjustment_vec adjustments)
3801 struct ipa_parm_adjustment *adj;
3802 tree lhs, decl, repl, name;
3804 if (gimple_code (stmt) == GIMPLE_PHI)
3805 lhs = gimple_phi_result (stmt);
3806 else if (is_gimple_assign (stmt))
3807 lhs = gimple_assign_lhs (stmt);
3808 else if (is_gimple_call (stmt))
3809 lhs = gimple_call_lhs (stmt);
3810 else
3811 gcc_unreachable ();
3813 if (TREE_CODE (lhs) != SSA_NAME)
3814 return false;
3815 decl = SSA_NAME_VAR (lhs);
3816 if (TREE_CODE (decl) != PARM_DECL)
3817 return false;
3819 adj = get_adjustment_for_base (adjustments, decl);
3820 if (!adj)
3821 return false;
3823 repl = get_replaced_param_substitute (adj);
3824 name = make_ssa_name (repl, stmt);
3826 if (dump_file)
3828 fprintf (dump_file, "replacing an SSA name of a removed param ");
3829 print_generic_expr (dump_file, lhs, 0);
3830 fprintf (dump_file, " with ");
3831 print_generic_expr (dump_file, name, 0);
3832 fprintf (dump_file, "\n");
3835 if (is_gimple_assign (stmt))
3836 gimple_assign_set_lhs (stmt, name);
3837 else if (is_gimple_call (stmt))
3838 gimple_call_set_lhs (stmt, name);
3839 else
3840 gimple_phi_set_result (stmt, name);
3842 replace_uses_by (lhs, name);
3843 return true;
3846 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3847 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3848 specifies whether the function should care about type incompatibility the
3849 current and new expressions. If it is false, the function will leave
3850 incompatibility issues to the caller. Return true iff the expression
3851 was modified. */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
		     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (INDIRECT_REF_P (base))
    base = TREE_OPERAND (base, 0);

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base
	  && (adj->offset == offset || adj->remove_param))
	{
	  cand = adj;
	  break;
	}
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    {
      tree folded;
      src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
		    cand->reduction);
      folded = gimple_fold_indirect_ref (src);
      if (folded)
	src = folded;
    }
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
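
/* For illustration only (hypothetical GIMPLE): assume the by-reference
   parameter P has been reduced to a scalar replacement named ISRA.3.  A
   load such as

     ... = *p_1(D);

   then has its reference expression replaced by ISRA.3 directly, and, when
   CONVERT is requested and the types differ, by
   VIEW_CONVERT_EXPR<old type>(ISRA.3).  */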

/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment between two single-field
	   structures is turned into an assignment between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
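
/* Example of the CONSTRUCTOR special case handled above (illustrative
   only): if the left hand side of

     s = {};

   acquires a replacement of gimple register type, wrapping the empty
   constructor in a VIEW_CONVERT_EXPR would be invalid (PR 42714), so the
   right hand side is rewritten to a zero constant of the new lhs type
   instead.  */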

/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  */

static void
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      bool bb_changed = false;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      bb_changed = true;
	      update_stmt (stmt);
	      maybe_clean_eh_stmt (stmt);
	    }
	  gsi_next (&gsi);
	}
      if (bb_changed)
	gimple_purge_dead_eh_edges (bb);
    }
}
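
/* For illustration only (hypothetical GIMPLE): inside the modified
   function, a call whose argument refers to a reduced parameter, such as

     x_1 = bar (*p_2(D));

   is rewritten by the GIMPLE_CALL case above to something like

     x_1 = bar (ISRA.4);

   with replace_removed_params_ssa_names then fixing up the lhs if it is an
   SSA name of a removed parameter.  */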

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
	continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
	continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	{
	  /* All other users must have been removed by
	     ipa_sra_modify_function_body.  */
	  gcc_assert (is_gimple_debug (stmt));
	  gimple_debug_bind_reset_value (stmt);
	  update_stmt (stmt);
	}
    }
}
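
/* For illustration only (hypothetical GIMPLE): a remaining debug bind of a
   removed register parameter, e.g.

     # DEBUG i => i_1(D)

   has its bound value reset so that no dangling reference to the old
   default definition survives; the variable then appears as optimized away
   at this point rather than being given a wrong location.  */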

/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}
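
/* A caller can legitimately pass fewer arguments than there are formal
   parameters when only an old-style declaration such as "int foo ();" is
   visible at the call site.  Such calls cannot be rewritten to match a new
   parameter list, so their presence disqualifies the function
   (illustrative scenario; the actual test is in
   callsite_has_enough_arguments_p).  */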

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
		 cs->caller->uid, cs->callee->uid,
		 cgraph_node_name (cs->caller),
		 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
      {
	compute_inline_parameters (cs->caller);
	bitmap_set_bit (recomputed_callers, cs->caller->uid);
      }
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl && cgraph_get_node (call_fndecl) == node)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call\n");
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }
}
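
/* For illustration only (hypothetical source): a self-recursive call like

     static int sum (struct S *s, int n) { ... return sum (s, n - 1); ... }

   is found by the statement scan above and passed to
   ipa_modify_call_arguments just like an ordinary call site.  */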

/* Create an abstract origin declaration for OLD_DECL and make it the abstract
   origin of OLD_DECL so that the original parameters are preserved for debug
   information.  */

static void
create_abstract_origin (tree old_decl)
{
  if (!DECL_ABSTRACT_ORIGIN (old_decl))
    {
      tree new_decl = copy_node (old_decl);

      DECL_ABSTRACT (new_decl) = 1;
      SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
      SET_DECL_RTL (new_decl, NULL);
      DECL_STRUCT_FUNCTION (new_decl) = NULL;
      DECL_ARTIFICIAL (old_decl) = 1;
      DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
    }
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  */

static void
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *alias;
  for (alias = node->same_body; alias; alias = alias->next)
    ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
  /* current_function_decl must be handled last, after same_body aliases,
     as following functions will use what it computed.  */
  create_abstract_origin (current_function_decl);
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (node, adjustments);
  cgraph_make_node_local (node);
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  modify_function (node, adjustments);
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);
  ret = TODO_update_ssa;

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
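
/* End-to-end illustration of the pass (hypothetical C, not a real dump):
   given

     struct S { int a, b, c; };
     static int get_a (struct S *s) { return s->a; }

   early IPA-SRA can rewrite get_a to take the used member directly,

     static int get_a (int a) { return a; }

   after which convert_callers changes every call from get_a (&s) to
   get_a (s.a).  */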

/* Return true iff early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra;
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",				/* name */
  ipa_early_sra_gate,			/* gate */
  ipa_early_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_SRA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph	/* todo_flags_finish */