/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
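/* For illustration (an editor's sketch, not part of the original sources):
   given

     struct S { int i; float f; };

     int
     foo (struct S s)
     {
       s.i = s.i + 1;
       return s.i;
     }

   the scalar parts of `s' are only ever accessed individually, so, assuming
   32-bit int, intra-SRA records accesses <offset 0, size 32> for `s.i' and
   replaces them with a new scalar temporary (named "s$i" in dumps), after
   which the aggregate itself can often be removed entirely.  */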
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We
     make the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* SRA pass statistics.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
	     "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
	     "grp_covered = %d, grp_unscalarizable_region = %d, "
	     "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
	     "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_write, access->total_scalarization,
	     access->grp_read, access->grp_hint, access->grp_assignment_read,
	     access->grp_covered, access->grp_unscalarizable_region,
	     access->grp_unscalarized_data, access->grp_partial_lhs,
	     access->grp_to_be_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld)
		|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
		|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
		|| !host_integerp (DECL_SIZE (fld), 1))
	      return true;

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
	return type_internals_preclude_sra_p (et);
      else
	return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = TREE_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
	{
	  disqualify_candidate (base,
				"Encountered an access not aligned to a byte.");
	  return NULL;
	}

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;

	last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}
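/* For illustration (an editor's example, not from the original sources):

     struct ok  { int i; struct { float f; } s; };    qualifies
     struct bad { int i; int tail[0]; };              zero-size last field
     union u    { int i; float f; };                  not a RECORD_TYPE

   of the three types above only `struct ok' satisfies
   type_consists_of_records_p and is therefore eligible for total
   scalarization by completely_scalarize_record below.  */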
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;
	    tree expr;
	    bool ok;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    expr = base;
	    ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
				       ft, false);
	    gcc_assert (ok);

	    access = create_access_1 (base, pos, size);
	    access->expr = expr;
	    access->type = ft;
	    access->total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos);
      }
}


/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (INDIRECT_REF_P (t))
	t = TREE_OPERAND (t, 0);
      t = get_ssa_base_param (t);
    }

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      if (sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
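/* For illustration (an editor's example, not from the original sources):
   assuming 32-bit int and float, accesses to a variable of type

     struct S { int i; float f; } s;

   made as `s' (offset 0, size 64), `s.i' (offset 0, size 32) and `s.f'
   (offset 32, size 32) are sorted by the function above as

     <0, 64>  <0, 32>  <32, 32>

   i.e. by ascending offset and, for equal offsets, by descending size, so
   that every access directly precedes the accesses nested within it.  */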
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));

      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
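/* For illustration (an editor's example, not from the original sources): for
   the expression `rec.fld.arr[3]' the functions above produce the name
   "rec$fld$arr$3" -- components are joined with '$' and constant array
   indices are printed as numbers.  */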
/* Helper function for build_ref_for_offset.  */

static bool
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
			tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      if (res)
		{
		  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
				 NULL_TREE);
		  expr_ptr = &expr;
		}
	      else
		expr_ptr = NULL;
	      if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
					  offset - pos, exp_type))
		{
		  if (res)
		    *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  if (res)
	    {
	      index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	      if (!integer_zerop (minidx))
		index = int_const_binop (PLUS_EXPR, index, minidx, 0);
	      *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			     NULL_TREE, NULL_TREE);
	    }
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}

/* Construct an expression that would reference a part of aggregate *EXPR of
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
   function only determines whether it can build such a reference without
   actually doing it, otherwise, the tree it points to is unshared first and
   then used as a base for further sub-references.

   FIXME: Eventually this should be replaced with
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
   minor rewrite of fold_stmt.
 */

bool
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
		      tree exp_type, bool allow_ptr)
{
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;

  if (expr)
    *expr = unshare_expr (*expr);

  if (allow_ptr && POINTER_TYPE_P (type))
    {
      type = TREE_TYPE (type);
      if (expr)
	*expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
    }

  return build_ref_for_offset_1 (expr, type, offset, exp_type);
}
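/* For illustration (an editor's example, not from the original sources):
   assuming 32-bit int and 16-bit short, for a base `s' of type

     struct S { int i; struct T { short a, b; } t; }

   a request for bit offset 48 with EXP_TYPE `short int' descends through
   field `t' (bit position 32) and then field `b' (bit position 16 within
   T), yielding the reference `s.t.b'.  */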
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
	  || needs_to_live_in_memory (var)
	  || TREE_THIS_VOLATILE (var)
	  || !COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || type_internals_preclude_sra_p (type)
	  /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  || (sra_mode == SRA_MODE_EARLY_INTRA
	      && is_va_list_type (type)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
	 compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    grp_write = true;
	  else
	    {
	      if (grp_read)
		multiple_reads = true;
	      else
		grp_read = true;
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
	  || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or
	 variable and that get_ref_base_and_extent works properly on the
	 expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
	    break;
	  default:
	    break;
	  }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}

/* Return true if expr contains some ARRAY_REFs into a variable-bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };

/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   doing so seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assignment_read according to MARK_READ and grp_write when
   MARK_WRITE is true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
			enum mark_read_status mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read == SRA_MR_ASSIGN_READ)
    {
      root->grp_read = 1;
      root->grp_assignment_read = 1;
    }
  if (mark_read == SRA_MR_READ)
    root->grp_read = 1;
  else if (root->grp_assignment_read)
    mark_read = SRA_MR_ASSIGN_READ;
  else if (root->grp_read)
    mark_read = SRA_MR_READ;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
	hole = true;
      else
	covered_to += child->size;

      sth_created |= analyze_access_subtree (child,
					     allow_replacements && !scalar,
					     mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || (root->grp_write && (direct_read || root->grp_assignment_read)))
      /* We must not ICE later on when trying to build an access to the
	 original data within the aggregate even when it is impossible to do in
	 a defined way like in the PR 42703 testcase.  Therefore we check
	 pre-emptively here that we will be able to do that.  */
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
			       root->type, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced.\n");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}

/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
   size SIZE would conflict with an already existing one.  If exactly such a
   child already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}

/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, so it is not located
   in any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
			     model->type, false))
    return NULL;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
1934 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1935 true if any new subaccess was created. Additionally, if RACC is a scalar
1936 access but LACC is not, change the type of the latter, if possible. */
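/* For instance (a hypothetical sketch, not taken from the sources), given

     struct S { int i; float f; };
     struct S a, b;
     ...
     a = b;

   if scanning only found the accesses b.i and b.f on the right hand side,
   the propagation below creates matching artificial children of the access
   for A at the same normalized offsets, so that A can later be scalarized
   and the aggregate copy replaced by scalar assignments. */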
1938 static bool
1939 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1941 struct access *rchild;
1942 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1943 bool ret = false;
1945 if (is_gimple_reg_type (lacc->type)
1946 || lacc->grp_unscalarizable_region
1947 || racc->grp_unscalarizable_region)
1948 return false;
1950 if (!lacc->first_child && !racc->first_child
1951 && is_gimple_reg_type (racc->type))
1953 tree t = lacc->base;
1955 if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
1956 false))
1958 lacc->expr = t;
1959 lacc->type = racc->type;
1961 return false;
1964 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
1966 struct access *new_acc = NULL;
1967 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
1969 if (rchild->grp_unscalarizable_region)
1970 continue;
1972 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
1973 &new_acc))
1975 if (new_acc)
1977 rchild->grp_hint = 1;
1978 new_acc->grp_hint |= new_acc->grp_read;
1979 if (rchild->first_child)
1980 ret |= propagate_subaccesses_across_link (new_acc, rchild);
1982 continue;
1985 /* If (a part of) a union field is on the RHS of an assignment, it can
1986 have sub-accesses which do not make sense on the LHS (PR 40351).
1987 Check that this is not the case. */
1988 if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
1989 rchild->type, false))
1990 continue;
1992 rchild->grp_hint = 1;
1993 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
1994 if (new_acc)
1996 ret = true;
1997 if (racc->first_child)
1998 propagate_subaccesses_across_link (new_acc, rchild);
2002 return ret;
2005 /* Propagate all subaccesses across assignment links. */
2007 static void
2008 propagate_all_subaccesses (void)
2010 while (work_queue_head)
2012 struct access *racc = pop_access_from_work_queue ();
2013 struct assign_link *link;
2015 gcc_assert (racc->first_link);
2017 for (link = racc->first_link; link; link = link->next)
2019 struct access *lacc = link->lacc;
2021 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2022 continue;
2023 lacc = lacc->group_representative;
2024 if (propagate_subaccesses_across_link (lacc, racc)
2025 && lacc->first_link)
2026 add_access_to_work_queue (lacc);
2031 /* Go through all accesses collected throughout the (intraprocedural) analysis
2032 stage, exclude overlapping ones, identify representatives and build trees
2033 out of them, making decisions about scalarization on the way. Return true
2034 iff there are any to-be-scalarized variables after this stage. */
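/* As a worked example of the size cap computed below (assuming a
   hypothetical 64-bit target with UNITS_PER_WORD 8, BITS_PER_UNIT 8 and a
   MOVE_RATIO of 4 when optimizing for speed), aggregates of at most
   8 * 8 * 4 = 256 bits, i.e. 32 bytes, are considered for total
   scalarization. */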
2036 static bool
2037 analyze_all_variable_accesses (void)
2039 int res = 0;
2040 bitmap tmp = BITMAP_ALLOC (NULL);
2041 bitmap_iterator bi;
2042 unsigned i, max_total_scalarization_size;
2044 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2045 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2047 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2048 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2049 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2051 tree var = referenced_var (i);
2053 if (TREE_CODE (var) == VAR_DECL
2054 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2055 <= max_total_scalarization_size)
2056 && type_consists_of_records_p (TREE_TYPE (var)))
2058 completely_scalarize_record (var, var, 0);
2059 if (dump_file && (dump_flags & TDF_DETAILS))
2061 fprintf (dump_file, "Will attempt to totally scalarize ");
2062 print_generic_expr (dump_file, var, 0);
2063 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2068 bitmap_copy (tmp, candidate_bitmap);
2069 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2071 tree var = referenced_var (i);
2072 struct access *access;
2074 access = sort_and_splice_var_accesses (var);
2075 if (!access || !build_access_trees (access))
2076 disqualify_candidate (var,
2077 "No or inhibitingly overlapping accesses.");
2080 propagate_all_subaccesses ();
2082 bitmap_copy (tmp, candidate_bitmap);
2083 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2085 tree var = referenced_var (i);
2086 struct access *access = get_first_repr_for_decl (var);
2088 if (analyze_access_trees (access))
2090 res++;
2091 if (dump_file && (dump_flags & TDF_DETAILS))
2093 fprintf (dump_file, "\nAccess trees for ");
2094 print_generic_expr (dump_file, var, 0);
2095 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2096 dump_access_tree (dump_file, access);
2097 fprintf (dump_file, "\n");
2100 else
2101 disqualify_candidate (var, "No scalar replacements to be created.");
2104 BITMAP_FREE (tmp);
2106 if (res)
2108 statistics_counter_event (cfun, "Scalarized aggregates", res);
2109 return true;
2111 else
2112 return false;
2115 /* Return true iff a reference statement into aggregate AGG can be built for
2116 every single to-be-replaced access that is a child of ACCESS, its sibling
2117 or a child of its sibling. TOP_OFFSET is the offset of the processed
2118 access subtree which has to be subtracted from the offset of each access. */
2120 static bool
2121 ref_expr_for_all_replacements_p (struct access *access, tree agg,
2122 HOST_WIDE_INT top_offset)
2126 if (access->grp_to_be_replaced
2127 && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2128 access->offset - top_offset,
2129 access->type, false))
2130 return false;
2132 if (access->first_child
2133 && !ref_expr_for_all_replacements_p (access->first_child, agg,
2134 top_offset))
2135 return false;
2137 access = access->next_sibling;
2139 while (access);
2141 return true;
2144 /* Generate statements copying scalar replacements of accesses within a subtree
2145 into or out of AGG. ACCESS is the first child of the root of the subtree to
2146 be processed. AGG is an aggregate type expression (can be a declaration but
2147 does not have to be; it can for example also be an INDIRECT_REF).
2148 TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2149 from offsets of individual accesses to get corresponding offsets for AGG.
2150 If CHUNK_SIZE is non-zero, copy only replacements in the interval
2151 <start_offset, start_offset + chunk_size>, otherwise copy all. GSI is a
2152 statement iterator used to place the new statements. WRITE should be true
2153 when the statements should write from AGG to the replacement and false if
2154 vice versa. If INSERT_AFTER is true, new statements will be added after the
2155 current statement in GSI; they will be added before the statement
2156 otherwise. */
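/* A minimal sketch of what this produces (hypothetical, with SR.1 and SR.2
   standing for the scalar replacements): for

     struct S { int i; float f; } s;

   with both fields to be replaced and WRITE true, the generated statements
   load the replacements from the aggregate:

     SR.1 = s.i;
     SR.2 = s.f;

   and with WRITE false the assignments go in the opposite direction. */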
2158 static void
2159 generate_subtree_copies (struct access *access, tree agg,
2160 HOST_WIDE_INT top_offset,
2161 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2162 gimple_stmt_iterator *gsi, bool write,
2163 bool insert_after)
2167 tree expr = agg;
2169 if (chunk_size && access->offset >= start_offset + chunk_size)
2170 return;
2172 if (access->grp_to_be_replaced
2173 && (chunk_size == 0
2174 || access->offset + access->size > start_offset))
2176 tree repl = get_access_replacement (access);
2177 bool ref_found;
2178 gimple stmt;
2180 ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2181 access->offset - top_offset,
2182 access->type, false);
2183 gcc_assert (ref_found);
2185 if (write)
2187 if (access->grp_partial_lhs)
2188 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2189 !insert_after,
2190 insert_after ? GSI_NEW_STMT
2191 : GSI_SAME_STMT);
2192 stmt = gimple_build_assign (repl, expr);
2194 else
2196 TREE_NO_WARNING (repl) = 1;
2197 if (access->grp_partial_lhs)
2198 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2199 !insert_after,
2200 insert_after ? GSI_NEW_STMT
2201 : GSI_SAME_STMT);
2202 stmt = gimple_build_assign (expr, repl);
2205 if (insert_after)
2206 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2207 else
2208 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2209 update_stmt (stmt);
2210 sra_stats.subtree_copies++;
2213 if (access->first_child)
2214 generate_subtree_copies (access->first_child, agg, top_offset,
2215 start_offset, chunk_size, gsi,
2216 write, insert_after);
2218 access = access->next_sibling;
2220 while (access);
2223 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2224 root of the subtree to be processed. GSI is the statement iterator used
2225 for inserting statements which are added after the current statement if
2226 INSERT_AFTER is true or before it otherwise. */
2228 static void
2229 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2230 bool insert_after)
2233 struct access *child;
2235 if (access->grp_to_be_replaced)
2237 gimple stmt;
2239 stmt = gimple_build_assign (get_access_replacement (access),
2240 fold_convert (access->type,
2241 integer_zero_node));
2242 if (insert_after)
2243 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2244 else
2245 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2246 update_stmt (stmt);
2249 for (child = access->first_child; child; child = child->next_sibling)
2250 init_subtree_with_zero (child, gsi, insert_after);
2253 /* Search for an access representative for the given expression EXPR and
2254 return it or NULL if it cannot be found. */
2256 static struct access *
2257 get_access_for_expr (tree expr)
2259 HOST_WIDE_INT offset, size, max_size;
2260 tree base;
2262 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2263 a different size than the size of its argument and we need the latter
2264 one. */
2265 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2266 expr = TREE_OPERAND (expr, 0);
2268 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2269 if (max_size == -1 || !DECL_P (base))
2270 return NULL;
2272 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2273 return NULL;
2275 return get_var_base_offset_size_access (base, offset, max_size);
2278 /* Replace the expression EXPR with a scalar replacement if there is one and
2279 generate other statements to do type conversion or subtree copying if
2280 necessary. GSI is used to place newly created statements, WRITE is true if
2281 the expression is being written to (it is on a LHS of a statement or output
2282 in an assembly statement). */
2284 static bool
2285 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2287 struct access *access;
2288 tree type, bfr;
2290 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2292 bfr = *expr;
2293 expr = &TREE_OPERAND (*expr, 0);
2295 else
2296 bfr = NULL_TREE;
2298 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2299 expr = &TREE_OPERAND (*expr, 0);
2300 access = get_access_for_expr (*expr);
2301 if (!access)
2302 return false;
2303 type = TREE_TYPE (*expr);
2305 if (access->grp_to_be_replaced)
2307 tree repl = get_access_replacement (access);
2308 /* If we replace a non-register typed access simply use the original
2309 access expression to extract the scalar component afterwards.
2310 This happens if scalarizing a function return value or parameter
2311 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2312 gcc.c-torture/compile/20011217-1.c.
2314 We also want to use this when accessing a complex or vector which can
2315 be accessed as a different type too, potentially creating a need for
2316 type conversion (see PR42196) and when scalarized unions are involved
2317 in assembler statements (see PR42398). */
2318 if (!useless_type_conversion_p (type, access->type))
2320 tree ref = access->base;
2321 bool ok;
2323 ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2324 access->offset, access->type, false);
2325 gcc_assert (ok);
2327 if (write)
2329 gimple stmt;
2331 if (access->grp_partial_lhs)
2332 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2333 false, GSI_NEW_STMT);
2334 stmt = gimple_build_assign (repl, ref);
2335 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2337 else
2339 gimple stmt;
2341 if (access->grp_partial_lhs)
2342 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2343 true, GSI_SAME_STMT);
2344 stmt = gimple_build_assign (ref, repl);
2345 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2348 else
2349 *expr = repl;
2350 sra_stats.exprs++;
2353 if (access->first_child)
2355 HOST_WIDE_INT start_offset, chunk_size;
2356 if (bfr
2357 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2358 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2360 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2361 start_offset = access->offset
2362 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2364 else
2365 start_offset = chunk_size = 0;
2367 generate_subtree_copies (access->first_child, access->base, 0,
2368 start_offset, chunk_size, gsi, write, write);
2370 return true;
2373 /* Where scalar replacements of the RHS have been written to when a replacement
2374 of an LHS of an assignment cannot be directly loaded from a replacement of
2375 the RHS. */
2376 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2377 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2378 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2380 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2381 base aggregate if there are unscalarized data or directly to LHS
2382 otherwise. */
2384 static enum unscalarized_data_handling
2385 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2386 gimple_stmt_iterator *gsi)
2388 if (top_racc->grp_unscalarized_data)
2390 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2391 gsi, false, false);
2392 return SRA_UDH_RIGHT;
2394 else
2396 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2397 0, 0, gsi, false, false);
2398 return SRA_UDH_LEFT;
2403 /* Try to generate statements to load all sub-replacements in an access
2404 (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2405 (sub)tree. If that is not possible, refresh the TOP_RACC base aggregate and
2406 load the accesses from it. LEFT_OFFSET is the offset of the left whole
2407 subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2408 GSI is a stmt iterator used for statement insertions. *REFRESHED tells
2409 whether the RHS top aggregate has already been refreshed by contents of its
2410 scalar reductions and is updated if this function has to do so. */
2412 static void
2413 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2414 HOST_WIDE_INT left_offset,
2415 HOST_WIDE_INT right_offset,
2416 gimple_stmt_iterator *old_gsi,
2417 gimple_stmt_iterator *new_gsi,
2418 enum unscalarized_data_handling *refreshed,
2419 tree lhs)
2421 location_t loc = EXPR_LOCATION (lacc->expr);
2424 if (lacc->grp_to_be_replaced)
2426 struct access *racc;
2427 HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2428 gimple stmt;
2429 tree rhs;
2431 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2432 if (racc && racc->grp_to_be_replaced)
2434 rhs = get_access_replacement (racc);
2435 if (!useless_type_conversion_p (lacc->type, racc->type))
2436 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2438 else
2440 /* No suitable access on the right hand side, need to load from
2441 the aggregate. See if we have to update it first... */
2442 if (*refreshed == SRA_UDH_NONE)
2443 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2444 lhs, old_gsi);
2446 if (*refreshed == SRA_UDH_LEFT)
2448 bool repl_found;
2450 rhs = lacc->base;
2451 repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2452 lacc->offset, lacc->type,
2453 false);
2454 gcc_assert (repl_found);
2456 else
2458 bool repl_found;
2460 rhs = top_racc->base;
2461 repl_found = build_ref_for_offset (&rhs,
2462 TREE_TYPE (top_racc->base),
2463 offset, lacc->type, false);
2464 gcc_assert (repl_found);
2468 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2469 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2470 update_stmt (stmt);
2471 sra_stats.subreplacements++;
2473 else if (*refreshed == SRA_UDH_NONE
2474 && lacc->grp_read && !lacc->grp_covered)
2475 *refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2476 old_gsi);
2478 if (lacc->first_child)
2479 load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2480 left_offset, right_offset,
2481 old_gsi, new_gsi, refreshed, lhs);
2482 lacc = lacc->next_sibling;
2484 while (lacc);
2487 /* Result code for SRA assignment modification. */
2488 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2489 SRA_AM_MODIFIED, /* stmt changed but not
2490 removed */
2491 SRA_AM_REMOVED }; /* stmt eliminated */
2493 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2494 to the assignment and GSI is the statement iterator pointing at it. Returns
2495 the same values as sra_modify_assign. */
2497 static enum assignment_mod_result
2498 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2500 tree lhs = gimple_assign_lhs (*stmt);
2501 struct access *acc;
2503 acc = get_access_for_expr (lhs);
2504 if (!acc)
2505 return SRA_AM_NONE;
2507 if (VEC_length (constructor_elt,
2508 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2510 /* I have never seen this code path trigger, but if it can happen, the
2511 following should handle it gracefully. */
2512 if (access_has_children_p (acc))
2513 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2514 true, true);
2515 return SRA_AM_MODIFIED;
2518 if (acc->grp_covered)
2520 init_subtree_with_zero (acc, gsi, false);
2521 unlink_stmt_vdef (*stmt);
2522 gsi_remove (gsi, true);
2523 return SRA_AM_REMOVED;
2525 else
2527 init_subtree_with_zero (acc, gsi, true);
2528 return SRA_AM_MODIFIED;
2532 /* Create a new suitable default definition SSA_NAME and replace all uses of
2533 SSA with it. RACC is the access describing the uninitialized part of an
2534 aggregate that is being loaded. */
2536 static void
2537 replace_uses_with_default_def_ssa_name (tree ssa, struct access *racc)
2539 tree repl, decl;
2541 decl = get_unrenamed_access_replacement (racc);
2543 repl = gimple_default_def (cfun, decl);
2544 if (!repl)
2546 repl = make_ssa_name (decl, gimple_build_nop ());
2547 set_default_def (decl, repl);
2550 replace_uses_by (ssa, repl);
2553 /* Examine both sides of the assignment statement pointed to by STMT, replace
2554 them with a scalar replacement if there is one and generate copying of
2555 replacements if scalarized aggregates have been used in the assignment. GSI
2556 is used to hold generated statements for type conversions and subtree
2557 copying. */
2559 static enum assignment_mod_result
2560 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2562 struct access *lacc, *racc;
2563 tree lhs, rhs;
2564 bool modify_this_stmt = false;
2565 bool force_gimple_rhs = false;
2566 location_t loc = gimple_location (*stmt);
2567 gimple_stmt_iterator orig_gsi = *gsi;
2569 if (!gimple_assign_single_p (*stmt))
2570 return SRA_AM_NONE;
2571 lhs = gimple_assign_lhs (*stmt);
2572 rhs = gimple_assign_rhs1 (*stmt);
2574 if (TREE_CODE (rhs) == CONSTRUCTOR)
2575 return sra_modify_constructor_assign (stmt, gsi);
2577 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2578 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2579 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2581 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2582 gsi, false);
2583 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2584 gsi, true);
2585 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2588 lacc = get_access_for_expr (lhs);
2589 racc = get_access_for_expr (rhs);
2590 if (!lacc && !racc)
2591 return SRA_AM_NONE;
2593 if (lacc && lacc->grp_to_be_replaced)
2595 lhs = get_access_replacement (lacc);
2596 gimple_assign_set_lhs (*stmt, lhs);
2597 modify_this_stmt = true;
2598 if (lacc->grp_partial_lhs)
2599 force_gimple_rhs = true;
2600 sra_stats.exprs++;
2603 if (racc && racc->grp_to_be_replaced)
2605 rhs = get_access_replacement (racc);
2606 modify_this_stmt = true;
2607 if (racc->grp_partial_lhs)
2608 force_gimple_rhs = true;
2609 sra_stats.exprs++;
2612 if (modify_this_stmt)
2614 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2616 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2617 ??? This should move to fold_stmt which we simply should
2618 call after building a VIEW_CONVERT_EXPR here. */
2619 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2620 && !access_has_children_p (lacc))
2622 tree expr = lhs;
2623 if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2624 TREE_TYPE (rhs), false))
2626 lhs = expr;
2627 gimple_assign_set_lhs (*stmt, expr);
2630 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2631 && !access_has_children_p (racc))
2633 tree expr = rhs;
2634 if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2635 TREE_TYPE (lhs), false))
2636 rhs = expr;
2638 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2640 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2641 if (is_gimple_reg_type (TREE_TYPE (lhs))
2642 && TREE_CODE (lhs) != SSA_NAME)
2643 force_gimple_rhs = true;
2648 /* From this point on, the function deals with assignments between
2649 aggregates when at least one has scalar reductions of some of its
2650 components. There are three possible scenarios: 1) both the LHS and RHS have
2651 to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
2653 In the first case, we would like to load the LHS components from RHS
2654 components whenever possible. If that is not possible, we would like to
2655 read it directly from the RHS (after updating it by storing in it its own
2656 components). If there are some necessary unscalarized data in the LHS,
2657 those will be loaded by the original assignment too. If neither of these
2658 cases happen, the original statement can be removed. Most of this is done
2659 by load_assign_lhs_subreplacements.
2661 In the second case, we would like to store all RHS scalarized components
2662 directly into LHS and if they cover the aggregate completely, remove the
2663 statement too. In the third case, we want the LHS components to be loaded
2664 directly from the RHS (DSE will remove the original statement if it
2665 becomes redundant).
2667 This is a bit complex but manageable when types match and when unions do
2668 not cause confusion in a way that we cannot really load a component of LHS
2669 from the RHS or vice versa (the access representing this level can have
2670 subaccesses that are accessible only through a different union field at a
2671 higher level - different from the one used in the examined expression).
2672 Unions are fun.
2674 Therefore, I specially handle a fourth case, happening when there is a
2675 specific type cast or it is impossible to locate a scalarized subaccess on
2676 the other side of the expression. If that happens, I simply "refresh" the
2677 RHS by storing in it its scalarized components, leave the original statement
2678 there to do the copying and then load the scalar replacements of the LHS.
2679 This is what the first branch does. */
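/* To make the first case concrete (a hypothetical example, not from any
   testcase): for

     struct S { int i; float f; };
     struct S a, b;
     a = b;

   with both sides scalarized, the loads generated below become

     SR_a_i = SR_b_i;
     SR_a_f = SR_b_f;

   and the original aggregate copy is removed, whereas in the fourth case
   the copy "a = b" is kept and the SR_a_* replacements are reloaded from
   it. */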
2681 if (gimple_has_volatile_ops (*stmt)
2682 || contains_view_convert_expr_p (rhs)
2683 || contains_view_convert_expr_p (lhs)
2684 || (access_has_children_p (racc)
2685 && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2686 || (access_has_children_p (lacc)
2687 && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2689 if (access_has_children_p (racc))
2690 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2691 gsi, false, false);
2692 if (access_has_children_p (lacc))
2693 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2694 gsi, true, true);
2695 sra_stats.separate_lhs_rhs_handling++;
2697 else
2699 if (access_has_children_p (lacc) && access_has_children_p (racc))
2701 gimple_stmt_iterator orig_gsi = *gsi;
2702 enum unscalarized_data_handling refreshed;
2704 if (lacc->grp_read && !lacc->grp_covered)
2705 refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2706 else
2707 refreshed = SRA_UDH_NONE;
2709 load_assign_lhs_subreplacements (lacc->first_child, racc,
2710 lacc->offset, racc->offset,
2711 &orig_gsi, gsi, &refreshed, lhs);
2712 if (refreshed != SRA_UDH_RIGHT)
2714 if (*stmt == gsi_stmt (*gsi))
2715 gsi_next (gsi);
2717 unlink_stmt_vdef (*stmt);
2718 gsi_remove (&orig_gsi, true);
2719 sra_stats.deleted++;
2720 return SRA_AM_REMOVED;
2723 else
2725 if (racc)
2727 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2729 if (racc->first_child)
2730 generate_subtree_copies (racc->first_child, lhs,
2731 racc->offset, 0, 0, gsi,
2732 false, false);
2733 gcc_assert (*stmt == gsi_stmt (*gsi));
2734 if (TREE_CODE (lhs) == SSA_NAME)
2735 replace_uses_with_default_def_ssa_name (lhs, racc);
2737 unlink_stmt_vdef (*stmt);
2738 gsi_remove (gsi, true);
2739 sra_stats.deleted++;
2740 return SRA_AM_REMOVED;
2742 else if (racc->first_child)
2743 generate_subtree_copies (racc->first_child, lhs,
2744 racc->offset, 0, 0, gsi, false, true);
2746 if (access_has_children_p (lacc))
2747 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2748 0, 0, gsi, true, true);
2752 /* This gimplification must be done after generate_subtree_copies, lest we
2753 insert the subtree copies in the middle of the gimplified sequence. */
2754 if (force_gimple_rhs)
2755 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2756 true, GSI_SAME_STMT);
2757 if (gimple_assign_rhs1 (*stmt) != rhs)
2759 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2760 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2763 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2766 /* Traverse the function body and carry out all modifications as decided in
2767 analyze_all_variable_accesses. */
2769 static void
2770 sra_modify_function_body (void)
2772 basic_block bb;
2774 FOR_EACH_BB (bb)
2776 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2777 while (!gsi_end_p (gsi))
2779 gimple stmt = gsi_stmt (gsi);
2780 enum assignment_mod_result assign_result;
2781 bool modified = false, deleted = false;
2782 tree *t;
2783 unsigned i;
2785 switch (gimple_code (stmt))
2787 case GIMPLE_RETURN:
2788 t = gimple_return_retval_ptr (stmt);
2789 if (*t != NULL_TREE)
2790 modified |= sra_modify_expr (t, &gsi, false);
2791 break;
2793 case GIMPLE_ASSIGN:
2794 assign_result = sra_modify_assign (&stmt, &gsi);
2795 modified |= assign_result == SRA_AM_MODIFIED;
2796 deleted = assign_result == SRA_AM_REMOVED;
2797 break;
2799 case GIMPLE_CALL:
2800 /* Operands must be processed before the lhs. */
2801 for (i = 0; i < gimple_call_num_args (stmt); i++)
2803 t = gimple_call_arg_ptr (stmt, i);
2804 modified |= sra_modify_expr (t, &gsi, false);
2807 if (gimple_call_lhs (stmt))
2809 t = gimple_call_lhs_ptr (stmt);
2810 modified |= sra_modify_expr (t, &gsi, true);
2812 break;
2814 case GIMPLE_ASM:
2815 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2817 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2818 modified |= sra_modify_expr (t, &gsi, false);
2820 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2822 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2823 modified |= sra_modify_expr (t, &gsi, true);
2825 break;
2827 default:
2828 break;
2831 if (modified)
2833 update_stmt (stmt);
2834 maybe_clean_eh_stmt (stmt);
2836 if (!deleted)
2837 gsi_next (&gsi);
2842 /* Generate statements initializing scalar replacements of parts of function
2843 parameters. */
2845 static void
2846 initialize_parameter_reductions (void)
2848 gimple_stmt_iterator gsi;
2849 gimple_seq seq = NULL;
2850 tree parm;
2852 for (parm = DECL_ARGUMENTS (current_function_decl);
2853 parm;
2854 parm = TREE_CHAIN (parm))
2856 VEC (access_p, heap) *access_vec;
2857 struct access *access;
2859 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2860 continue;
2861 access_vec = get_base_access_vector (parm);
2862 if (!access_vec)
2863 continue;
2865 if (!seq)
2867 seq = gimple_seq_alloc ();
2868 gsi = gsi_start (seq);
2871 for (access = VEC_index (access_p, access_vec, 0);
2872 access;
2873 access = access->next_grp)
2874 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2877 if (seq)
2878 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2881 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
2882 it reveals there are components of some aggregates to be scalarized, it runs
2883 the required transformations. */
2884 static unsigned int
2885 perform_intra_sra (void)
2887 int ret = 0;
2888 sra_initialize ();
2890 if (!find_var_candidates ())
2891 goto out;
2893 if (!scan_function ())
2894 goto out;
2896 if (!analyze_all_variable_accesses ())
2897 goto out;
2899 sra_modify_function_body ();
2900 initialize_parameter_reductions ();
2902 statistics_counter_event (cfun, "Scalar replacements created",
2903 sra_stats.replacements);
2904 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2905 statistics_counter_event (cfun, "Subtree copy stmts",
2906 sra_stats.subtree_copies);
2907 statistics_counter_event (cfun, "Subreplacement stmts",
2908 sra_stats.subreplacements);
2909 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2910 statistics_counter_event (cfun, "Separate LHS and RHS handling",
2911 sra_stats.separate_lhs_rhs_handling);
2913 ret = TODO_update_ssa;
2915 out:
2916 sra_deinitialize ();
2917 return ret;
2920 /* Perform early intraprocedural SRA. */
2921 static unsigned int
2922 early_intra_sra (void)
2924 sra_mode = SRA_MODE_EARLY_INTRA;
2925 return perform_intra_sra ();
2928 /* Perform "late" intraprocedural SRA. */
2929 static unsigned int
2930 late_intra_sra (void)
2932 sra_mode = SRA_MODE_INTRA;
2933 return perform_intra_sra ();
2937 static bool
2938 gate_intra_sra (void)
2940 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
2944 struct gimple_opt_pass pass_sra_early =
2947 GIMPLE_PASS,
2948 "esra", /* name */
2949 gate_intra_sra, /* gate */
2950 early_intra_sra, /* execute */
2951 NULL, /* sub */
2952 NULL, /* next */
2953 0, /* static_pass_number */
2954 TV_TREE_SRA, /* tv_id */
2955 PROP_cfg | PROP_ssa, /* properties_required */
2956 0, /* properties_provided */
2957 0, /* properties_destroyed */
2958 0, /* todo_flags_start */
2959 TODO_dump_func
2960 | TODO_update_ssa
2961 | TODO_ggc_collect
2962 | TODO_verify_ssa /* todo_flags_finish */
2966 struct gimple_opt_pass pass_sra =
2969 GIMPLE_PASS,
2970 "sra", /* name */
2971 gate_intra_sra, /* gate */
2972 late_intra_sra, /* execute */
2973 NULL, /* sub */
2974 NULL, /* next */
2975 0, /* static_pass_number */
2976 TV_TREE_SRA, /* tv_id */
2977 PROP_cfg | PROP_ssa, /* properties_required */
2978 0, /* properties_provided */
2979 0, /* properties_destroyed */
2980 TODO_update_address_taken, /* todo_flags_start */
2981 TODO_dump_func
2982 | TODO_update_ssa
2983 | TODO_ggc_collect
2984 | TODO_verify_ssa /* todo_flags_finish */
2989 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
2990 parameter. */
2992 static bool
2993 is_unused_scalar_param (tree parm)
2995 tree name;
2996 return (is_gimple_reg (parm)
2997 && (!(name = gimple_default_def (cfun, parm))
2998 || has_zero_uses (name)));
3001 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3002 examine whether there are any direct or otherwise infeasible ones. If so,
3003 return true, otherwise return false. PARM must be a gimple register with a
3004 non-NULL default definition. */
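/* For example (a hypothetical sketch), in

     int foo (int *p) { bar (p); return *p; }

   the dereference *p is a handled use, but passing P itself to bar is a
   direct use, so the function below would return true and P would not be
   considered for IPA-SRA. */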
3006 static bool
3007 ptr_parm_has_direct_uses (tree parm)
3009 imm_use_iterator ui;
3010 gimple stmt;
3011 tree name = gimple_default_def (cfun, parm);
3012 bool ret = false;
3014 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3016 int uses_ok = 0;
3017 use_operand_p use_p;
3019 if (is_gimple_debug (stmt))
3020 continue;
3022 /* Valid uses include dereferences on the lhs and the rhs. */
3023 if (gimple_has_lhs (stmt))
3025 tree lhs = gimple_get_lhs (stmt);
3026 while (handled_component_p (lhs))
3027 lhs = TREE_OPERAND (lhs, 0);
3028 if (INDIRECT_REF_P (lhs)
3029 && TREE_OPERAND (lhs, 0) == name)
3030 uses_ok++;
3032 if (gimple_assign_single_p (stmt))
3034 tree rhs = gimple_assign_rhs1 (stmt);
3035 while (handled_component_p (rhs))
3036 rhs = TREE_OPERAND (rhs, 0);
3037 if (INDIRECT_REF_P (rhs)
3038 && TREE_OPERAND (rhs, 0) == name)
3039 uses_ok++;
3041 else if (is_gimple_call (stmt))
3043 unsigned i;
3044 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3046 tree arg = gimple_call_arg (stmt, i);
3047 while (handled_component_p (arg))
3048 arg = TREE_OPERAND (arg, 0);
3049 if (INDIRECT_REF_P (arg)
3050 && TREE_OPERAND (arg, 0) == name)
3051 uses_ok++;
3055 /* If the number of valid uses does not match the number of
3056 uses in this stmt, there is an unhandled use. */
3057 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3058 --uses_ok;
3060 if (uses_ok != 0)
3061 ret = true;
3063 if (ret)
3064 BREAK_FROM_IMM_USE_STMT (ui);
3067 return ret;
3070 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3071 them in candidate_bitmap. Note that these do not necessarily include
3072 parameters which are unused and thus can be removed. Return true iff any
3073 such candidate has been found. */
3075 static bool
3076 find_param_candidates (void)
3078 tree parm;
3079 int count = 0;
3080 bool ret = false;
3082 for (parm = DECL_ARGUMENTS (current_function_decl);
3083 parm;
3084 parm = TREE_CHAIN (parm))
3086 tree type = TREE_TYPE (parm);
3088 count++;
3090 if (TREE_THIS_VOLATILE (parm)
3091 || TREE_ADDRESSABLE (parm)
3092 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3093 continue;
3095 if (is_unused_scalar_param (parm))
3097 ret = true;
3098 continue;
3101 if (POINTER_TYPE_P (type))
3103 type = TREE_TYPE (type);
3105 if (TREE_CODE (type) == FUNCTION_TYPE
3106 || TYPE_VOLATILE (type)
3107 || !is_gimple_reg (parm)
3108 || is_va_list_type (type)
3109 || ptr_parm_has_direct_uses (parm))
3110 continue;
3112 else if (!AGGREGATE_TYPE_P (type))
3113 continue;
3115 if (!COMPLETE_TYPE_P (type)
3116 || !host_integerp (TYPE_SIZE (type), 1)
3117 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3118 || (AGGREGATE_TYPE_P (type)
3119 && type_internals_preclude_sra_p (type)))
3120 continue;
3122 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3123 ret = true;
3124 if (dump_file && (dump_flags & TDF_DETAILS))
3126 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3127 print_generic_expr (dump_file, parm, 0);
3128 fprintf (dump_file, "\n");
3132 func_param_count = count;
3133 return ret;
3136 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3137 maybe_modified. */
3139 static bool
3140 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3141 void *data)
3143 struct access *repr = (struct access *) data;
3145 repr->grp_maybe_modified = 1;
3146 return true;
3149 /* Analyze what representatives (in linked lists accessible from
3150 REPRESENTATIVES) can be modified by side effects of statements in the
3151 current function. */
3153 static void
3154 analyze_modified_params (VEC (access_p, heap) *representatives)
3156 int i;
3158 for (i = 0; i < func_param_count; i++)
3160 struct access *repr;
3162 for (repr = VEC_index (access_p, representatives, i);
3163 repr;
3164 repr = repr->next_grp)
3166 struct access *access;
3167 bitmap visited;
3168 ao_ref ar;
3170 if (no_accesses_p (repr))
3171 continue;
3172 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3173 || repr->grp_maybe_modified)
3174 continue;
3176 ao_ref_init (&ar, repr->expr);
3177 visited = BITMAP_ALLOC (NULL);
3178 for (access = repr; access; access = access->next_sibling)
3180 /* All accesses are read ones; otherwise grp_maybe_modified would
3181 be trivially set. */
3182 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3183 mark_maybe_modified, repr, &visited);
3184 if (repr->grp_maybe_modified)
3185 break;
3187 BITMAP_FREE (visited);
3192 /* Propagate distances in bb_dereferences in the opposite direction to the
3193 control flow edges, in each step storing the maximum of the current value
3194 and the minimum of all successors. These steps are repeated until the table
3195 stabilizes. Note that BBs which might terminate the function (according to
3196 the final_bbs bitmap) are never updated in this way. */
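/* A small worked example (hypothetical CFG): if a parameter is dereferenced
   up to bit 64 in both successors of a block and only up to bit 32 in the
   block itself, the propagated value for that block becomes

     max (32, min (64, 64)) = 64

   because a dereference that certainly happens on every path out of the
   block may be treated as happening in the block as well. */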
3198 static void
3199 propagate_dereference_distances (void)
3201 VEC (basic_block, heap) *queue;
3202 basic_block bb;
3204 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3205 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3206 FOR_EACH_BB (bb)
3208 VEC_quick_push (basic_block, queue, bb);
3209 bb->aux = bb;
3212 while (!VEC_empty (basic_block, queue))
3214 edge_iterator ei;
3215 edge e;
3216 bool change = false;
3217 int i;
3219 bb = VEC_pop (basic_block, queue);
3220 bb->aux = NULL;
3222 if (bitmap_bit_p (final_bbs, bb->index))
3223 continue;
3225 for (i = 0; i < func_param_count; i++)
3227 int idx = bb->index * func_param_count + i;
3228 bool first = true;
3229 HOST_WIDE_INT inh = 0;
3231 FOR_EACH_EDGE (e, ei, bb->succs)
3233 int succ_idx = e->dest->index * func_param_count + i;
3235 if (e->src == EXIT_BLOCK_PTR)
3236 continue;
3238 if (first)
3240 first = false;
3241 inh = bb_dereferences [succ_idx];
3243 else if (bb_dereferences [succ_idx] < inh)
3244 inh = bb_dereferences [succ_idx];
3247 if (!first && bb_dereferences[idx] < inh)
3249 bb_dereferences[idx] = inh;
3250 change = true;
3254 if (change && !bitmap_bit_p (final_bbs, bb->index))
3255 FOR_EACH_EDGE (e, ei, bb->preds)
3257 if (e->src->aux)
3258 continue;
3260 e->src->aux = e->src;
3261 VEC_quick_push (basic_block, queue, e->src);
3265 VEC_free (basic_block, heap, queue);
3268 /* Dump a dereferences TABLE with heading STR to file F. */
3270 static void
3271 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3273 basic_block bb;
3275 fprintf (f, "%s", str);
3276 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3278 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3279 if (bb != EXIT_BLOCK_PTR)
3281 int i;
3282 for (i = 0; i < func_param_count; i++)
3284 int idx = bb->index * func_param_count + i;
3285 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3288 fprintf (f, "\n");
3290 fprintf (f, "\n");
3293 /* Determine which (parts of) parameters passed by reference and not assigned
3294 to are not certainly dereferenced in this function, and thus the
3295 dereferencing cannot be safely moved to the caller without potentially
3296 introducing a segfault. Mark such REPRESENTATIVES as
3297 grp_not_necessarilly_dereferenced.
3299 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3300 part, is calculated for each pointer parameter rather than a simple
3301 boolean, to handle cases when only a fraction of the whole
3302 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3303 an example).
3305 The maximum dereference distances for each pointer parameter and BB are
3306 already stored in bb_dereferences. This routine simply propagates these
3307 values upwards by propagate_dereference_distances and then compares the
3308 distances of individual parameters in the ENTRY BB to the equivalent
3309 distances of each representative of a (fraction of a) parameter. */
3311 static void
3312 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3314 int i;
3316 if (dump_file && (dump_flags & TDF_DETAILS))
3317 dump_dereferences_table (dump_file,
3318 "Dereference table before propagation:\n",
3319 bb_dereferences);
3321 propagate_dereference_distances ();
3323 if (dump_file && (dump_flags & TDF_DETAILS))
3324 dump_dereferences_table (dump_file,
3325 "Dereference table after propagation:\n",
3326 bb_dereferences);
3328 for (i = 0; i < func_param_count; i++)
3330 struct access *repr = VEC_index (access_p, representatives, i);
3331 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3333 if (!repr || no_accesses_p (repr))
3334 continue;
3338 if ((repr->offset + repr->size) > bb_dereferences[idx])
3339 repr->grp_not_necessarilly_dereferenced = 1;
3340 repr = repr->next_grp;
3342 while (repr);
3346 /* Return the representative access for the parameter declaration PARM if it is
3347 a scalar passed by reference which is not written to and the pointer value
3348 is not used directly. Thus, if it is legal to dereference it in the caller
3349 and we can rule out modifications through aliases, such a parameter should be
3350 turned into one passed by value. Return NULL otherwise. */
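/* In the simplest case (a hypothetical example), IPA-SRA can turn

     int foo (int *p) { return *p; }

   into

     int foo (int p) { return p; }

   provided the analyses below show that nothing is written through P, the
   pointer value is not used directly, the dereference is legal to move to
   the caller and modifications through aliases can be ruled out. */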
3352 static struct access *
3353 unmodified_by_ref_scalar_representative (tree parm)
3355 int i, access_count;
3356 struct access *repr;
3357 VEC (access_p, heap) *access_vec;
3359 access_vec = get_base_access_vector (parm);
3360 gcc_assert (access_vec);
3361 repr = VEC_index (access_p, access_vec, 0);
3362 if (repr->write)
3363 return NULL;
3364 repr->group_representative = repr;
3366 access_count = VEC_length (access_p, access_vec);
3367 for (i = 1; i < access_count; i++)
3369 struct access *access = VEC_index (access_p, access_vec, i);
3370 if (access->write)
3371 return NULL;
3372 access->group_representative = repr;
3373 access->next_sibling = repr->next_sibling;
3374 repr->next_sibling = access;
3377 repr->grp_read = 1;
3378 repr->grp_scalar_ptr = 1;
3379 return repr;
3382 /* Return true iff this access precludes IPA-SRA of the parameter it is
3383 associated with. */
3385 static bool
3386 access_precludes_ipa_sra_p (struct access *access)
3388 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3389 is an incompatible assignment in a call statement (and possibly even in asm
3390 statements). This can be relaxed by using a new temporary but only for
3391 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3392 intraprocedural SRA we deal with this by keeping the old aggregate around,
3393 something we cannot do in IPA-SRA.) */
3394 if (access->write
3395 && (is_gimple_call (access->stmt)
3396 || gimple_code (access->stmt) == GIMPLE_ASM))
3397 return true;
3399 return false;
3403 /* Sort collected accesses for parameter PARM, identify representatives for
3404 each accessed region and link them together. Return NULL if there are
3405 different but overlapping accesses, the special pointer value meaning there
3406 are no accesses for this parameter if that is the case, and the
3407 first representative otherwise. Set *RO_GRP if there is a group of accesses
3408 with only read (i.e. no write) accesses. */
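/* To illustrate the all-or-nothing law enforced below (hypothetical
   numbers): two accesses to the same parameter at offsets 0 and 16, both of
   size 32, overlap without being identical, so the whole parameter is
   rejected; accesses at offsets 0 and 32 of size 32 each are fine and yield
   two group representatives. */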
3410 static struct access *
3411 splice_param_accesses (tree parm, bool *ro_grp)
3413 int i, j, access_count, group_count;
3414 int agg_size, total_size = 0;
3415 struct access *access, *res, **prev_acc_ptr = &res;
3416 VEC (access_p, heap) *access_vec;
3418 access_vec = get_base_access_vector (parm);
3419 if (!access_vec)
3420 return &no_accesses_representant;
3421 access_count = VEC_length (access_p, access_vec);
3423 qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3424 compare_access_positions);
3426 i = 0;
3427 total_size = 0;
3428 group_count = 0;
3429 while (i < access_count)
3431 bool modification;
3432 access = VEC_index (access_p, access_vec, i);
3433 modification = access->write;
3434 if (access_precludes_ipa_sra_p (access))
3435 return NULL;
3437 /* Access is about to become group representative unless we find some
3438 nasty overlap which would preclude us from breaking this parameter
3439 apart. */
3441 j = i + 1;
3442 while (j < access_count)
3444 struct access *ac2 = VEC_index (access_p, access_vec, j);
3445 if (ac2->offset != access->offset)
3447 /* All or nothing law for parameters. */
3448 if (access->offset + access->size > ac2->offset)
3449 return NULL;
3450 else
3451 break;
3453 else if (ac2->size != access->size)
3454 return NULL;
3456 if (access_precludes_ipa_sra_p (ac2))
3457 return NULL;
3459 modification |= ac2->write;
3460 ac2->group_representative = access;
3461 ac2->next_sibling = access->next_sibling;
3462 access->next_sibling = ac2;
3463 j++;
3466 group_count++;
3467 access->grp_maybe_modified = modification;
3468 if (!modification)
3469 *ro_grp = true;
3470 *prev_acc_ptr = access;
3471 prev_acc_ptr = &access->next_grp;
3472 total_size += access->size;
3473 i = j;
3476 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3477 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3478 else
3479 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3480 if (total_size >= agg_size)
3481 return NULL;
3483 gcc_assert (group_count > 0);
3484 return res;
3487 /* Decide whether parameters with representative accesses given by REPR should
3488 be reduced into components. */
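/* For example (hypothetical sizes, assuming a
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2 and optimizing for speed): splitting
   a by-value parameter of 128 bits into two scalar components of 32 bits
   each gives total_size 64, which is below both the aggregate size (128)
   and the limit 2 * 128 = 256 bits, so the reduction below is accepted with
   new_param_count 2. */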
3490 static int
3491 decide_one_param_reduction (struct access *repr)
3493 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3494 bool by_ref;
3495 tree parm;
3497 parm = repr->base;
3498 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3499 gcc_assert (cur_parm_size > 0);
3501 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3503 by_ref = true;
3504 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3506 else
3508 by_ref = false;
3509 agg_size = cur_parm_size;
3512 if (dump_file)
3514 struct access *acc;
3515 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3516 print_generic_expr (dump_file, parm, 0);
3517 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3518 for (acc = repr; acc; acc = acc->next_grp)
3519 dump_access (dump_file, acc, true);
3522 total_size = 0;
3523 new_param_count = 0;
3525 for (; repr; repr = repr->next_grp)
3527 gcc_assert (parm == repr->base);
3528 new_param_count++;
3530 if (!by_ref || (!repr->grp_maybe_modified
3531 && !repr->grp_not_necessarilly_dereferenced))
3532 total_size += repr->size;
3533 else
3534 total_size += cur_parm_size;
3537 gcc_assert (new_param_count > 0);
3539 if (optimize_function_for_size_p (cfun))
3540 parm_size_limit = cur_parm_size;
3541 else
3542 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3543 * cur_parm_size);
3545 if (total_size < agg_size
3546 && total_size <= parm_size_limit)
3548 if (dump_file)
3549 fprintf (dump_file, " ....will be split into %i components\n",
3550 new_param_count);
3551 return new_param_count;
3553 else
3554 return 0;
3557 /* The order of the following enums is important; we need to do extra work for
3558 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3559 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3560 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3562 /* Identify representatives of all accesses to all candidate parameters for
3563 IPA-SRA. Return result based on what representatives have been found. */
3565 static enum ipa_splicing_result
3566 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3568 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3569 tree parm;
3570 struct access *repr;
3572 *representatives = VEC_alloc (access_p, heap, func_param_count);
3574 for (parm = DECL_ARGUMENTS (current_function_decl);
3575 parm;
3576 parm = TREE_CHAIN (parm))
3578 if (is_unused_scalar_param (parm))
3580 VEC_quick_push (access_p, *representatives,
3581 &no_accesses_representant);
3582 if (result == NO_GOOD_ACCESS)
3583 result = UNUSED_PARAMS;
3585 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3586 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3587 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3589 repr = unmodified_by_ref_scalar_representative (parm);
3590 VEC_quick_push (access_p, *representatives, repr);
3591 if (repr)
3592 result = UNMODIF_BY_REF_ACCESSES;
3594 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3596 bool ro_grp = false;
3597 repr = splice_param_accesses (parm, &ro_grp);
3598 VEC_quick_push (access_p, *representatives, repr);
3600 if (repr && !no_accesses_p (repr))
3602 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3604 if (ro_grp)
3605 result = UNMODIF_BY_REF_ACCESSES;
3606 else if (result < MODIF_BY_REF_ACCESSES)
3607 result = MODIF_BY_REF_ACCESSES;
3609 else if (result < BY_VAL_ACCESSES)
3610 result = BY_VAL_ACCESSES;
3612 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3613 result = UNUSED_PARAMS;
3615 else
3616 VEC_quick_push (access_p, *representatives, NULL);
3619 if (result == NO_GOOD_ACCESS)
3621 VEC_free (access_p, heap, *representatives);
3622 *representatives = NULL;
3623 return NO_GOOD_ACCESS;
3626 return result;
3629 /* Return the index of BASE in PARMS. Abort if it is not found. */
3631 static inline int
3632 get_param_index (tree base, VEC(tree, heap) *parms)
3634 int i, len;
3636 len = VEC_length (tree, parms);
3637 for (i = 0; i < len; i++)
3638 if (VEC_index (tree, parms, i) == base)
3639 return i;
3640 gcc_unreachable ();
3643 /* Convert the decisions made at the representative level into compact
3644 parameter adjustments. REPRESENTATIVES are pointers to first
3645 representatives of each param's accesses, ADJUSTMENTS_COUNT is the expected
3646 final number of adjustments. */
3648 static ipa_parm_adjustment_vec
3649 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3650 int adjustments_count)
3652 VEC (tree, heap) *parms;
3653 ipa_parm_adjustment_vec adjustments;
3654 tree parm;
3655 int i;
3657 gcc_assert (adjustments_count > 0);
3658 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3659 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3660 parm = DECL_ARGUMENTS (current_function_decl);
3661 for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3663 struct access *repr = VEC_index (access_p, representatives, i);
3665 if (!repr || no_accesses_p (repr))
3667 struct ipa_parm_adjustment *adj;
3669 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3670 memset (adj, 0, sizeof (*adj));
3671 adj->base_index = get_param_index (parm, parms);
3672 adj->base = parm;
3673 if (!repr)
3674 adj->copy_param = 1;
3675 else
3676 adj->remove_param = 1;
3678 else
3680 struct ipa_parm_adjustment *adj;
3681 int index = get_param_index (parm, parms);
3683 for (; repr; repr = repr->next_grp)
3685 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3686 memset (adj, 0, sizeof (*adj));
3687 gcc_assert (repr->base == parm);
3688 adj->base_index = index;
3689 adj->base = repr->base;
3690 adj->type = repr->type;
3691 adj->offset = repr->offset;
3692 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3693 && (repr->grp_maybe_modified
3694 || repr->grp_not_necessarilly_dereferenced));
3699 VEC_free (tree, heap, parms);
3700 return adjustments;
3703 /* Analyze the collected accesses and produce a plan for what to do with the
3704 parameters in the form of adjustments, NULL meaning nothing. */
3706 static ipa_parm_adjustment_vec
3707 analyze_all_param_acesses (void)
3709 enum ipa_splicing_result repr_state;
3710 bool proceed = false;
3711 int i, adjustments_count = 0;
3712 VEC (access_p, heap) *representatives;
3713 ipa_parm_adjustment_vec adjustments;
3715 repr_state = splice_all_param_accesses (&representatives);
3716 if (repr_state == NO_GOOD_ACCESS)
3717 return NULL;
3719 /* If there are any parameters passed by reference which are not modified
3720 directly, we need to check whether they can be modified indirectly. */
3721 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3723 analyze_caller_dereference_legality (representatives);
3724 analyze_modified_params (representatives);
3727 for (i = 0; i < func_param_count; i++)
3729 struct access *repr = VEC_index (access_p, representatives, i);
3731 if (repr && !no_accesses_p (repr))
3733 if (repr->grp_scalar_ptr)
3735 adjustments_count++;
3736 if (repr->grp_not_necessarilly_dereferenced
3737 || repr->grp_maybe_modified)
3738 VEC_replace (access_p, representatives, i, NULL);
3739 else
3741 proceed = true;
3742 sra_stats.scalar_by_ref_to_by_val++;
3745 else
3747 int new_components = decide_one_param_reduction (repr);
3749 if (new_components == 0)
3751 VEC_replace (access_p, representatives, i, NULL);
3752 adjustments_count++;
3754 else
3756 adjustments_count += new_components;
3757 sra_stats.aggregate_params_reduced++;
3758 sra_stats.param_reductions_created += new_components;
3759 proceed = true;
3763 else
3765 if (no_accesses_p (repr))
3767 proceed = true;
3768 sra_stats.deleted_unused_parameters++;
3770 adjustments_count++;
3774 if (!proceed && dump_file)
3775 fprintf (dump_file, "NOT proceeding to change params.\n");
3777 if (proceed)
3778 adjustments = turn_representatives_into_adjustments (representatives,
3779 adjustments_count);
3780 else
3781 adjustments = NULL;
3783 VEC_free (access_p, heap, representatives);
3784 return adjustments;
3787 /* If a parameter replacement identified by ADJ does not yet exist in the form
3788 of a declaration, create it and record it; otherwise return the previously
3789 created one. */
3791 static tree
3792 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3794 tree repl;
3795 if (!adj->new_ssa_base)
3797 char *pretty_name = make_fancy_name (adj->base);
3799 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3800 DECL_NAME (repl) = get_identifier (pretty_name);
3801 obstack_free (&name_obstack, pretty_name);
3803 get_var_ann (repl);
3804 add_referenced_var (repl);
3805 adj->new_ssa_base = repl;
3807 else
3808 repl = adj->new_ssa_base;
3809 return repl;
3812 /* Find the first adjustment for a particular parameter BASE in a vector of
3813 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3814 adjustment. */
3816 static struct ipa_parm_adjustment *
3817 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3819 int i, len;
3821 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3822 for (i = 0; i < len; i++)
3824 struct ipa_parm_adjustment *adj;
3826 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3827 if (!adj->copy_param && adj->base == base)
3828 return adj;
3831 return NULL;
3834 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
3835 removed because its value is not used, replace the SSA_NAME and all of its
3836 uses with one relating to a created VAR_DECL and return true.
3837 ADJUSTMENTS is a pointer to an adjustments vector. */
3839 static bool
3840 replace_removed_params_ssa_names (gimple stmt,
3841 ipa_parm_adjustment_vec adjustments)
3843 struct ipa_parm_adjustment *adj;
3844 tree lhs, decl, repl, name;
3846 if (gimple_code (stmt) == GIMPLE_PHI)
3847 lhs = gimple_phi_result (stmt);
3848 else if (is_gimple_assign (stmt))
3849 lhs = gimple_assign_lhs (stmt);
3850 else if (is_gimple_call (stmt))
3851 lhs = gimple_call_lhs (stmt);
3852 else
3853 gcc_unreachable ();
3855 if (TREE_CODE (lhs) != SSA_NAME)
3856 return false;
3857 decl = SSA_NAME_VAR (lhs);
3858 if (TREE_CODE (decl) != PARM_DECL)
3859 return false;
3861 adj = get_adjustment_for_base (adjustments, decl);
3862 if (!adj)
3863 return false;
3865 repl = get_replaced_param_substitute (adj);
3866 name = make_ssa_name (repl, stmt);
3868 if (dump_file)
3870 fprintf (dump_file, "replacing an SSA name of a removed param ");
3871 print_generic_expr (dump_file, lhs, 0);
3872 fprintf (dump_file, " with ");
3873 print_generic_expr (dump_file, name, 0);
3874 fprintf (dump_file, "\n");
3877 if (is_gimple_assign (stmt))
3878 gimple_assign_set_lhs (stmt, name);
3879 else if (is_gimple_call (stmt))
3880 gimple_call_set_lhs (stmt, name);
3881 else
3882 gimple_phi_set_result (stmt, name);
3884 replace_uses_by (lhs, name);
3885 return true;
3888 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3889 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3890 specifies whether the function should care about type incompatibility
3891 between the current and new expressions. If it is false, the function will leave
3892 incompatibility issues to the caller. Return true iff the expression
3893 was modified. */
3895 static bool
3896 sra_ipa_modify_expr (tree *expr, bool convert,
3897 ipa_parm_adjustment_vec adjustments)
3899 int i, len;
3900 struct ipa_parm_adjustment *adj, *cand = NULL;
3901 HOST_WIDE_INT offset, size, max_size;
3902 tree base, src;
3904 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3906 if (TREE_CODE (*expr) == BIT_FIELD_REF
3907 || TREE_CODE (*expr) == IMAGPART_EXPR
3908 || TREE_CODE (*expr) == REALPART_EXPR)
3910 expr = &TREE_OPERAND (*expr, 0);
3911 convert = true;
3914 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3915 if (!base || size == -1 || max_size == -1)
3916 return false;
3918 if (INDIRECT_REF_P (base))
3919 base = TREE_OPERAND (base, 0);
3921 base = get_ssa_base_param (base);
3922 if (!base || TREE_CODE (base) != PARM_DECL)
3923 return false;
3925 for (i = 0; i < len; i++)
3927 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3929 if (adj->base == base
3930 && (adj->offset == offset || adj->remove_param))
3932 cand = adj;
3933 break;
3936 if (!cand || cand->copy_param || cand->remove_param)
3937 return false;
3939 if (cand->by_ref)
3941 tree folded;
3942 src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3943 cand->reduction);
3944 folded = gimple_fold_indirect_ref (src);
3945 if (folded)
3946 src = folded;
3948 else
3949 src = cand->reduction;
3951 if (dump_file && (dump_flags & TDF_DETAILS))
3953 fprintf (dump_file, "About to replace expr ");
3954 print_generic_expr (dump_file, *expr, 0);
3955 fprintf (dump_file, " with ");
3956 print_generic_expr (dump_file, src, 0);
3957 fprintf (dump_file, "\n");
3960 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3962 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3963 *expr = vce;
3965 else
3966 *expr = src;
3967 return true;
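/* A hypothetical example of the rewrite: assume an adjustment that
   reduces the by-reference parameter "s" to the scalar member at
   OFFSET, with "ISRA.5" as the new parameter.  A use such as
     s->f
   then becomes
     *ISRA.5
   (or its folded form when gimple_fold_indirect_ref succeeds), and is
   wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the old and
   new types are not useless-convertible.  All names above are
   illustrative.  */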
3970 /* If the statement pointed to by STMT_PTR contains any expressions that need
3971 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
3972 potential type incompatibilities (GSI is used to accommodate conversion
3973 statements and must point to the statement). Return true iff the statement
3974 was modified. */
3976 static bool
3977 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
3978 ipa_parm_adjustment_vec adjustments)
3980 gimple stmt = *stmt_ptr;
3981 tree *lhs_p, *rhs_p;
3982 bool any;
3984 if (!gimple_assign_single_p (stmt))
3985 return false;
3987 rhs_p = gimple_assign_rhs1_ptr (stmt);
3988 lhs_p = gimple_assign_lhs_ptr (stmt);
3990 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
3991 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
3992 if (any)
3994 tree new_rhs = NULL_TREE;
3996 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
3998 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4000 /* V_C_Es of constructors can cause trouble (PR 42714). */
4001 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4002 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
4003 else
4004 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4006 else
4007 new_rhs = fold_build1_loc (gimple_location (stmt),
4008 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4009 *rhs_p);
4011 else if (REFERENCE_CLASS_P (*rhs_p)
4012 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4013 && !is_gimple_reg (*lhs_p))
4014 /* This can happen when an assignment in between two single field
4015 structures is turned into an assignment in between two pointers to
4016 scalars (PR 42237). */
4017 new_rhs = *rhs_p;
4019 if (new_rhs)
4021 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4022 true, GSI_SAME_STMT);
4024 gimple_assign_set_rhs_from_tree (gsi, tmp);
4027 return true;
4030 return false;
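/* Sketch of the repair performed above (names hypothetical): if the
   two sides of
     D.2030 = ISRA.6;
   end up with incompatible types after modification, the RHS is
   rebuilt as VIEW_CONVERT_EXPR<lhs type>(ISRA.6), gimplified into a
   separate statement by force_gimple_operand_gsi, and only the
   resulting temporary is assigned.  */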
4033 /* Traverse the function body and perform all modifications as described in
4034 ADJUSTMENTS. */
4036 static void
4037 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4039 basic_block bb;
4041 FOR_EACH_BB (bb)
4043 gimple_stmt_iterator gsi;
4044 bool bb_changed = false;
4046 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4047 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4049 gsi = gsi_start_bb (bb);
4050 while (!gsi_end_p (gsi))
4052 gimple stmt = gsi_stmt (gsi);
4053 bool modified = false;
4054 tree *t;
4055 unsigned i;
4057 switch (gimple_code (stmt))
4059 case GIMPLE_RETURN:
4060 t = gimple_return_retval_ptr (stmt);
4061 if (*t != NULL_TREE)
4062 modified |= sra_ipa_modify_expr (t, true, adjustments);
4063 break;
4065 case GIMPLE_ASSIGN:
4066 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4067 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4068 break;
4070 case GIMPLE_CALL:
4071 /* Operands must be processed before the lhs. */
4072 for (i = 0; i < gimple_call_num_args (stmt); i++)
4074 t = gimple_call_arg_ptr (stmt, i);
4075 modified |= sra_ipa_modify_expr (t, true, adjustments);
4078 if (gimple_call_lhs (stmt))
4080 t = gimple_call_lhs_ptr (stmt);
4081 modified |= sra_ipa_modify_expr (t, false, adjustments);
4082 modified |= replace_removed_params_ssa_names (stmt,
4083 adjustments);
4085 break;
4087 case GIMPLE_ASM:
4088 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4090 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4091 modified |= sra_ipa_modify_expr (t, true, adjustments);
4093 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4095 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4096 modified |= sra_ipa_modify_expr (t, false, adjustments);
4098 break;
4100 default:
4101 break;
4104 if (modified)
4106 bb_changed = true;
4107 update_stmt (stmt);
4108 maybe_clean_eh_stmt (stmt);
4110 gsi_next (&gsi);
4112 if (bb_changed)
4113 gimple_purge_dead_eh_edges (bb);
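/* The EH handling above is deliberately two-step: maybe_clean_eh_stmt
   removes the EH region annotation from a statement that can no longer
   throw after rewriting, and gimple_purge_dead_eh_edges, run once per
   modified block, deletes any exception edges that thereby became
   dead.  */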
4117 /* Call gimple_debug_bind_reset_value on all debug statements describing
4118 gimple register parameters that are being removed or replaced. */
4120 static void
4121 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4123 int i, len;
4125 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4126 for (i = 0; i < len; i++)
4128 struct ipa_parm_adjustment *adj;
4129 imm_use_iterator ui;
4130 gimple stmt;
4131 tree name;
4133 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4134 if (adj->copy_param || !is_gimple_reg (adj->base))
4135 continue;
4136 name = gimple_default_def (cfun, adj->base);
4137 if (!name)
4138 continue;
4139 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4141 /* All other users must have been removed by
4142 ipa_sra_modify_function_body. */
4143 gcc_assert (is_gimple_debug (stmt));
4144 gimple_debug_bind_reset_value (stmt);
4145 update_stmt (stmt);
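/* For example (a sketch, names hypothetical): a leftover debug bind
   for a removed register parameter "n", such as
     # DEBUG n => n_1(D)
   is reset so that it binds to nothing,
     # DEBUG n => NULL
   making debug info report the value as optimized away rather than
   referencing a location that no longer exists.  */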
4150 /* Return true iff all callers have at least as many actual arguments as there
4151 are formal parameters in the current function. */
4153 static bool
4154 all_callers_have_enough_arguments_p (struct cgraph_node *node)
4156 struct cgraph_edge *cs;
4157 for (cs = node->callers; cs; cs = cs->next_caller)
4158 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4159 return false;
4161 return true;
4165 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4167 static void
4168 convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4170 tree old_cur_fndecl = current_function_decl;
4171 struct cgraph_edge *cs;
4172 basic_block this_block;
4173 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4175 for (cs = node->callers; cs; cs = cs->next_caller)
4177 current_function_decl = cs->caller->decl;
4178 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4180 if (dump_file)
4181 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4182 cs->caller->uid, cs->callee->uid,
4183 cgraph_node_name (cs->caller),
4184 cgraph_node_name (cs->callee));
4186 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4188 pop_cfun ();
4191 for (cs = node->callers; cs; cs = cs->next_caller)
4192 if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
4194 compute_inline_parameters (cs->caller);
4195 bitmap_set_bit (recomputed_callers, cs->caller->uid);
4197 BITMAP_FREE (recomputed_callers);
4199 current_function_decl = old_cur_fndecl;
4201 if (!encountered_recursive_call)
4202 return;
4204 FOR_EACH_BB (this_block)
4206 gimple_stmt_iterator gsi;
4208 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4210 gimple stmt = gsi_stmt (gsi);
4211 tree call_fndecl;
4212 if (gimple_code (stmt) != GIMPLE_CALL)
4213 continue;
4214 call_fndecl = gimple_call_fndecl (stmt);
4215 if (call_fndecl && cgraph_get_node (call_fndecl) == node)
4217 if (dump_file)
4218 fprintf (dump_file, "Adjusting recursive call\n");
4219 ipa_modify_call_arguments (NULL, stmt, adjustments);
4224 return;
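/* Note on the loop above: when the function is self-recursive, the
   calls in its own, already rewritten body are located by scanning
   every statement for a call whose fndecl resolves back to NODE; these
   are adjusted directly on the statement, passing NULL instead of a
   cgraph edge.  */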
4227 /* Create an abstract origin declaration for OLD_DECL and make it the
4228 abstract origin of OLD_DECL so that its original parameters are preserved
4229 in debug information. */
4231 static void
4232 create_abstract_origin (tree old_decl)
4234 if (!DECL_ABSTRACT_ORIGIN (old_decl))
4236 tree new_decl = copy_node (old_decl);
4238 DECL_ABSTRACT (new_decl) = 1;
4239 SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
4240 SET_DECL_RTL (new_decl, NULL);
4241 DECL_STRUCT_FUNCTION (new_decl) = NULL;
4242 DECL_ARTIFICIAL (old_decl) = 1;
4243 DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
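/* In effect (a sketch): the pristine FUNCTION_DECL is cloned, the
   clone is marked DECL_ABSTRACT and stripped of its assembler name,
   RTL and struct function, and the real decl points to it via
   DECL_ABSTRACT_ORIGIN, so debug info can still describe the original
   parameter list after IPA-SRA rewrites it.  */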
4247 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4248 as given in ADJUSTMENTS. */
4250 static void
4251 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4253 struct cgraph_node *alias;
4254 for (alias = node->same_body; alias; alias = alias->next)
4255 ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
4256 /* current_function_decl must be handled last, after same_body aliases,
4257 as following functions will use what it computed. */
4258 create_abstract_origin (current_function_decl);
4259 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4260 ipa_sra_modify_function_body (adjustments);
4261 sra_ipa_reset_debug_stmts (adjustments);
4262 convert_callers (node, adjustments);
4263 cgraph_make_node_local (node);
4264 return;
4267 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
4268 attributes, return true otherwise. NODE is the cgraph node of the current
4269 function. */
4271 static bool
4272 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4274 if (!cgraph_node_can_be_local_p (node))
4276 if (dump_file)
4277 fprintf (dump_file, "Function not local to this compilation unit.\n");
4278 return false;
4281 if (DECL_VIRTUAL_P (current_function_decl))
4283 if (dump_file)
4284 fprintf (dump_file, "Function is a virtual method.\n");
4285 return false;
4288 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4289 && node->global.size >= MAX_INLINE_INSNS_AUTO)
4291 if (dump_file)
4292 fprintf (dump_file, "Function too big to be made truly local.\n");
4293 return false;
4296 if (!node->callers)
4298 if (dump_file)
4299 fprintf (dump_file,
4300 "Function has no callers in this compilation unit.\n");
4301 return false;
4304 if (cfun->stdarg)
4306 if (dump_file)
4307 fprintf (dump_file, "Function uses stdarg.\n");
4308 return false;
4311 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4312 return false;
4314 return true;
4317 /* Perform early interprocedural SRA. */
4319 static unsigned int
4320 ipa_early_sra (void)
4322 struct cgraph_node *node = cgraph_node (current_function_decl);
4323 ipa_parm_adjustment_vec adjustments;
4324 int ret = 0;
4326 if (!ipa_sra_preliminary_function_checks (node))
4327 return 0;
4329 sra_initialize ();
4330 sra_mode = SRA_MODE_EARLY_IPA;
4332 if (!find_param_candidates ())
4334 if (dump_file)
4335 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4336 goto simple_out;
4339 if (!all_callers_have_enough_arguments_p (node))
4341 if (dump_file)
4342 fprintf (dump_file, "There are callers with an insufficient number of "
4343 "arguments.\n");
4344 goto simple_out;
4347 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4348 func_param_count
4349 * last_basic_block_for_function (cfun));
4350 final_bbs = BITMAP_ALLOC (NULL);
4352 scan_function ();
4353 if (encountered_apply_args)
4355 if (dump_file)
4356 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4357 goto out;
4360 if (encountered_unchangable_recursive_call)
4362 if (dump_file)
4363 fprintf (dump_file, "Function calls itself with an insufficient "
4364 "number of arguments.\n");
4365 goto out;
4368 adjustments = analyze_all_param_acesses ();
4369 if (!adjustments)
4370 goto out;
4371 if (dump_file)
4372 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4374 modify_function (node, adjustments);
4375 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4376 ret = TODO_update_ssa;
4378 statistics_counter_event (cfun, "Unused parameters deleted",
4379 sra_stats.deleted_unused_parameters);
4380 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4381 sra_stats.scalar_by_ref_to_by_val);
4382 statistics_counter_event (cfun, "Aggregate parameters broken up",
4383 sra_stats.aggregate_params_reduced);
4384 statistics_counter_event (cfun, "Aggregate parameter components created",
4385 sra_stats.param_reductions_created);
4387 out:
4388 BITMAP_FREE (final_bbs);
4389 free (bb_dereferences);
4390 simple_out:
4391 sra_deinitialize ();
4392 return ret;
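/* For orientation, the overall flow above is: preliminary checks ->
   find_param_candidates -> scan_function -> analyze_all_param_acesses
   producing the adjustment vector -> modify_function, with
   TODO_update_ssa requested only if the function was actually
   changed.  */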
4395 /* Return true if early IPA-SRA shall be performed. */
4396 static bool
4397 ipa_early_sra_gate (void)
4399 return flag_ipa_sra;
4402 struct gimple_opt_pass pass_early_ipa_sra =
4405 GIMPLE_PASS,
4406 "eipa_sra", /* name */
4407 ipa_early_sra_gate, /* gate */
4408 ipa_early_sra, /* execute */
4409 NULL, /* sub */
4410 NULL, /* next */
4411 0, /* static_pass_number */
4412 TV_IPA_SRA, /* tv_id */
4413 0, /* properties_required */
4414 0, /* properties_provided */
4415 0, /* properties_destroyed */
4416 0, /* todo_flags_start */
4417 TODO_dump_func | TODO_dump_cgraph /* todo_flags_finish */