/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference being that early
   SRA does not scalarize unions which are used as the result in a
   GIMPLE_RETURN statement because together with inlining this can lead to
   weird type conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the
      access and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according
      to their offset and size and then scanned for partially overlapping
      accesses (i.e. those which overlap but one is not entirely within
      another).  Such an access disqualifies the whole aggregate from being
      scalarized.

      If there is no such inhibiting overlap, a representative access
      structure is chosen for every unique combination of offset and size.
      Afterwards, the pass builds a set of trees from these structures, in
      which children of an access are within their parent (in terms of offset
      and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
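/* For illustration only (a minimal sketch, not an excerpt of actual pass
   output): given a candidate like

     struct point { int x; int y; } p;
     p.x = a;
     p.y = b;
     use (p.x + p.y);

   intraprocedural SRA creates scalar replacements (named along the lines of
   p$x and p$y, see make_fancy_name below) and rewrites the accesses to

     p$x = a;
     p$y = b;
     use (p$x + p$y);

   after which the aggregate itself may become dead and removable.  */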
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater than or equal to the offset of the root and offset+size is smaller
   than or equal to offset+size of the root.  Children of an access are sorted
   by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
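/* For illustration only (a hypothetical 32-bit layout): for

     struct s { struct { int a; int b; } in; int c; };

   the group representatives could form the tree

     <offset 0,  size 96>       the whole of s
       <offset 0,  size 64>       s.in
         <offset 0,  size 32>       s.in.a
         <offset 32, size 32>       s.in.b
       <offset 64, size 32>       s.c

   where each child is "within" its parent and siblings are sorted by
   offset.  */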
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component
     reference.  If EXPR isn't a component reference, just set `BASE = EXPR',
     `OFFSET = 0' and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not
     be scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.
   They are used to propagate subaccesses from rhs to lhs as long as they
   don't conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed
     an expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root
   in ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses
   rooted in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not
   allow scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);
  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of
   fields.  It also returns false if any of these records contains a
   bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
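/* For illustration only: `struct s { int i; struct { float f; } in; }'
   consists entirely of records with gimple register fields and therefore
   satisfies type_consists_of_records_p, whereas `struct t { int b : 3; }'
   (a bit-field) or `struct u { int a[2]; }' (an array member) does not.  */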
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression
   for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in
     other handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must
   be true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than
	 an assign statement and thus cannot be removed even if we had a
	 scalar replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block
   in modes in which it matters, return true iff they have been disqualified.
   RHS may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as
   there are formal parameters of the function currently processed by
   IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
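/* For illustration only: under this ordering, accesses <offset 0, size 64>,
   <offset 32, size 32> and <offset 0, size 32> into the same variable sort
   as

     <0, 64>  <0, 32>  <32, 32>

   i.e. by ascending offset with the bigger (enclosing) access first among
   equal offsets, so that a future group representative precedes the accesses
   it will contain.  */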
/* Append a name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable built from
   EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is
   used to insert new statements either before or below the current one as
   specified by INSERT_AFTER.  This function is not capable of handling
   bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  /* If prev_base were always an originally performed access
     we could extract more optimistic alignment information
     by looking at the access mode.  That would constrain the
     alignment of base + base_offset which we would need to
     adjust according to offset.  */
  if (!get_pointer_alignment_1 (base, &align, &misalign))
    {
      gcc_assert (misalign == 0);
      if (TREE_CODE (prev_base) == MEM_REF
	  || TREE_CODE (prev_base) == TARGET_MEM_REF)
	align = TYPE_ALIGN (TREE_TYPE (prev_base));
    }
  misalign += (double_int_sext (tree_to_double_int (off),
				TYPE_PRECISION (TREE_TYPE (off))).low
	       * BITS_PER_UNIT);
  misalign = misalign & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
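/* For illustration only (a sketch of the result, assuming a 32-bit int): for
   BASE `s', OFFSET 32 and EXP_TYPE `int', the returned reference is roughly
   MEM[(int *) &s + 4B]; only when the address of BASE is not constant
   (e.g. a[i_1].f) does the function need GSI to materialize the address in a
   new SSA name first.  */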
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
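/* For illustration only (assuming a 32-bit int): given TYPE
   `struct s { int a; float f; }', OFFSET 32 and EXP_TYPE `float', the
   function rewrites *RES from `x' to `x.f' and returns true; for an offset
   that hits no field boundary it simply returns false.  */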
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print a message to the dump file explaining why a variable was
   rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables with a type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;
  const char *msg;

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type))
	{
	  reject (var, "not aggregate");
	  continue;
	}
      if (needs_to_live_in_memory (var))
	{
	  reject (var, "needs to live in memory");
	  continue;
	}
      if (TREE_THIS_VOLATILE (var))
	{
	  reject (var, "is volatile");
	  continue;
	}
      if (!COMPLETE_TYPE_P (type))
	{
	  reject (var, "has incomplete type");
	  continue;
	}
      if (!host_integerp (TYPE_SIZE (type), 1))
	{
	  reject (var, "type size not fixed");
	  continue;
	}
      if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
	{
	  reject (var, "type size is zero");
	  continue;
	}
      if (type_internals_preclude_sra_p (type, &msg))
	{
	  reject (var, msg);
	  continue;
	}
      if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact
	     but we also want to schedule it rather late.  Thus we ignore it
	     in the early pass.  */
	  (sra_mode == SRA_MODE_EARLY_INTRA
	   && is_va_list_type (type)))
	{
	  reject (var, "is va_list");
	  continue;
	}

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the
   first one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and
   a few other properties.  Return the variable declaration and store it also
   to ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  add_referenced_var (repl);
  if (!access->grp_partial_lhs
      && rename)
    mark_sym_for_renaming (repl);

  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or
	 variable and that get_ref_base_and_extent works properly on the
	 expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
	    break;
	  default:
	    break;
	  }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}
1914 /* Return the scalar replacement of ACCESS; create it if it does not exist yet. */
1916 static inline tree
1917 get_access_replacement (struct access *access)
1919 gcc_assert (access->grp_to_be_replaced);
1921 if (!access->replacement_decl)
1922 access->replacement_decl = create_access_replacement (access, true);
1923 return access->replacement_decl;
1926 /* Return the scalar replacement of ACCESS; create it if it does not exist
1927 yet but do not mark it for renaming. */
1929 static inline tree
1930 get_unrenamed_access_replacement (struct access *access)
1932 gcc_assert (!access->grp_to_be_replaced);
1934 if (!access->replacement_decl)
1935 access->replacement_decl = create_access_replacement (access, false);
1936 return access->replacement_decl;
1940 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1941 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1942 to by it is not "within" the root. Return false iff some accesses partially
1943 overlap. */
1945 static bool
1946 build_access_subtree (struct access **access)
1948 struct access *root = *access, *last_child = NULL;
1949 HOST_WIDE_INT limit = root->offset + root->size;
1951 *access = (*access)->next_grp;
1952 while (*access && (*access)->offset + (*access)->size <= limit)
1954 if (!last_child)
1955 root->first_child = *access;
1956 else
1957 last_child->next_sibling = *access;
1958 last_child = *access;
1960 if (!build_access_subtree (access))
1961 return false;
1964 if (*access && (*access)->offset < limit)
1965 return false;
1967 return true;
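
/* An illustrative sketch of the result (hypothetical layout, assuming 32-bit
   ints and floats): for

     struct S { int i; struct { float f; float g; } in; } s;

   with representatives for s.i at (offset 0, size 32), s.in at (32, 64) and
   s.in.f at (32, 32), s.in.f becomes a child of s.in because it lies entirely
   within it, while s.i and s.in end up as top-level siblings.  An additional
   access at (48, 32) would start inside s.in.f but extend past its limit, so
   the post-loop check above would return false.  */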
1970 /* Build a tree of access representatives, ACCESS is the pointer to the first
1971 one, others are linked in a list by the next_grp field. Return false iff
1972 some accesses partially overlap. */
1974 static bool
1975 build_access_trees (struct access *access)
1977 while (access)
1979 struct access *root = access;
1981 if (!build_access_subtree (&access))
1982 return false;
1983 root->next_grp = access;
1985 return true;
1988 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
1989 array. */
1991 static bool
1992 expr_with_var_bounded_array_refs_p (tree expr)
1994 while (handled_component_p (expr))
1996 if (TREE_CODE (expr) == ARRAY_REF
1997 && !host_integerp (array_ref_low_bound (expr), 0))
1998 return true;
1999 expr = TREE_OPERAND (expr, 0);
2001 return false;
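
/* For example (an illustrative case, not from this file): an ARRAY_REF into
   an Ada or Fortran array whose lower bound is only known at run time has a
   non-constant low bound and makes the predicate above return true; plain C
   arrays always have a constant low bound of zero.  */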
2004 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2005 they seem beneficial and ALLOW_REPLACEMENTS allows it. Also set all sorts of
2006 access flags appropriately along the way, notably propagate grp_read,
2007 grp_assignment_read, grp_write, grp_assignment_write and
2008 grp_total_scalarization from PARENT down to ROOT.
2010 Creating a replacement for a scalar access is considered beneficial if its
2011 grp_hint is set (this means we are either attempting total scalarization or
2012 there is more than one direct read access) or according to the following
2013 table:
2015 Access written to through a scalar type (once or more times)
2017 | Written to in an assignment statement
2019 | | Access read as scalar _once_
2020 | | |
2021 | | | Read in an assignment statement
2022 | | | |
2023 | | | | Scalarize Comment
2024 -----------------------------------------------------------------------------
2025 0 0 0 0  -    No access for the scalar
2026 0 0 0 1  -    No access for the scalar
2027 0 0 1 0  No   Single read - won't help
2028 0 0 1 1  No   The same case
2029 0 1 0 0  -    No access for the scalar
2030 0 1 0 1  -    No access for the scalar
2031 0 1 1 0  Yes  s = *g; return s.i;
2032 0 1 1 1  Yes  The same case as above
2033 1 0 0 0  No   Won't help
2034 1 0 0 1  Yes  s.i = 1; *g = s;
2035 1 0 1 0  Yes  s.i = 5; g = s.i;
2036 1 0 1 1  Yes  The same case as above
2037 1 1 0 0  No   Won't help
2038 1 1 0 1  Yes  s.i = 1; *g = s;
2039 1 1 1 0  Yes  s = *g; return s.i;
2040 1 1 1 1  Yes  Any of the above yeses */
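
/* A hedged sketch of the first "Yes" row above, with hypothetical names:

     struct S { int i; } s;
     s = *g;
     return s.i;

   The aggregate assignment makes the access assignment-written and the return
   reads s.i as a scalar once, so a replacement (say SR.1) is deemed
   beneficial; after the copy the field is loaded into SR.1 and the return
   value becomes SR.1.  */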
2042 static bool
2043 analyze_access_subtree (struct access *root, struct access *parent,
2044 bool allow_replacements)
2046 struct access *child;
2047 HOST_WIDE_INT limit = root->offset + root->size;
2048 HOST_WIDE_INT covered_to = root->offset;
2049 bool scalar = is_gimple_reg_type (root->type);
2050 bool hole = false, sth_created = false;
2052 if (parent)
2054 if (parent->grp_read)
2055 root->grp_read = 1;
2056 if (parent->grp_assignment_read)
2057 root->grp_assignment_read = 1;
2058 if (parent->grp_write)
2059 root->grp_write = 1;
2060 if (parent->grp_assignment_write)
2061 root->grp_assignment_write = 1;
2062 if (parent->grp_total_scalarization)
2063 root->grp_total_scalarization = 1;
2066 if (root->grp_unscalarizable_region)
2067 allow_replacements = false;
2069 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2070 allow_replacements = false;
2072 for (child = root->first_child; child; child = child->next_sibling)
2074 hole |= covered_to < child->offset;
2075 sth_created |= analyze_access_subtree (child, root,
2076 allow_replacements && !scalar);
2078 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2079 root->grp_total_scalarization &= child->grp_total_scalarization;
2080 if (child->grp_covered)
2081 covered_to += child->size;
2082 else
2083 hole = true;
2086 if (allow_replacements && scalar && !root->first_child
2087 && (root->grp_hint
2088 || ((root->grp_scalar_read || root->grp_assignment_read)
2089 && (root->grp_scalar_write || root->grp_assignment_write))))
2091 bool new_integer_type;
2092 /* Always create access replacements that cover the whole access.
2093 For integral types this means the precision has to match.
2094 Avoid assumptions based on the integral type kind, too. */
2095 if (INTEGRAL_TYPE_P (root->type)
2096 && (TREE_CODE (root->type) != INTEGER_TYPE
2097 || TYPE_PRECISION (root->type) != root->size)
2098 /* But leave bitfield accesses alone. */
2099 && (root->offset % BITS_PER_UNIT) == 0)
2101 tree rt = root->type;
2102 root->type = build_nonstandard_integer_type (root->size,
2103 TYPE_UNSIGNED (rt));
2104 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2105 root->base, root->offset,
2106 root->type, NULL, false);
2107 new_integer_type = true;
2109 else
2110 new_integer_type = false;
2112 if (dump_file && (dump_flags & TDF_DETAILS))
2114 fprintf (dump_file, "Marking ");
2115 print_generic_expr (dump_file, root->base, 0);
2116 fprintf (dump_file, " offset: %u, size: %u ",
2117 (unsigned) root->offset, (unsigned) root->size);
2118 fprintf (dump_file, " to be replaced%s.\n",
2119 new_integer_type ? " with an integer": "");
2122 root->grp_to_be_replaced = 1;
2123 sth_created = true;
2124 hole = false;
2126 else
2128 if (covered_to < limit)
2129 hole = true;
2130 if (scalar)
2131 root->grp_total_scalarization = 0;
2134 if (sth_created
2135 && (!hole || root->grp_total_scalarization))
2137 root->grp_covered = 1;
2138 return true;
2140 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2141 root->grp_unscalarized_data = 1; /* not covered and written to */
2142 if (sth_created)
2143 return true;
2144 return false;
2147 /* Analyze all access trees linked by next_grp by means of
2148 analyze_access_subtree. */
2149 static bool
2150 analyze_access_trees (struct access *access)
2152 bool ret = false;
2154 while (access)
2156 if (analyze_access_subtree (access, NULL, true))
2157 ret = true;
2158 access = access->next_grp;
2161 return ret;
2164 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2165 with size SIZE would conflict with an already existing one. If exactly such
2166 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2168 static bool
2169 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2170 HOST_WIDE_INT size, struct access **exact_match)
2172 struct access *child;
2174 for (child = lacc->first_child; child; child = child->next_sibling)
2176 if (child->offset == norm_offset && child->size == size)
2178 *exact_match = child;
2179 return true;
2182 if (child->offset < norm_offset + size
2183 && child->offset + child->size > norm_offset)
2184 return true;
2187 return false;
2190 /* Create a new child access of PARENT, with all properties just like MODEL
2191 except for its offset and with its grp_write true and grp_read false.
2192 Return the new access or NULL if it cannot be created. Note that this access
2193 is created long after all splicing and sorting; it is not located in any
2194 access vector and is automatically a representative of its group. */
2196 static struct access *
2197 create_artificial_child_access (struct access *parent, struct access *model,
2198 HOST_WIDE_INT new_offset)
2200 struct access *access;
2201 struct access **child;
2202 tree expr = parent->base;
2204 gcc_assert (!model->grp_unscalarizable_region);
2206 access = (struct access *) pool_alloc (access_pool);
2207 memset (access, 0, sizeof (struct access));
2208 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2209 model->type))
2211 access->grp_no_warning = true;
2212 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2213 new_offset, model, NULL, false);
2216 access->base = parent->base;
2217 access->expr = expr;
2218 access->offset = new_offset;
2219 access->size = model->size;
2220 access->type = model->type;
2221 access->grp_write = true;
2222 access->grp_read = false;
2224 child = &parent->first_child;
2225 while (*child && (*child)->offset < new_offset)
2226 child = &(*child)->next_sibling;
2228 access->next_sibling = *child;
2229 *child = access;
2231 return access;
2235 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2236 true if any new subaccess was created. Additionally, if RACC is a scalar
2237 access but LACC is not, change the type of the latter, if possible. */
2239 static bool
2240 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2242 struct access *rchild;
2243 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2244 bool ret = false;
2246 if (is_gimple_reg_type (lacc->type)
2247 || lacc->grp_unscalarizable_region
2248 || racc->grp_unscalarizable_region)
2249 return false;
2251 if (is_gimple_reg_type (racc->type))
2253 if (!lacc->first_child && !racc->first_child)
2255 tree t = lacc->base;
2257 lacc->type = racc->type;
2258 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2259 lacc->offset, racc->type))
2260 lacc->expr = t;
2261 else
2263 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2264 lacc->base, lacc->offset,
2265 racc, NULL, false);
2266 lacc->grp_no_warning = true;
2269 return false;
2272 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2274 struct access *new_acc = NULL;
2275 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2277 if (rchild->grp_unscalarizable_region)
2278 continue;
2280 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2281 &new_acc))
2283 if (new_acc)
2285 rchild->grp_hint = 1;
2286 new_acc->grp_hint |= new_acc->grp_read;
2287 if (rchild->first_child)
2288 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2290 continue;
2293 rchild->grp_hint = 1;
2294 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2295 if (new_acc)
2297 ret = true;
2298 if (racc->first_child)
2299 propagate_subaccesses_across_link (new_acc, rchild);
2303 return ret;
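
/* An illustrative example of the propagation above (hypothetical candidates a
   and b of the same structure type):

     a.f = 1;
     b = a;

   The scalar access a.f is a subaccess of the RHS of the assignment, so it is
   propagated across the assign link to b, creating an artificial child access
   of b at the offset of f; the aggregate copy can then be rewritten as a copy
   between the scalar replacements.  */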
2306 /* Propagate all subaccesses across assignment links. */
2308 static void
2309 propagate_all_subaccesses (void)
2311 while (work_queue_head)
2313 struct access *racc = pop_access_from_work_queue ();
2314 struct assign_link *link;
2316 gcc_assert (racc->first_link);
2318 for (link = racc->first_link; link; link = link->next)
2320 struct access *lacc = link->lacc;
2322 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2323 continue;
2324 lacc = lacc->group_representative;
2325 if (propagate_subaccesses_across_link (lacc, racc)
2326 && lacc->first_link)
2327 add_access_to_work_queue (lacc);
2332 /* Go through all accesses collected throughout the (intraprocedural) analysis
2333 stage, exclude overlapping ones, identify representatives and build trees
2334 out of them, making decisions about scalarization on the way. Return true
2335 iff there are any to-be-scalarized variables after this stage. */
2337 static bool
2338 analyze_all_variable_accesses (void)
2340 int res = 0;
2341 bitmap tmp = BITMAP_ALLOC (NULL);
2342 bitmap_iterator bi;
2343 unsigned i, max_total_scalarization_size;
2345 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2346 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
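  /* E.g. on a hypothetical 64-bit target with a MOVE_RATIO of 8 this would
     allow total scalarization of aggregates of up to 8 * 8 * 8 = 512 bits
     (64 bytes); the actual values are target-dependent.  */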
2348 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2349 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2350 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2352 tree var = referenced_var (i);
2354 if (TREE_CODE (var) == VAR_DECL
2355 && type_consists_of_records_p (TREE_TYPE (var)))
2357 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2358 <= max_total_scalarization_size)
2360 completely_scalarize_var (var);
2361 if (dump_file && (dump_flags & TDF_DETAILS))
2363 fprintf (dump_file, "Will attempt to totally scalarize ");
2364 print_generic_expr (dump_file, var, 0);
2365 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2368 else if (dump_file && (dump_flags & TDF_DETAILS))
2370 fprintf (dump_file, "Too big to totally scalarize: ");
2371 print_generic_expr (dump_file, var, 0);
2372 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2377 bitmap_copy (tmp, candidate_bitmap);
2378 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2380 tree var = referenced_var (i);
2381 struct access *access;
2383 access = sort_and_splice_var_accesses (var);
2384 if (!access || !build_access_trees (access))
2385 disqualify_candidate (var,
2386 "No or inhibitingly overlapping accesses.");
2389 propagate_all_subaccesses ();
2391 bitmap_copy (tmp, candidate_bitmap);
2392 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2394 tree var = referenced_var (i);
2395 struct access *access = get_first_repr_for_decl (var);
2397 if (analyze_access_trees (access))
2399 res++;
2400 if (dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file, "\nAccess trees for ");
2403 print_generic_expr (dump_file, var, 0);
2404 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2405 dump_access_tree (dump_file, access);
2406 fprintf (dump_file, "\n");
2409 else
2410 disqualify_candidate (var, "No scalar replacements to be created.");
2413 BITMAP_FREE (tmp);
2415 if (res)
2417 statistics_counter_event (cfun, "Scalarized aggregates", res);
2418 return true;
2420 else
2421 return false;
2424 /* Generate statements copying scalar replacements of accesses within a subtree
2425 into or out of AGG. ACCESS, all its children, siblings and their children
2426 are to be processed. AGG is an expression of aggregate type (it can be a
2427 declaration but does not have to be; it can for example also be a MEM_REF or
2428 a series of handled components). TOP_OFFSET is the offset of the processed
2429 subtree which has to be subtracted from offsets of individual accesses to
2430 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2431 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2432 otherwise copy all. GSI is a statement iterator used to place the new
2433 statements. WRITE should be true when the statements should write from AGG
2434 to the replacements and false if vice versa. If INSERT_AFTER is true, new
2435 statements will be added after the current statement in GSI, otherwise they
2436 will be added before it. */
2438 static void
2439 generate_subtree_copies (struct access *access, tree agg,
2440 HOST_WIDE_INT top_offset,
2441 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2442 gimple_stmt_iterator *gsi, bool write,
2443 bool insert_after, location_t loc)
2447 if (chunk_size && access->offset >= start_offset + chunk_size)
2448 return;
2450 if (access->grp_to_be_replaced
2451 && (chunk_size == 0
2452 || access->offset + access->size > start_offset))
2454 tree expr, repl = get_access_replacement (access);
2455 gimple stmt;
2457 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2458 access, gsi, insert_after);
2460 if (write)
2462 if (access->grp_partial_lhs)
2463 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2464 !insert_after,
2465 insert_after ? GSI_NEW_STMT
2466 : GSI_SAME_STMT);
2467 stmt = gimple_build_assign (repl, expr);
2469 else
2471 TREE_NO_WARNING (repl) = 1;
2472 if (access->grp_partial_lhs)
2473 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2474 !insert_after,
2475 insert_after ? GSI_NEW_STMT
2476 : GSI_SAME_STMT);
2477 stmt = gimple_build_assign (expr, repl);
2479 gimple_set_location (stmt, loc);
2481 if (insert_after)
2482 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2483 else
2484 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2485 update_stmt (stmt);
2486 sra_stats.subtree_copies++;
2489 if (access->first_child)
2490 generate_subtree_copies (access->first_child, agg, top_offset,
2491 start_offset, chunk_size, gsi,
2492 write, insert_after, loc);
2494 access = access->next_sibling;
2496 while (access);
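
/* A hedged sketch of the effect, with hypothetical names: for a candidate

     struct S { int i; float f; } s;

   whose fields both have replacements SR.1 and SR.2, calling the function
   above on the root's children with WRITE false and AGG being some compatible
   aggregate expression "other" emits

     other.i = SR.1;
     other.f = SR.2;

   whereas WRITE true emits the assignments in the opposite direction.  */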
2499 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2500 the root of the subtree to be processed. GSI is the statement iterator used
2501 for inserting statements which are added after the current statement if
2502 INSERT_AFTER is true or before it otherwise. */
2504 static void
2505 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2506 bool insert_after, location_t loc)
2509 struct access *child;
2511 if (access->grp_to_be_replaced)
2513 gimple stmt;
2515 stmt = gimple_build_assign (get_access_replacement (access),
2516 build_zero_cst (access->type));
2517 if (insert_after)
2518 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2519 else
2520 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2521 update_stmt (stmt);
2522 gimple_set_location (stmt, loc);
2525 for (child = access->first_child; child; child = child->next_sibling)
2526 init_subtree_with_zero (child, gsi, insert_after, loc);
2529 /* Search for an access representative for the given expression EXPR and
2530 return it or NULL if it cannot be found. */
2532 static struct access *
2533 get_access_for_expr (tree expr)
2535 HOST_WIDE_INT offset, size, max_size;
2536 tree base;
2538 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2539 a different size than the size of its argument and we need the latter
2540 one. */
2541 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2542 expr = TREE_OPERAND (expr, 0);
2544 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2545 if (max_size == -1 || !DECL_P (base))
2546 return NULL;
2548 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2549 return NULL;
2551 return get_var_base_offset_size_access (base, offset, max_size);
2554 /* Replace the expression EXPR with a scalar replacement if there is one and
2555 generate other statements to do type conversion or subtree copying if
2556 necessary. GSI is used to place newly created statements, WRITE is true if
2557 the expression is being written to (it is on a LHS of a statement or output
2558 in an assembly statement). */
2560 static bool
2561 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2563 location_t loc;
2564 struct access *access;
2565 tree type, bfr;
2567 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2569 bfr = *expr;
2570 expr = &TREE_OPERAND (*expr, 0);
2572 else
2573 bfr = NULL_TREE;
2575 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2576 expr = &TREE_OPERAND (*expr, 0);
2577 access = get_access_for_expr (*expr);
2578 if (!access)
2579 return false;
2580 type = TREE_TYPE (*expr);
2582 loc = gimple_location (gsi_stmt (*gsi));
2583 if (access->grp_to_be_replaced)
2585 tree repl = get_access_replacement (access);
2586 /* If we replace a non-register typed access simply use the original
2587 access expression to extract the scalar component afterwards.
2588 This happens if scalarizing a function return value or parameter
2589 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2590 gcc.c-torture/compile/20011217-1.c.
2592 We also want to use this when accessing a complex or vector which can
2593 be accessed as a different type too, potentially creating a need for
2594 type conversion (see PR42196) and when scalarized unions are involved
2595 in assembler statements (see PR42398). */
2596 if (!useless_type_conversion_p (type, access->type))
2598 tree ref;
2600 ref = build_ref_for_model (loc, access->base, access->offset, access,
2601 NULL, false);
2603 if (write)
2605 gimple stmt;
2607 if (access->grp_partial_lhs)
2608 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2609 false, GSI_NEW_STMT);
2610 stmt = gimple_build_assign (repl, ref);
2611 gimple_set_location (stmt, loc);
2612 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2614 else
2616 gimple stmt;
2618 if (access->grp_partial_lhs)
2619 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2620 true, GSI_SAME_STMT);
2621 stmt = gimple_build_assign (ref, repl);
2622 gimple_set_location (stmt, loc);
2623 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2626 else
2627 *expr = repl;
2628 sra_stats.exprs++;
2631 if (access->first_child)
2633 HOST_WIDE_INT start_offset, chunk_size;
2634 if (bfr
2635 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2636 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2638 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2639 start_offset = access->offset
2640 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2642 else
2643 start_offset = chunk_size = 0;
2645 generate_subtree_copies (access->first_child, access->base, 0,
2646 start_offset, chunk_size, gsi, write, write,
2647 loc);
2649 return true;
2652 /* Where scalar replacements of the RHS have been written to when a replacement
2653 of a LHS of an assignment cannot be directly loaded from a replacement of
2654 the RHS. */
2655 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2656 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2657 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2659 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2660 base aggregate if there are unscalarized data or directly to LHS of the
2661 statement that is pointed to by GSI otherwise. */
2663 static enum unscalarized_data_handling
2664 handle_unscalarized_data_in_subtree (struct access *top_racc,
2665 gimple_stmt_iterator *gsi)
2667 if (top_racc->grp_unscalarized_data)
2669 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2670 gsi, false, false,
2671 gimple_location (gsi_stmt (*gsi)));
2672 return SRA_UDH_RIGHT;
2674 else
2676 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2677 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2678 0, 0, gsi, false, false,
2679 gimple_location (gsi_stmt (*gsi)));
2680 return SRA_UDH_LEFT;
2685 /* Try to generate statements to load all sub-replacements in an access subtree
2686 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2687 If that is not possible, refresh the TOP_RACC base aggregate and load the
2688 accesses from it. LEFT_OFFSET is the offset of the whole left subtree being
2689 copied. NEW_GSI is a statement iterator used for statement insertions after
2690 the original assignment, OLD_GSI is used to insert statements before the
2691 assignment. *REFRESHED keeps track of whether we have needed to refresh
2692 replacements of the LHS and from which side of the assignment this
2693 takes place. */
2695 static void
2696 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2697 HOST_WIDE_INT left_offset,
2698 gimple_stmt_iterator *old_gsi,
2699 gimple_stmt_iterator *new_gsi,
2700 enum unscalarized_data_handling *refreshed)
2702 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2703 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2705 if (lacc->grp_to_be_replaced)
2707 struct access *racc;
2708 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2709 gimple stmt;
2710 tree rhs;
2712 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2713 if (racc && racc->grp_to_be_replaced)
2715 rhs = get_access_replacement (racc);
2716 if (!useless_type_conversion_p (lacc->type, racc->type))
2717 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2719 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2720 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2721 true, GSI_SAME_STMT);
2723 else
2725 /* No suitable access on the right hand side, need to load from
2726 the aggregate. See if we have to update it first... */
2727 if (*refreshed == SRA_UDH_NONE)
2728 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2729 old_gsi);
2731 if (*refreshed == SRA_UDH_LEFT)
2732 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2733 new_gsi, true);
2734 else
2735 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2736 new_gsi, true);
2737 if (lacc->grp_partial_lhs)
2738 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2739 false, GSI_NEW_STMT);
2742 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2743 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2744 gimple_set_location (stmt, loc);
2745 update_stmt (stmt);
2746 sra_stats.subreplacements++;
2748 else if (*refreshed == SRA_UDH_NONE
2749 && lacc->grp_read && !lacc->grp_covered)
2750 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2751 old_gsi);
2753 if (lacc->first_child)
2754 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2755 old_gsi, new_gsi, refreshed);
2759 /* Result code for SRA assignment modification. */
2760 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2761 SRA_AM_MODIFIED, /* stmt changed but not
2762 removed */
2763 SRA_AM_REMOVED }; /* stmt eliminated */
2765 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2766 to the assignment and GSI is the statement iterator pointing at it. Returns
2767 the same values as sra_modify_assign. */
2769 static enum assignment_mod_result
2770 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2772 tree lhs = gimple_assign_lhs (*stmt);
2773 struct access *acc;
2774 location_t loc;
2776 acc = get_access_for_expr (lhs);
2777 if (!acc)
2778 return SRA_AM_NONE;
2780 if (gimple_clobber_p (*stmt))
2782 /* Remove clobbers of fully scalarized variables, otherwise
2783 do nothing. */
2784 if (acc->grp_covered)
2786 unlink_stmt_vdef (*stmt);
2787 gsi_remove (gsi, true);
2788 release_defs (*stmt);
2789 return SRA_AM_REMOVED;
2791 else
2792 return SRA_AM_NONE;
2795 loc = gimple_location (*stmt);
2796 if (VEC_length (constructor_elt,
2797 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2799 /* I have never seen this code path trigger but if it can happen, the
2800 following should handle it gracefully. */
2801 if (access_has_children_p (acc))
2802 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2803 true, true, loc);
2804 return SRA_AM_MODIFIED;
2807 if (acc->grp_covered)
2809 init_subtree_with_zero (acc, gsi, false, loc);
2810 unlink_stmt_vdef (*stmt);
2811 gsi_remove (gsi, true);
2812 release_defs (*stmt);
2813 return SRA_AM_REMOVED;
2815 else
2817 init_subtree_with_zero (acc, gsi, true, loc);
2818 return SRA_AM_MODIFIED;
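
/* Illustrative effect, with hypothetical names: for a fully scalarized s with
   replacements SR.1 (int) and SR.2 (float), an assignment from an empty
   constructor such as

     s = {};

   is rewritten into SR.1 = 0; SR.2 = 0.0; and the original statement is
   removed; if s is not fully covered, the zeroing statements are added after
   it instead.  */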
2822 /* Create and return a new suitable default definition SSA_NAME for RACC which
2823 is an access describing an uninitialized part of an aggregate that is being
2824 loaded. */
2826 static tree
2827 get_repl_default_def_ssa_name (struct access *racc)
2829 tree repl, decl;
2831 decl = get_unrenamed_access_replacement (racc);
2833 repl = gimple_default_def (cfun, decl);
2834 if (!repl)
2836 repl = make_ssa_name (decl, gimple_build_nop ());
2837 set_default_def (decl, repl);
2840 return repl;
2843 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2844 somewhere in it. */
2846 static inline bool
2847 contains_bitfld_comp_ref_p (const_tree ref)
2849 while (handled_component_p (ref))
2851 if (TREE_CODE (ref) == COMPONENT_REF
2852 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2853 return true;
2854 ref = TREE_OPERAND (ref, 0);
2857 return false;
2860 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2861 bit-field field declaration somewhere in it. */
2863 static inline bool
2864 contains_vce_or_bfcref_p (const_tree ref)
2866 while (handled_component_p (ref))
2868 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2869 || (TREE_CODE (ref) == COMPONENT_REF
2870 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2871 return true;
2872 ref = TREE_OPERAND (ref, 0);
2875 return false;
2878 /* Examine both sides of the assignment statement pointed to by STMT, replace
2879 them with a scalar replacement if there is one and generate copying of
2880 replacements if scalarized aggregates have been used in the assignment. GSI
2881 is used to hold generated statements for type conversions and subtree
2882 copying. */
2884 static enum assignment_mod_result
2885 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2887 struct access *lacc, *racc;
2888 tree lhs, rhs;
2889 bool modify_this_stmt = false;
2890 bool force_gimple_rhs = false;
2891 location_t loc;
2892 gimple_stmt_iterator orig_gsi = *gsi;
2894 if (!gimple_assign_single_p (*stmt))
2895 return SRA_AM_NONE;
2896 lhs = gimple_assign_lhs (*stmt);
2897 rhs = gimple_assign_rhs1 (*stmt);
2899 if (TREE_CODE (rhs) == CONSTRUCTOR)
2900 return sra_modify_constructor_assign (stmt, gsi);
2902 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2903 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2904 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2906 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2907 gsi, false);
2908 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2909 gsi, true);
2910 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2913 lacc = get_access_for_expr (lhs);
2914 racc = get_access_for_expr (rhs);
2915 if (!lacc && !racc)
2916 return SRA_AM_NONE;
2918 loc = gimple_location (*stmt);
2919 if (lacc && lacc->grp_to_be_replaced)
2921 lhs = get_access_replacement (lacc);
2922 gimple_assign_set_lhs (*stmt, lhs);
2923 modify_this_stmt = true;
2924 if (lacc->grp_partial_lhs)
2925 force_gimple_rhs = true;
2926 sra_stats.exprs++;
2929 if (racc && racc->grp_to_be_replaced)
2931 rhs = get_access_replacement (racc);
2932 modify_this_stmt = true;
2933 if (racc->grp_partial_lhs)
2934 force_gimple_rhs = true;
2935 sra_stats.exprs++;
2937 else if (racc
2938 && !racc->grp_unscalarized_data
2939 && TREE_CODE (lhs) == SSA_NAME
2940 && !access_has_replacements_p (racc))
2942 rhs = get_repl_default_def_ssa_name (racc);
2943 modify_this_stmt = true;
2944 sra_stats.exprs++;
2947 if (modify_this_stmt)
2949 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2951 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2952 ??? This should move to fold_stmt which we simply should
2953 call after building a VIEW_CONVERT_EXPR here. */
2954 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2955 && !contains_bitfld_comp_ref_p (lhs)
2956 && !access_has_children_p (lacc))
2958 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2959 gimple_assign_set_lhs (*stmt, lhs);
2961 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2962 && !contains_vce_or_bfcref_p (rhs)
2963 && !access_has_children_p (racc))
2964 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2966 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2968 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2969 rhs);
2970 if (is_gimple_reg_type (TREE_TYPE (lhs))
2971 && TREE_CODE (lhs) != SSA_NAME)
2972 force_gimple_rhs = true;
2977 /* From this point on, the function deals with assignments in between
2978 aggregates when at least one has scalar reductions of some of its
2979 components. There are three possible scenarios: 1) both the LHS and RHS have
2980 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2982 In the first case, we would like to load the LHS components from RHS
2983 components whenever possible. If that is not possible, we would like to
2984 read it directly from the RHS (after updating it by storing in it its own
2985 components). If there are some necessary unscalarized data in the LHS,
2986 those will be loaded by the original assignment too. If neither of these
2987 cases happen, the original statement can be removed. Most of this is done
2988 by load_assign_lhs_subreplacements.
2990 In the second case, we would like to store all RHS scalarized components
2991 directly into LHS and if they cover the aggregate completely, remove the
2992 statement too. In the third case, we want the LHS components to be loaded
2993 directly from the RHS (DSE will remove the original statement if it
2994 becomes redundant).
2996 This is a bit complex but manageable when types match and when unions do
2997 not cause confusion in a way that we cannot really load a component of LHS
2998 from the RHS or vice versa (the access representing this level can have
2999 subaccesses that are accessible only through a different union field at a
3000 higher level - different from the one used in the examined expression).
3001 Unions are fun.
3003 Therefore, I specially handle a fourth case, happening when there is a
3004 specific type cast or it is impossible to locate a scalarized subaccess on
3005 the other side of the expression. If that happens, I simply "refresh" the
3006 RHS by storing its scalarized components in it, leave the original statement
3007 there to do the copying and then load the scalar replacements of the LHS.
3008 This is what the first branch does. */
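
/* A sketch of the second scenario, with hypothetical names: for

     dst = src;

   where only src has replacements SR.1 and SR.2 that cover it completely, the
   statements dst.i = SR.1; dst.f = SR.2; are generated and the aggregate copy
   is removed; if src contains unscalarized data, the replacements are instead
   stored back into src and the copy is kept.  */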
3010 if (modify_this_stmt
3011 || gimple_has_volatile_ops (*stmt)
3012 || contains_vce_or_bfcref_p (rhs)
3013 || contains_vce_or_bfcref_p (lhs))
3015 if (access_has_children_p (racc))
3016 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3017 gsi, false, false, loc);
3018 if (access_has_children_p (lacc))
3019 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3020 gsi, true, true, loc);
3021 sra_stats.separate_lhs_rhs_handling++;
3023 /* This gimplification must be done after generate_subtree_copies,
3024 lest we insert the subtree copies in the middle of the gimplified
3025 sequence. */
3026 if (force_gimple_rhs)
3027 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3028 true, GSI_SAME_STMT);
3029 if (gimple_assign_rhs1 (*stmt) != rhs)
3031 modify_this_stmt = true;
3032 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3033 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3036 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3038 else
3040 if (access_has_children_p (lacc)
3041 && access_has_children_p (racc)
3042 /* When an access represents an unscalarizable region, it usually
3043 represents accesses with variable offset and thus must not be used
3044 to generate new memory accesses. */
3045 && !lacc->grp_unscalarizable_region
3046 && !racc->grp_unscalarizable_region)
3048 gimple_stmt_iterator orig_gsi = *gsi;
3049 enum unscalarized_data_handling refreshed;
3051 if (lacc->grp_read && !lacc->grp_covered)
3052 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3053 else
3054 refreshed = SRA_UDH_NONE;
3056 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3057 &orig_gsi, gsi, &refreshed);
3058 if (refreshed != SRA_UDH_RIGHT)
3060 gsi_next (gsi);
3061 unlink_stmt_vdef (*stmt);
3062 gsi_remove (&orig_gsi, true);
3063 release_defs (*stmt);
3064 sra_stats.deleted++;
3065 return SRA_AM_REMOVED;
3068 else
3070 if (access_has_children_p (racc)
3071 && !racc->grp_unscalarized_data)
3073 if (dump_file)
3075 fprintf (dump_file, "Removing load: ");
3076 print_gimple_stmt (dump_file, *stmt, 0, 0);
3078 generate_subtree_copies (racc->first_child, lhs,
3079 racc->offset, 0, 0, gsi,
3080 false, false, loc);
3081 gcc_assert (*stmt == gsi_stmt (*gsi));
3082 unlink_stmt_vdef (*stmt);
3083 gsi_remove (gsi, true);
3084 release_defs (*stmt);
3085 sra_stats.deleted++;
3086 return SRA_AM_REMOVED;
3088 /* Restore the aggregate RHS from its components so the
3089 prevailing aggregate copy does the right thing. */
3090 if (access_has_children_p (racc))
3091 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3092 gsi, false, false, loc);
3093 /* Re-load the components of the aggregate copy destination.
3094 But use the RHS aggregate to load from to expose more
3095 optimization opportunities. */
3096 if (access_has_children_p (lacc))
3097 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3098 0, 0, gsi, true, true, loc);
3101 return SRA_AM_NONE;
3105 /* Traverse the function body and all modifications as decided in
3106 analyze_all_variable_accesses. Return true iff the CFG has been
3107 changed. */
3109 static bool
3110 sra_modify_function_body (void)
3112 bool cfg_changed = false;
3113 basic_block bb;
3115 FOR_EACH_BB (bb)
3117 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3118 while (!gsi_end_p (gsi))
3120 gimple stmt = gsi_stmt (gsi);
3121 enum assignment_mod_result assign_result;
3122 bool modified = false, deleted = false;
3123 tree *t;
3124 unsigned i;
3126 switch (gimple_code (stmt))
3128 case GIMPLE_RETURN:
3129 t = gimple_return_retval_ptr (stmt);
3130 if (*t != NULL_TREE)
3131 modified |= sra_modify_expr (t, &gsi, false);
3132 break;
3134 case GIMPLE_ASSIGN:
3135 assign_result = sra_modify_assign (&stmt, &gsi);
3136 modified |= assign_result == SRA_AM_MODIFIED;
3137 deleted = assign_result == SRA_AM_REMOVED;
3138 break;
3140 case GIMPLE_CALL:
3141 /* Operands must be processed before the lhs. */
3142 for (i = 0; i < gimple_call_num_args (stmt); i++)
3144 t = gimple_call_arg_ptr (stmt, i);
3145 modified |= sra_modify_expr (t, &gsi, false);
3148 if (gimple_call_lhs (stmt))
3150 t = gimple_call_lhs_ptr (stmt);
3151 modified |= sra_modify_expr (t, &gsi, true);
3153 break;
3155 case GIMPLE_ASM:
3156 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3158 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3159 modified |= sra_modify_expr (t, &gsi, false);
3161 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3163 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3164 modified |= sra_modify_expr (t, &gsi, true);
3166 break;
3168 default:
3169 break;
3172 if (modified)
3174 update_stmt (stmt);
3175 if (maybe_clean_eh_stmt (stmt)
3176 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3177 cfg_changed = true;
3179 if (!deleted)
3180 gsi_next (&gsi);
3184 return cfg_changed;
3187 /* Generate statements initializing scalar replacements of parts of function
3188 parameters. */
3190 static void
3191 initialize_parameter_reductions (void)
3193 gimple_stmt_iterator gsi;
3194 gimple_seq seq = NULL;
3195 tree parm;
3197 gsi = gsi_start (seq);
3198 for (parm = DECL_ARGUMENTS (current_function_decl);
3199 parm;
3200 parm = DECL_CHAIN (parm))
3202 VEC (access_p, heap) *access_vec;
3203 struct access *access;
3205 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3206 continue;
3207 access_vec = get_base_access_vector (parm);
3208 if (!access_vec)
3209 continue;
3211 for (access = VEC_index (access_p, access_vec, 0);
3212 access;
3213 access = access->next_grp)
3214 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3215 EXPR_LOCATION (parm));
3218 seq = gsi_seq (gsi);
3219 if (seq)
3220 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
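
/* An illustrative sketch, with hypothetical names: for a scalarized parameter

     struct S { int i; float f; } p;

   the entry edge receives

     SR.1 = p.i;
     SR.2 = p.f;

   so that uses of the replacements in the body see the incoming values.  */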
3223 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3224 it reveals there are components of some aggregates to be scalarized, it runs
3225 the required transformations. */
3226 static unsigned int
3227 perform_intra_sra (void)
3229 int ret = 0;
3230 sra_initialize ();
3232 if (!find_var_candidates ())
3233 goto out;
3235 if (!scan_function ())
3236 goto out;
3238 if (!analyze_all_variable_accesses ())
3239 goto out;
3241 if (sra_modify_function_body ())
3242 ret = TODO_update_ssa | TODO_cleanup_cfg;
3243 else
3244 ret = TODO_update_ssa;
3245 initialize_parameter_reductions ();
3247 statistics_counter_event (cfun, "Scalar replacements created",
3248 sra_stats.replacements);
3249 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3250 statistics_counter_event (cfun, "Subtree copy stmts",
3251 sra_stats.subtree_copies);
3252 statistics_counter_event (cfun, "Subreplacement stmts",
3253 sra_stats.subreplacements);
3254 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3255 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3256 sra_stats.separate_lhs_rhs_handling);
3258 out:
3259 sra_deinitialize ();
3260 return ret;
3263 /* Perform early intraprocedural SRA. */
3264 static unsigned int
3265 early_intra_sra (void)
3267 sra_mode = SRA_MODE_EARLY_INTRA;
3268 return perform_intra_sra ();
3271 /* Perform "late" intraprocedural SRA. */
3272 static unsigned int
3273 late_intra_sra (void)
3275 sra_mode = SRA_MODE_INTRA;
3276 return perform_intra_sra ();
3280 static bool
3281 gate_intra_sra (void)
3283 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3287 struct gimple_opt_pass pass_sra_early =
3290 GIMPLE_PASS,
3291 "esra", /* name */
3292 gate_intra_sra, /* gate */
3293 early_intra_sra, /* execute */
3294 NULL, /* sub */
3295 NULL, /* next */
3296 0, /* static_pass_number */
3297 TV_TREE_SRA, /* tv_id */
3298 PROP_cfg | PROP_ssa, /* properties_required */
3299 0, /* properties_provided */
3300 0, /* properties_destroyed */
3301 0, /* todo_flags_start */
3302 TODO_update_ssa
3303 | TODO_ggc_collect
3304 | TODO_verify_ssa /* todo_flags_finish */
3308 struct gimple_opt_pass pass_sra =
3311 GIMPLE_PASS,
3312 "sra", /* name */
3313 gate_intra_sra, /* gate */
3314 late_intra_sra, /* execute */
3315 NULL, /* sub */
3316 NULL, /* next */
3317 0, /* static_pass_number */
3318 TV_TREE_SRA, /* tv_id */
3319 PROP_cfg | PROP_ssa, /* properties_required */
3320 0, /* properties_provided */
3321 0, /* properties_destroyed */
3322 TODO_update_address_taken, /* todo_flags_start */
3323 TODO_update_ssa
3324 | TODO_ggc_collect
3325 | TODO_verify_ssa /* todo_flags_finish */
3330 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3331 parameter. */
3333 static bool
3334 is_unused_scalar_param (tree parm)
3336 tree name;
3337 return (is_gimple_reg (parm)
3338 && (!(name = gimple_default_def (cfun, parm))
3339 || has_zero_uses (name)));
3342 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3343 examine whether there are any direct or otherwise infeasible ones. If so,
3344 return true, otherwise return false. PARM must be a gimple register with a
3345 non-NULL default definition. */
3347 static bool
3348 ptr_parm_has_direct_uses (tree parm)
3350 imm_use_iterator ui;
3351 gimple stmt;
3352 tree name = gimple_default_def (cfun, parm);
3353 bool ret = false;
3355 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3357 int uses_ok = 0;
3358 use_operand_p use_p;
3360 if (is_gimple_debug (stmt))
3361 continue;
3363 /* Valid uses include dereferences on the lhs and the rhs. */
3364 if (gimple_has_lhs (stmt))
3366 tree lhs = gimple_get_lhs (stmt);
3367 while (handled_component_p (lhs))
3368 lhs = TREE_OPERAND (lhs, 0);
3369 if (TREE_CODE (lhs) == MEM_REF
3370 && TREE_OPERAND (lhs, 0) == name
3371 && integer_zerop (TREE_OPERAND (lhs, 1))
3372 && types_compatible_p (TREE_TYPE (lhs),
3373 TREE_TYPE (TREE_TYPE (name)))
3374 && !TREE_THIS_VOLATILE (lhs))
3375 uses_ok++;
3377 if (gimple_assign_single_p (stmt))
3379 tree rhs = gimple_assign_rhs1 (stmt);
3380 while (handled_component_p (rhs))
3381 rhs = TREE_OPERAND (rhs, 0);
3382 if (TREE_CODE (rhs) == MEM_REF
3383 && TREE_OPERAND (rhs, 0) == name
3384 && integer_zerop (TREE_OPERAND (rhs, 1))
3385 && types_compatible_p (TREE_TYPE (rhs),
3386 TREE_TYPE (TREE_TYPE (name)))
3387 && !TREE_THIS_VOLATILE (rhs))
3388 uses_ok++;
3390 else if (is_gimple_call (stmt))
3392 unsigned i;
3393 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3395 tree arg = gimple_call_arg (stmt, i);
3396 while (handled_component_p (arg))
3397 arg = TREE_OPERAND (arg, 0);
3398 if (TREE_CODE (arg) == MEM_REF
3399 && TREE_OPERAND (arg, 0) == name
3400 && integer_zerop (TREE_OPERAND (arg, 1))
3401 && types_compatible_p (TREE_TYPE (arg),
3402 TREE_TYPE (TREE_TYPE (name)))
3403 && !TREE_THIS_VOLATILE (arg))
3404 uses_ok++;
3408 /* If the number of valid uses does not match the number of
3409 uses in this stmt there is an unhandled use. */
3410 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3411 --uses_ok;
3413 if (uses_ok != 0)
3414 ret = true;
3416 if (ret)
3417 BREAK_FROM_IMM_USE_STMT (ui);
3420 return ret;
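
/* For illustration (hypothetical): for a parameter int *p, loads and stores
   of the form *p -- a MEM_REF of p at offset zero with a type compatible with
   the pointed-to type -- are the only uses tolerated above; passing p itself
   to another function or computing p + 1 is a direct use and makes the
   function return true.  */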
3423 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3424 them in candidate_bitmap. Note that these do not necessarily include
3425 parameters which are unused and thus can be removed. Return true iff any
3426 such candidate has been found. */
3428 static bool
3429 find_param_candidates (void)
3431 tree parm;
3432 int count = 0;
3433 bool ret = false;
3434 const char *msg;
3436 for (parm = DECL_ARGUMENTS (current_function_decl);
3437 parm;
3438 parm = DECL_CHAIN (parm))
3440 tree type = TREE_TYPE (parm);
3442 count++;
3444 if (TREE_THIS_VOLATILE (parm)
3445 || TREE_ADDRESSABLE (parm)
3446 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3447 continue;
3449 if (is_unused_scalar_param (parm))
3451 ret = true;
3452 continue;
3455 if (POINTER_TYPE_P (type))
3457 type = TREE_TYPE (type);
3459 if (TREE_CODE (type) == FUNCTION_TYPE
3460 || TYPE_VOLATILE (type)
3461 || (TREE_CODE (type) == ARRAY_TYPE
3462 && TYPE_NONALIASED_COMPONENT (type))
3463 || !is_gimple_reg (parm)
3464 || is_va_list_type (type)
3465 || ptr_parm_has_direct_uses (parm))
3466 continue;
3468 else if (!AGGREGATE_TYPE_P (type))
3469 continue;
3471 if (!COMPLETE_TYPE_P (type)
3472 || !host_integerp (TYPE_SIZE (type), 1)
3473 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3474 || (AGGREGATE_TYPE_P (type)
3475 && type_internals_preclude_sra_p (type, &msg)))
3476 continue;
3478 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3479 ret = true;
3480 if (dump_file && (dump_flags & TDF_DETAILS))
3482 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3483 print_generic_expr (dump_file, parm, 0);
3484 fprintf (dump_file, "\n");
3488 func_param_count = count;
3489 return ret;
3492 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3493 maybe_modified. */
3495 static bool
3496 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3497 void *data)
3499 struct access *repr = (struct access *) data;
3501 repr->grp_maybe_modified = 1;
3502 return true;
3505 /* Analyze what representatives (in linked lists accessible from
3506 REPRESENTATIVES) can be modified by side effects of statements in the
3507 current function. */
3509 static void
3510 analyze_modified_params (VEC (access_p, heap) *representatives)
3512 int i;
3514 for (i = 0; i < func_param_count; i++)
3516 struct access *repr;
3518 for (repr = VEC_index (access_p, representatives, i);
3519 repr;
3520 repr = repr->next_grp)
3522 struct access *access;
3523 bitmap visited;
3524 ao_ref ar;
3526 if (no_accesses_p (repr))
3527 continue;
3528 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3529 || repr->grp_maybe_modified)
3530 continue;
3532 ao_ref_init (&ar, repr->expr);
3533 visited = BITMAP_ALLOC (NULL);
3534 for (access = repr; access; access = access->next_sibling)
3536 /* All accesses are read ones, otherwise grp_maybe_modified would
3537 be trivially set. */
3538 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3539 mark_maybe_modified, repr, &visited);
3540 if (repr->grp_maybe_modified)
3541 break;
3543 BITMAP_FREE (visited);
3548 /* Propagate distances in bb_dereferences in the opposite direction than the
3549 control flow edges, in each step storing the maximum of the current value
3550 and the minimum of all successors. These steps are repeated until the table
3551 stabilizes. Note that BBs which might terminate the function (according to
3552 the final_bbs bitmap) are never updated in this way. */
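
/* In dataflow terms, a sketch of one update step for a BB that is not in
   final_bbs and a parameter with index i:

     dist[bb][i] = MAX (dist[bb][i], MIN over successors s of dist[s][i])

   repeated until no value changes.  */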
3554 static void
3555 propagate_dereference_distances (void)
3557 VEC (basic_block, heap) *queue;
3558 basic_block bb;
3560 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3561 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3562 FOR_EACH_BB (bb)
3564 VEC_quick_push (basic_block, queue, bb);
3565 bb->aux = bb;
3568 while (!VEC_empty (basic_block, queue))
3570 edge_iterator ei;
3571 edge e;
3572 bool change = false;
3573 int i;
3575 bb = VEC_pop (basic_block, queue);
3576 bb->aux = NULL;
3578 if (bitmap_bit_p (final_bbs, bb->index))
3579 continue;
3581 for (i = 0; i < func_param_count; i++)
3583 int idx = bb->index * func_param_count + i;
3584 bool first = true;
3585 HOST_WIDE_INT inh = 0;
3587 FOR_EACH_EDGE (e, ei, bb->succs)
3589 int succ_idx = e->dest->index * func_param_count + i;
3591 if (e->src == EXIT_BLOCK_PTR)
3592 continue;
3594 if (first)
3596 first = false;
3597 inh = bb_dereferences [succ_idx];
3599 else if (bb_dereferences [succ_idx] < inh)
3600 inh = bb_dereferences [succ_idx];
3603 if (!first && bb_dereferences[idx] < inh)
3605 bb_dereferences[idx] = inh;
3606 change = true;
3610 if (change && !bitmap_bit_p (final_bbs, bb->index))
3611 FOR_EACH_EDGE (e, ei, bb->preds)
3613 if (e->src->aux)
3614 continue;
3616 e->src->aux = e->src;
3617 VEC_quick_push (basic_block, queue, e->src);
3621 VEC_free (basic_block, heap, queue);
3624 /* Dump a dereferences TABLE with heading STR to file F. */
3626 static void
3627 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3629 basic_block bb;
3631 fprintf (f, "%s", str);
3632 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3634 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3635 if (bb != EXIT_BLOCK_PTR)
3637 int i;
3638 for (i = 0; i < func_param_count; i++)
3640 int idx = bb->index * func_param_count + i;
3641 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3644 fprintf (f, "\n");
3646 fprintf (f, "\n");
3649 /* Determine which (parts of) parameters passed by reference and not assigned
3650 to are not certainly dereferenced in this function, which means the
3651 dereferencing cannot be safely moved to the caller without potentially
3652 introducing a segfault. Mark such REPRESENTATIVES as
3653 grp_not_necessarilly_dereferenced.
3655 The maximum dereferenced "distance", i.e. the offset + size of the accessed
3656 part, is calculated for each pointer parameter rather than a simple
3657 boolean, to handle cases when only a fraction of the whole
3658 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3659 an example).
3661 The maximum dereference distances for each pointer parameter and BB are
3662 already stored in bb_dereferences. This routine simply propagates these
3663 values upwards by propagate_dereference_distances and then compares the
3664 distances of individual parameters in the ENTRY BB to the equivalent
3665 distances of each representative of a (fraction of a) parameter. */
3667 static void
3668 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3670 int i;
3672 if (dump_file && (dump_flags & TDF_DETAILS))
3673 dump_dereferences_table (dump_file,
3674 "Dereference table before propagation:\n",
3675 bb_dereferences);
3677 propagate_dereference_distances ();
3679 if (dump_file && (dump_flags & TDF_DETAILS))
3680 dump_dereferences_table (dump_file,
3681 "Dereference table after propagation:\n",
3682 bb_dereferences);
3684 for (i = 0; i < func_param_count; i++)
3686 struct access *repr = VEC_index (access_p, representatives, i);
3687 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3689 if (!repr || no_accesses_p (repr))
3690 continue;
3694 if ((repr->offset + repr->size) > bb_dereferences[idx])
3695 repr->grp_not_necessarilly_dereferenced = 1;
3696 repr = repr->next_grp;
3698 while (repr);
3702 /* Return the representative access for the parameter declaration PARM if it is
3703 a scalar passed by reference which is not written to and the pointer value
3704 is not used directly. Thus, if it is legal to dereference it in the caller
3705 and we can rule out modifications through aliases, such parameter should be
3706 turned into one passed by value. Return NULL otherwise. */
3708 static struct access *
3709 unmodified_by_ref_scalar_representative (tree parm)
3711 int i, access_count;
3712 struct access *repr;
3713 VEC (access_p, heap) *access_vec;
3715 access_vec = get_base_access_vector (parm);
3716 gcc_assert (access_vec);
3717 repr = VEC_index (access_p, access_vec, 0);
3718 if (repr->write)
3719 return NULL;
3720 repr->group_representative = repr;
3722 access_count = VEC_length (access_p, access_vec);
3723 for (i = 1; i < access_count; i++)
3725 struct access *access = VEC_index (access_p, access_vec, i);
3726 if (access->write)
3727 return NULL;
3728 access->group_representative = repr;
3729 access->next_sibling = repr->next_sibling;
3730 repr->next_sibling = access;
3733 repr->grp_read = 1;
3734 repr->grp_scalar_ptr = 1;
3735 return repr;
3738 /* Return true iff this access precludes IPA-SRA of the parameter it is
3739 associated with. */
3741 static bool
3742 access_precludes_ipa_sra_p (struct access *access)
3744 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3745 is incompatible assign in a call statement (and possibly even in asm
3746 statements). This can be relaxed by using a new temporary but only for
3747 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3748 intraprocedural SRA we deal with this by keeping the old aggregate around,
3749 something we cannot do in IPA-SRA.) */
3750 if (access->write
3751 && (is_gimple_call (access->stmt)
3752 || gimple_code (access->stmt) == GIMPLE_ASM))
3753 return true;
3755 return false;
3759 /* Sort collected accesses for parameter PARM, identify representatives for
3760 each accessed region and link them together. Return NULL if there are
3761 different but overlapping accesses, the special pointer value meaning there
3762 are no accesses for this parameter if that is the case, and the first
3763 representative otherwise. Set *RO_GRP if there is a group of accesses
3764 with only read (i.e. no write) accesses. */
3766 static struct access *
3767 splice_param_accesses (tree parm, bool *ro_grp)
3769 int i, j, access_count, group_count;
3770 int agg_size, total_size = 0;
3771 struct access *access, *res, **prev_acc_ptr = &res;
3772 VEC (access_p, heap) *access_vec;
3774 access_vec = get_base_access_vector (parm);
3775 if (!access_vec)
3776 return &no_accesses_representant;
3777 access_count = VEC_length (access_p, access_vec);
3779 VEC_qsort (access_p, access_vec, compare_access_positions);
3781 i = 0;
3782 total_size = 0;
3783 group_count = 0;
3784 while (i < access_count)
3786 bool modification;
3787 tree a1_alias_type;
3788 access = VEC_index (access_p, access_vec, i);
3789 modification = access->write;
3790 if (access_precludes_ipa_sra_p (access))
3791 return NULL;
3792 a1_alias_type = reference_alias_ptr_type (access->expr);
3794 /* Access is about to become group representative unless we find some
3795 nasty overlap which would preclude us from breaking this parameter
3796 apart. */
3798 j = i + 1;
3799 while (j < access_count)
3801 struct access *ac2 = VEC_index (access_p, access_vec, j);
3802 if (ac2->offset != access->offset)
3804 /* All or nothing law for parameters. */
3805 if (access->offset + access->size > ac2->offset)
3806 return NULL;
3807 else
3808 break;
3810 else if (ac2->size != access->size)
3811 return NULL;
3813 if (access_precludes_ipa_sra_p (ac2)
3814 || (ac2->type != access->type
3815 && (TREE_ADDRESSABLE (ac2->type)
3816 || TREE_ADDRESSABLE (access->type)))
3817 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3818 return NULL;
3820 modification |= ac2->write;
3821 ac2->group_representative = access;
3822 ac2->next_sibling = access->next_sibling;
3823 access->next_sibling = ac2;
3824 j++;
3827 group_count++;
3828 access->grp_maybe_modified = modification;
3829 if (!modification)
3830 *ro_grp = true;
3831 *prev_acc_ptr = access;
3832 prev_acc_ptr = &access->next_grp;
3833 total_size += access->size;
3834 i = j;
3837 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3838 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3839 else
3840 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3841 if (total_size >= agg_size)
3842 return NULL;
3844 gcc_assert (group_count > 0);
3845 return res;
3848 /* Decide whether parameters with representative accesses given by REPR should
3849 be reduced into components. */
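/* A worked example (hedged; the exact limit depends on parameters such as
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR and on int being 32 bits): given

     struct S { int a; int b; int c; };
     static int h (struct S s) { return s.a + s.c; }

   the representatives cover 64 of the aggregate's 96 bits, both size tests
   below succeed, and 2 is returned, i.e. s may be split roughly into

     static int h.isra (int a, int c) { return a + c; }  */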
3851 static int
3852 decide_one_param_reduction (struct access *repr)
3854 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3855 bool by_ref;
3856 tree parm;
3858 parm = repr->base;
3859 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3860 gcc_assert (cur_parm_size > 0);
3862 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3864 by_ref = true;
3865 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3867 else
3869 by_ref = false;
3870 agg_size = cur_parm_size;
3873 if (dump_file)
3875 struct access *acc;
3876 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3877 print_generic_expr (dump_file, parm, 0);
3878 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3879 for (acc = repr; acc; acc = acc->next_grp)
3880 dump_access (dump_file, acc, true);
3883 total_size = 0;
3884 new_param_count = 0;
3886 for (; repr; repr = repr->next_grp)
3888 gcc_assert (parm == repr->base);
3890 /* Taking the address of a non-addressable field is verboten. */
3891 if (by_ref && repr->non_addressable)
3892 return 0;
3894 /* Do not decompose a non-BLKmode param in a way that would
3895 create BLKmode params. Especially for by-reference passing
3896 (thus, pointer-type param) this is hardly worthwhile. */
3897 if (DECL_MODE (parm) != BLKmode
3898 && TYPE_MODE (repr->type) == BLKmode)
3899 return 0;
3901 if (!by_ref || (!repr->grp_maybe_modified
3902 && !repr->grp_not_necessarilly_dereferenced))
3903 total_size += repr->size;
3904 else
3905 total_size += cur_parm_size;
3907 new_param_count++;
3910 gcc_assert (new_param_count > 0);
3912 if (optimize_function_for_size_p (cfun))
3913 parm_size_limit = cur_parm_size;
3914 else
3915 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3916 * cur_parm_size);
3918 if (total_size < agg_size
3919 && total_size <= parm_size_limit)
3921 if (dump_file)
3922 fprintf (dump_file, " ....will be split into %i components\n",
3923 new_param_count);
3924 return new_param_count;
3926 else
3927 return 0;
3930 /* The order of the following enums is important; we need to do extra work for
3931 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3932 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3933 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
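/* For instance, splice_all_param_accesses below relies on this numeric
   ordering: comparisons such as result < MODIF_BY_REF_ACCESSES make sure
   the overall verdict is only ever upgraded, never downgraded.  */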
3935 /* Identify representatives of all accesses to all candidate parameters for
3936 IPA-SRA. Return result based on what representatives have been found. */
3938 static enum ipa_splicing_result
3939 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3941 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3942 tree parm;
3943 struct access *repr;
3945 *representatives = VEC_alloc (access_p, heap, func_param_count);
3947 for (parm = DECL_ARGUMENTS (current_function_decl);
3948 parm;
3949 parm = DECL_CHAIN (parm))
3951 if (is_unused_scalar_param (parm))
3953 VEC_quick_push (access_p, *representatives,
3954 &no_accesses_representant);
3955 if (result == NO_GOOD_ACCESS)
3956 result = UNUSED_PARAMS;
3958 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3959 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3960 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3962 repr = unmodified_by_ref_scalar_representative (parm);
3963 VEC_quick_push (access_p, *representatives, repr);
3964 if (repr)
3965 result = UNMODIF_BY_REF_ACCESSES;
3967 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3969 bool ro_grp = false;
3970 repr = splice_param_accesses (parm, &ro_grp);
3971 VEC_quick_push (access_p, *representatives, repr);
3973 if (repr && !no_accesses_p (repr))
3975 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3977 if (ro_grp)
3978 result = UNMODIF_BY_REF_ACCESSES;
3979 else if (result < MODIF_BY_REF_ACCESSES)
3980 result = MODIF_BY_REF_ACCESSES;
3982 else if (result < BY_VAL_ACCESSES)
3983 result = BY_VAL_ACCESSES;
3985 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3986 result = UNUSED_PARAMS;
3988 else
3989 VEC_quick_push (access_p, *representatives, NULL);
3992 if (result == NO_GOOD_ACCESS)
3994 VEC_free (access_p, heap, *representatives);
3995 *representatives = NULL;
3996 return NO_GOOD_ACCESS;
3999 return result;
4002 /* Return the index of BASE in PARMS. Abort if it is not found. */
4004 static inline int
4005 get_param_index (tree base, VEC(tree, heap) *parms)
4007 int i, len;
4009 len = VEC_length (tree, parms);
4010 for (i = 0; i < len; i++)
4011 if (VEC_index (tree, parms, i) == base)
4012 return i;
4013 gcc_unreachable ();
4016 /* Convert the decisions made at the representative level into compact
4017 parameter adjustments. REPRESENTATIVES are pointers to the first
4018 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4019 final number of adjustments. */
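/* A hedged illustration of the resulting layout (indices invented): for
   foo (struct S s, int unused) where only s.a and s.b are accessed, the
   vector would hold two entries with base_index 0 carrying the type and
   offset of each component of s, followed by one entry with base_index 1
   and remove_param set for the unused scalar.  */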
4021 static ipa_parm_adjustment_vec
4022 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4023 int adjustments_count)
4025 VEC (tree, heap) *parms;
4026 ipa_parm_adjustment_vec adjustments;
4027 tree parm;
4028 int i;
4030 gcc_assert (adjustments_count > 0);
4031 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4032 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4033 parm = DECL_ARGUMENTS (current_function_decl);
4034 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4036 struct access *repr = VEC_index (access_p, representatives, i);
4038 if (!repr || no_accesses_p (repr))
4040 struct ipa_parm_adjustment *adj;
4042 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4043 memset (adj, 0, sizeof (*adj));
4044 adj->base_index = get_param_index (parm, parms);
4045 adj->base = parm;
4046 if (!repr)
4047 adj->copy_param = 1;
4048 else
4049 adj->remove_param = 1;
4051 else
4053 struct ipa_parm_adjustment *adj;
4054 int index = get_param_index (parm, parms);
4056 for (; repr; repr = repr->next_grp)
4058 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4059 memset (adj, 0, sizeof (*adj));
4060 gcc_assert (repr->base == parm);
4061 adj->base_index = index;
4062 adj->base = repr->base;
4063 adj->type = repr->type;
4064 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4065 adj->offset = repr->offset;
4066 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4067 && (repr->grp_maybe_modified
4068 || repr->grp_not_necessarilly_dereferenced));
4073 VEC_free (tree, heap, parms);
4074 return adjustments;
4077 /* Analyze the collected accesses and produce a plan for what to do with the
4078 parameters in the form of adjustments, NULL meaning no changes. */
4080 static ipa_parm_adjustment_vec
4081 analyze_all_param_accesses (void)
4083 enum ipa_splicing_result repr_state;
4084 bool proceed = false;
4085 int i, adjustments_count = 0;
4086 VEC (access_p, heap) *representatives;
4087 ipa_parm_adjustment_vec adjustments;
4089 repr_state = splice_all_param_accesses (&representatives);
4090 if (repr_state == NO_GOOD_ACCESS)
4091 return NULL;
4093 /* If there are any parameters passed by reference which are not modified
4094 directly, we need to check whether they can be modified indirectly. */
4095 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4097 analyze_caller_dereference_legality (representatives);
4098 analyze_modified_params (representatives);
4101 for (i = 0; i < func_param_count; i++)
4103 struct access *repr = VEC_index (access_p, representatives, i);
4105 if (repr && !no_accesses_p (repr))
4107 if (repr->grp_scalar_ptr)
4109 adjustments_count++;
4110 if (repr->grp_not_necessarilly_dereferenced
4111 || repr->grp_maybe_modified)
4112 VEC_replace (access_p, representatives, i, NULL);
4113 else
4115 proceed = true;
4116 sra_stats.scalar_by_ref_to_by_val++;
4119 else
4121 int new_components = decide_one_param_reduction (repr);
4123 if (new_components == 0)
4125 VEC_replace (access_p, representatives, i, NULL);
4126 adjustments_count++;
4128 else
4130 adjustments_count += new_components;
4131 sra_stats.aggregate_params_reduced++;
4132 sra_stats.param_reductions_created += new_components;
4133 proceed = true;
4137 else
4139 if (no_accesses_p (repr))
4141 proceed = true;
4142 sra_stats.deleted_unused_parameters++;
4144 adjustments_count++;
4148 if (!proceed && dump_file)
4149 fprintf (dump_file, "NOT proceeding to change params.\n");
4151 if (proceed)
4152 adjustments = turn_representatives_into_adjustments (representatives,
4153 adjustments_count);
4154 else
4155 adjustments = NULL;
4157 VEC_free (access_p, heap, representatives);
4158 return adjustments;
4161 /* If a parameter replacement identified by ADJ does not yet exist in the form
4162 of a declaration, create and record it; otherwise return the previously
4163 created one. */
4165 static tree
4166 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4168 tree repl;
4169 if (!adj->new_ssa_base)
4171 char *pretty_name = make_fancy_name (adj->base);
4173 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4174 DECL_NAME (repl) = get_identifier (pretty_name);
4175 obstack_free (&name_obstack, pretty_name);
4177 add_referenced_var (repl);
4178 adj->new_ssa_base = repl;
4180 else
4181 repl = adj->new_ssa_base;
4182 return repl;
4185 /* Find the first adjustment for a particular parameter BASE in a vector of
4186 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4187 adjustment. */
4189 static struct ipa_parm_adjustment *
4190 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4192 int i, len;
4194 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4195 for (i = 0; i < len; i++)
4197 struct ipa_parm_adjustment *adj;
4199 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4200 if (!adj->copy_param && adj->base == base)
4201 return adj;
4204 return NULL;
4207 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4208 removed because its value is not used, replace the SSA_NAME and all of its
4209 uses with one relating to a created VAR_DECL and return true.
4210 ADJUSTMENTS is a pointer to an adjustments vector. */
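/* Sketch of the rewrite (SSA names hypothetical): if parameter q is being
   removed and the function still contains

     q_3 = PHI <q_1 (2), q_5 (3)>

   then the result is rebuilt on a fresh "ISR" temporary, every use of q_3
   is redirected to the new name, and q_3 itself is released.  */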
4212 static bool
4213 replace_removed_params_ssa_names (gimple stmt,
4214 ipa_parm_adjustment_vec adjustments)
4216 struct ipa_parm_adjustment *adj;
4217 tree lhs, decl, repl, name;
4219 if (gimple_code (stmt) == GIMPLE_PHI)
4220 lhs = gimple_phi_result (stmt);
4221 else if (is_gimple_assign (stmt))
4222 lhs = gimple_assign_lhs (stmt);
4223 else if (is_gimple_call (stmt))
4224 lhs = gimple_call_lhs (stmt);
4225 else
4226 gcc_unreachable ();
4228 if (TREE_CODE (lhs) != SSA_NAME)
4229 return false;
4230 decl = SSA_NAME_VAR (lhs);
4231 if (TREE_CODE (decl) != PARM_DECL)
4232 return false;
4234 adj = get_adjustment_for_base (adjustments, decl);
4235 if (!adj)
4236 return false;
4238 repl = get_replaced_param_substitute (adj);
4239 name = make_ssa_name (repl, stmt);
4241 if (dump_file)
4243 fprintf (dump_file, "replacing an SSA name of a removed param ");
4244 print_generic_expr (dump_file, lhs, 0);
4245 fprintf (dump_file, " with ");
4246 print_generic_expr (dump_file, name, 0);
4247 fprintf (dump_file, "\n");
4250 if (is_gimple_assign (stmt))
4251 gimple_assign_set_lhs (stmt, name);
4252 else if (is_gimple_call (stmt))
4253 gimple_call_set_lhs (stmt, name);
4254 else
4255 gimple_phi_set_result (stmt, name);
4257 replace_uses_by (lhs, name);
4258 release_ssa_name (lhs);
4259 return true;
4262 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4263 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4264 specifies whether the function should care about type incompatibility between
4265 the current and new expressions. If it is false, the function will leave
4266 incompatibility issues to the caller. Return true iff the expression
4267 was modified. */
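/* A minimal sketch (names invented): assuming parameter p was reduced and
   CAND->reduction stands for the component p->f, an expression such as

     (*p).f

   whose position matches CAND->offset is replaced by the reduction itself
   (or by a MEM_REF of it when the component is still passed by reference);
   if CONVERT is set and the types disagree, the result is additionally
   wrapped in a VIEW_CONVERT_EXPR.  */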
4269 static bool
4270 sra_ipa_modify_expr (tree *expr, bool convert,
4271 ipa_parm_adjustment_vec adjustments)
4273 int i, len;
4274 struct ipa_parm_adjustment *adj, *cand = NULL;
4275 HOST_WIDE_INT offset, size, max_size;
4276 tree base, src;
4278 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4280 if (TREE_CODE (*expr) == BIT_FIELD_REF
4281 || TREE_CODE (*expr) == IMAGPART_EXPR
4282 || TREE_CODE (*expr) == REALPART_EXPR)
4284 expr = &TREE_OPERAND (*expr, 0);
4285 convert = true;
4288 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4289 if (!base || size == -1 || max_size == -1)
4290 return false;
4292 if (TREE_CODE (base) == MEM_REF)
4294 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4295 base = TREE_OPERAND (base, 0);
4298 base = get_ssa_base_param (base);
4299 if (!base || TREE_CODE (base) != PARM_DECL)
4300 return false;
4302 for (i = 0; i < len; i++)
4304 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4306 if (adj->base == base &&
4307 (adj->offset == offset || adj->remove_param))
4309 cand = adj;
4310 break;
4313 if (!cand || cand->copy_param || cand->remove_param)
4314 return false;
4316 if (cand->by_ref)
4317 src = build_simple_mem_ref (cand->reduction);
4318 else
4319 src = cand->reduction;
4321 if (dump_file && (dump_flags & TDF_DETAILS))
4323 fprintf (dump_file, "About to replace expr ");
4324 print_generic_expr (dump_file, *expr, 0);
4325 fprintf (dump_file, " with ");
4326 print_generic_expr (dump_file, src, 0);
4327 fprintf (dump_file, "\n");
4330 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4332 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4333 *expr = vce;
4335 else
4336 *expr = src;
4337 return true;
4340 /* If the statement pointed to by STMT_PTR contains any expressions that need
4341 to be replaced with different ones as noted by ADJUSTMENTS, do so. Handle any
4342 potential type incompatibilities (GSI is used to accommodate conversion
4343 statements and must point to the statement). Return true iff the statement
4344 was modified. */
4346 static bool
4347 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4348 ipa_parm_adjustment_vec adjustments)
4350 gimple stmt = *stmt_ptr;
4351 tree *lhs_p, *rhs_p;
4352 bool any;
4354 if (!gimple_assign_single_p (stmt))
4355 return false;
4357 rhs_p = gimple_assign_rhs1_ptr (stmt);
4358 lhs_p = gimple_assign_lhs_ptr (stmt);
4360 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4361 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4362 if (any)
4364 tree new_rhs = NULL_TREE;
4366 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4368 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4370 /* V_C_Es of constructors can cause trouble (PR 42714). */
4371 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4372 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4373 else
4374 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4376 else
4377 new_rhs = fold_build1_loc (gimple_location (stmt),
4378 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4379 *rhs_p);
4381 else if (REFERENCE_CLASS_P (*rhs_p)
4382 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4383 && !is_gimple_reg (*lhs_p))
4384 /* This can happen when an assignment between two single-field
4385 structures is turned into an assignment between two pointers to
4386 scalars (PR 42237). */
4387 new_rhs = *rhs_p;
4389 if (new_rhs)
4391 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4392 true, GSI_SAME_STMT);
4394 gimple_assign_set_rhs_from_tree (gsi, tmp);
4397 return true;
4400 return false;
4403 /* Traverse the function body and perform all modifications as described in
4404 ADJUSTMENTS. Return true iff the CFG has been changed. */
4406 static bool
4407 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4409 bool cfg_changed = false;
4410 basic_block bb;
4412 FOR_EACH_BB (bb)
4414 gimple_stmt_iterator gsi;
4416 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4417 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4419 gsi = gsi_start_bb (bb);
4420 while (!gsi_end_p (gsi))
4422 gimple stmt = gsi_stmt (gsi);
4423 bool modified = false;
4424 tree *t;
4425 unsigned i;
4427 switch (gimple_code (stmt))
4429 case GIMPLE_RETURN:
4430 t = gimple_return_retval_ptr (stmt);
4431 if (*t != NULL_TREE)
4432 modified |= sra_ipa_modify_expr (t, true, adjustments);
4433 break;
4435 case GIMPLE_ASSIGN:
4436 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4437 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4438 break;
4440 case GIMPLE_CALL:
4441 /* Operands must be processed before the lhs. */
4442 for (i = 0; i < gimple_call_num_args (stmt); i++)
4444 t = gimple_call_arg_ptr (stmt, i);
4445 modified |= sra_ipa_modify_expr (t, true, adjustments);
4448 if (gimple_call_lhs (stmt))
4450 t = gimple_call_lhs_ptr (stmt);
4451 modified |= sra_ipa_modify_expr (t, false, adjustments);
4452 modified |= replace_removed_params_ssa_names (stmt,
4453 adjustments);
4455 break;
4457 case GIMPLE_ASM:
4458 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4460 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4461 modified |= sra_ipa_modify_expr (t, true, adjustments);
4463 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4465 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4466 modified |= sra_ipa_modify_expr (t, false, adjustments);
4468 break;
4470 default:
4471 break;
4474 if (modified)
4476 update_stmt (stmt);
4477 if (maybe_clean_eh_stmt (stmt)
4478 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4479 cfg_changed = true;
4481 gsi_next (&gsi);
4485 return cfg_changed;
4488 /* Call gimple_debug_bind_reset_value on all debug statements describing
4489 gimple register parameters that are being removed or replaced. */
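/* Illustration of the intended effect (dump syntax approximate): for a
   removed register parameter p whose default definition p_1 is still
   referenced from debug statements, a debug expression decl is introduced
   through a source bind such as

     # DEBUG D#1 s=> p

   and the remaining debug uses of p_1 are redirected to D#1; where no such
   expression can be created, the debug values are simply reset.  */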
4491 static void
4492 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4494 int i, len;
4495 gimple_stmt_iterator *gsip = NULL, gsi;
4497 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4499 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4500 gsip = &gsi;
4502 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4503 for (i = 0; i < len; i++)
4505 struct ipa_parm_adjustment *adj;
4506 imm_use_iterator ui;
4507 gimple stmt, def_temp;
4508 tree name, vexpr, copy = NULL_TREE;
4509 use_operand_p use_p;
4511 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4512 if (adj->copy_param || !is_gimple_reg (adj->base))
4513 continue;
4514 name = gimple_default_def (cfun, adj->base);
4515 vexpr = NULL;
4516 if (name)
4517 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4519 /* All other users must have been removed by
4520 ipa_sra_modify_function_body. */
4521 gcc_assert (is_gimple_debug (stmt));
4522 if (vexpr == NULL && gsip != NULL)
4524 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4525 vexpr = make_node (DEBUG_EXPR_DECL);
4526 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4527 NULL);
4528 DECL_ARTIFICIAL (vexpr) = 1;
4529 TREE_TYPE (vexpr) = TREE_TYPE (name);
4530 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4531 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4533 if (vexpr)
4535 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4536 SET_USE (use_p, vexpr);
4538 else
4539 gimple_debug_bind_reset_value (stmt);
4540 update_stmt (stmt);
4542 /* Create a VAR_DECL for debug info purposes. */
4543 if (!DECL_IGNORED_P (adj->base))
4545 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4546 VAR_DECL, DECL_NAME (adj->base),
4547 TREE_TYPE (adj->base));
4548 if (DECL_PT_UID_SET_P (adj->base))
4549 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4550 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4551 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4552 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4553 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4554 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4555 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4556 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4557 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4558 SET_DECL_RTL (copy, 0);
4559 TREE_USED (copy) = 1;
4560 DECL_CONTEXT (copy) = current_function_decl;
4561 add_referenced_var (copy);
4562 add_local_decl (cfun, copy);
4563 DECL_CHAIN (copy) =
4564 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4565 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4567 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4569 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4570 if (vexpr)
4571 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4572 else
4573 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4574 NULL);
4575 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4580 /* Return true iff some caller of NODE passes fewer actual arguments than there
4581 are formal parameters in the current function. */
4583 static bool
4584 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4585 void *data ATTRIBUTE_UNUSED)
4587 struct cgraph_edge *cs;
4588 for (cs = node->callers; cs; cs = cs->next_caller)
4589 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4590 return true;
4592 return false;
4595 /* Convert all callers of NODE. */
4597 static bool
4598 convert_callers_for_node (struct cgraph_node *node,
4599 void *data)
4601 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4602 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4603 struct cgraph_edge *cs;
4605 for (cs = node->callers; cs; cs = cs->next_caller)
4607 current_function_decl = cs->caller->symbol.decl;
4608 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4610 if (dump_file)
4611 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4612 cs->caller->uid, cs->callee->uid,
4613 xstrdup (cgraph_node_name (cs->caller)),
4614 xstrdup (cgraph_node_name (cs->callee)));
4616 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4618 pop_cfun ();
4621 for (cs = node->callers; cs; cs = cs->next_caller)
4622 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4623 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4624 compute_inline_parameters (cs->caller, true);
4625 BITMAP_FREE (recomputed_callers);
4627 return true;
4630 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4632 static void
4633 convert_callers (struct cgraph_node *node, tree old_decl,
4634 ipa_parm_adjustment_vec adjustments)
4636 tree old_cur_fndecl = current_function_decl;
4637 basic_block this_block;
4639 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4640 adjustments, false);
4642 current_function_decl = old_cur_fndecl;
4644 if (!encountered_recursive_call)
4645 return;
4647 FOR_EACH_BB (this_block)
4649 gimple_stmt_iterator gsi;
4651 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4653 gimple stmt = gsi_stmt (gsi);
4654 tree call_fndecl;
4655 if (gimple_code (stmt) != GIMPLE_CALL)
4656 continue;
4657 call_fndecl = gimple_call_fndecl (stmt);
4658 if (call_fndecl == old_decl)
4660 if (dump_file)
4661 fprintf (dump_file, "Adjusting recursive call\n");
4662 gimple_call_set_fndecl (stmt, node->symbol.decl);
4663 ipa_modify_call_arguments (NULL, stmt, adjustments);
4668 return;
4671 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4672 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
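/* In outline (a simplified description, not additional behavior): the
   decisions are materialized by creating an "isra" clone of NODE, e.g.

     foo  -->  foo.isra.0

   rewriting the clone's formal parameters and body according to
   ADJUSTMENTS, redirecting all callers (including recursive ones) to the
   clone, and finally making the clone local.  */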
4674 static bool
4675 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4677 struct cgraph_node *new_node;
4678 bool cfg_changed;
4679 VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4681 rebuild_cgraph_edges ();
4682 free_dominance_info (CDI_DOMINATORS);
4683 pop_cfun ();
4684 current_function_decl = NULL_TREE;
4686 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4687 false, NULL, NULL, "isra");
4688 current_function_decl = new_node->symbol.decl;
4689 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4691 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4692 cfg_changed = ipa_sra_modify_function_body (adjustments);
4693 sra_ipa_reset_debug_stmts (adjustments);
4694 convert_callers (new_node, node->symbol.decl, adjustments);
4695 cgraph_make_node_local (new_node);
4696 return cfg_changed;
4699 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4700 its attributes; return true otherwise. NODE is the cgraph node of the current
4701 function. */
4703 static bool
4704 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4706 if (!cgraph_node_can_be_local_p (node))
4708 if (dump_file)
4709 fprintf (dump_file, "Function not local to this compilation unit.\n");
4710 return false;
4713 if (!node->local.can_change_signature)
4715 if (dump_file)
4716 fprintf (dump_file, "Function cannot change signature.\n");
4717 return false;
4720 if (!tree_versionable_function_p (node->symbol.decl))
4722 if (dump_file)
4723 fprintf (dump_file, "Function is not versionable.\n");
4724 return false;
4727 if (DECL_VIRTUAL_P (current_function_decl))
4729 if (dump_file)
4730 fprintf (dump_file, "Function is a virtual method.\n");
4731 return false;
4734 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4735 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4737 if (dump_file)
4738 fprintf (dump_file, "Function too big to be made truly local.\n");
4739 return false;
4742 if (!node->callers)
4744 if (dump_file)
4745 fprintf (dump_file,
4746 "Function has no callers in this compilation unit.\n");
4747 return false;
4750 if (cfun->stdarg)
4752 if (dump_file)
4753 fprintf (dump_file, "Function uses stdarg.\n");
4754 return false;
4757 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4758 return false;
4760 return true;
4763 /* Perform early interprocedural SRA. */
4765 static unsigned int
4766 ipa_early_sra (void)
4768 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4769 ipa_parm_adjustment_vec adjustments;
4770 int ret = 0;
4772 if (!ipa_sra_preliminary_function_checks (node))
4773 return 0;
4775 sra_initialize ();
4776 sra_mode = SRA_MODE_EARLY_IPA;
4778 if (!find_param_candidates ())
4780 if (dump_file)
4781 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4782 goto simple_out;
4785 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4786 NULL, true))
4788 if (dump_file)
4789 fprintf (dump_file, "There are callers with insufficient number of "
4790 "arguments.\n");
4791 goto simple_out;
4794 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4795 func_param_count
4796 * last_basic_block_for_function (cfun));
4797 final_bbs = BITMAP_ALLOC (NULL);
4799 scan_function ();
4800 if (encountered_apply_args)
4802 if (dump_file)
4803 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4804 goto out;
4807 if (encountered_unchangable_recursive_call)
4809 if (dump_file)
4810 fprintf (dump_file, "Function calls itself with insufficient "
4811 "number of arguments.\n");
4812 goto out;
4815 adjustments = analyze_all_param_accesses ();
4816 if (!adjustments)
4817 goto out;
4818 if (dump_file)
4819 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4821 if (modify_function (node, adjustments))
4822 ret = TODO_update_ssa | TODO_cleanup_cfg;
4823 else
4824 ret = TODO_update_ssa;
4825 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4827 statistics_counter_event (cfun, "Unused parameters deleted",
4828 sra_stats.deleted_unused_parameters);
4829 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4830 sra_stats.scalar_by_ref_to_by_val);
4831 statistics_counter_event (cfun, "Aggregate parameters broken up",
4832 sra_stats.aggregate_params_reduced);
4833 statistics_counter_event (cfun, "Aggregate parameter components created",
4834 sra_stats.param_reductions_created);
4836 out:
4837 BITMAP_FREE (final_bbs);
4838 free (bb_dereferences);
4839 simple_out:
4840 sra_deinitialize ();
4841 return ret;
4844 /* Return true iff early IPA-SRA shall be performed. */
4845 static bool
4846 ipa_early_sra_gate (void)
4848 return flag_ipa_sra && dbg_cnt (eipa_sra);
4851 struct gimple_opt_pass pass_early_ipa_sra =
4854 GIMPLE_PASS,
4855 "eipa_sra", /* name */
4856 ipa_early_sra_gate, /* gate */
4857 ipa_early_sra, /* execute */
4858 NULL, /* sub */
4859 NULL, /* next */
4860 0, /* static_pass_number */
4861 TV_IPA_SRA, /* tv_id */
4862 0, /* properties_required */
4863 0, /* properties_provided */
4864 0, /* properties_destroyed */
4865 0, /* todo_flags_start */
4866 TODO_dump_symtab /* todo_flags_finish */