/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
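
/* For illustration only (a hypothetical example, not part of the pass):
   given source code along the lines of

     struct S { int i; float f; };

     int
     foo (struct S *g)
     {
       struct S s;
       s = *g;
       return s.i;
     }

   SRA may replace the uses of s.i with an independent scalar variable
   (replacements are created with the "SR" prefix by
   create_access_replacement below), conceptually yielding

     SR_s_i = g->i;
     return SR_s_i;

   after which the aggregate s itself can become dead and be removed.  */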

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
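
/* For illustration (hypothetical types, assuming a 32-bit int), the accesses
   to the members of

     struct S { struct T { int a; int b; } t; int c; } s;

   might form the following structures, with offsets and sizes in bits:

     s.t    (offset 0,  size 64)   root; first_child points to s.t.a
       s.t.a  (offset 0,  size 32)
       s.t.b  (offset 32, size 32)  next_sibling of s.t.a
     s.c    (offset 64, size 32)   next_grp of s.t  */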

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort, etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", total_scalarization = %d, grp_read = %d, grp_write = %d, "
	     "grp_assignment_read = %d, grp_assignment_write = %d, "
	     "grp_scalar_read = %d, grp_scalar_write = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->total_scalarization, access->grp_read, access->grp_write,
	     access->grp_assignment_read, access->grp_assignment_write,
	     access->grp_scalar_read, access->grp_scalar_write,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */
static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld)
		|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
		|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
		|| !host_integerp (DECL_SIZE (fld), 1)
		|| (AGGREGATE_TYPE_P (ft)
		    && int_bit_position (fld) % BITS_PER_UNIT != 0))
	      return true;

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	return true;

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et))
	return true;

      return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}

/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
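
/* As an illustration (hypothetical types), for

     struct P { int x; int y; };
     struct Q { struct P p; int z; } q;

   completely_scalarize_record creates total_scalarization accesses for the
   scalar leaves q.p.x, q.p.y and q.z, recursing into the nested record p
   rather than creating an access for p as a whole.  */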

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
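
/* For example (illustrative offsets and sizes in bits), the ordering places
   <offset 0, size 64> before <offset 0, size 32>, and that before
   <offset 32, size 32>: at the same offset, the bigger access comes first,
   so representatives of enclosing accesses precede the accesses contained
   in them, which is what build_access_trees below relies on.  */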

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* We treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of an access
   with expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
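
/* For instance (illustrative), for an access expression s.data[3].x the
   functions above produce the name "s$data$3$x", which
   create_access_replacement below can then use to name the new scalar
   variable in dumps and debug info.  */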

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
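
/* As a rough sketch (not actual pass output), for a DECL base s, an OFFSET of
   32 bits and an int EXP_TYPE, the function takes the last branch above and
   returns the equivalent of MEM[(int *) &s + 4B], i.e. a MEM_REF of the
   address of s with a constant byte offset of 4.  */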

/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   component, the function will replicate the last COMPONENT_REF of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
      tree cr_offset = component_ref_field_offset (model->expr);

      gcc_assert (cr_offset && host_integerp (cr_offset, 1));
      offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
      offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, model->type, t, fld,
			      TREE_OPERAND (model->expr, 2));
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}

/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables with a type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
	  || needs_to_live_in_memory (var)
	  || TREE_THIS_VOLATILE (var)
	  || !COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || type_internals_preclude_sra_p (type)
	  /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  || (sra_mode == SRA_MODE_EARLY_INTRA
	      && is_va_list_type (type)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
	  || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
	    break;
	  default:
	    break;
	  }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}

/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
1873 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1874 that seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
1875 sorts of access flags appropriately along the way, notably propagate
1876 grp_read, grp_assignment_read, grp_write and grp_assignment_write from
1877 PARENT to ROOT.
1879 Creating a replacement for a scalar access is considered beneficial if its
1880 grp_hint is set (this means we are either attempting total scalarization or
1881 there is more than one direct read access) or according to the following
1882 table:
1884 Access written to through a scalar type (once or more times)
1886 | Written to in an assignment statement
1888 | | Access read as scalar _once_
1889 | | |
1890 | | | Read in an assignment statement
1891 | | | |
1892 | | | | Scalarize Comment
1893 -----------------------------------------------------------------------------
1894 0 0 0 0 No access for the scalar
1895 0 0 0 1 No access for the scalar
1896 0 0 1 0 No Single read - won't help
1897 0 0 1 1 No The same case
1898 0 1 0 0 No access for the scalar
1899 0 1 0 1 No access for the scalar
1900 0 1 1 0 Yes s = *g; return s.i;
1901 0 1 1 1 Yes The same case as above
1902 1 0 0 0 No Won't help
1903 1 0 0 1 Yes s.i = 1; *g = s;
1904 1 0 1 0 Yes s.i = 5; g = s.i;
1905 1 0 1 1 Yes The same case as above
1906 1 1 0 0 No Won't help.
1907 1 1 0 1 Yes s.i = 1; *g = s;
1908 1 1 1 0 Yes s = *g; return s.i;
1909 1 1 1 1 Yes Any of the above yeses */
1911 static bool
1912 analyze_access_subtree (struct access *root, struct access *parent,
1913 bool allow_replacements)
1915 struct access *child;
1916 HOST_WIDE_INT limit = root->offset + root->size;
1917 HOST_WIDE_INT covered_to = root->offset;
1918 bool scalar = is_gimple_reg_type (root->type);
1919 bool hole = false, sth_created = false;
1921 if (parent)
1923 if (parent->grp_read)
1924 root->grp_read = 1;
1925 if (parent->grp_assignment_read)
1926 root->grp_assignment_read = 1;
1927 if (parent->grp_write)
1928 root->grp_write = 1;
1929 if (parent->grp_assignment_write)
1930 root->grp_assignment_write = 1;
1933 if (root->grp_unscalarizable_region)
1934 allow_replacements = false;
1936 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1937 allow_replacements = false;
1939 for (child = root->first_child; child; child = child->next_sibling)
1941 if (!hole && child->offset < covered_to)
1942 hole = true;
1943 else
1944 covered_to += child->size;
1946 sth_created |= analyze_access_subtree (child, root,
1947 allow_replacements && !scalar);
1949 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1950 hole |= !child->grp_covered;
1953 if (allow_replacements && scalar && !root->first_child
1954 && (root->grp_hint
1955 || ((root->grp_scalar_read || root->grp_assignment_read)
1956 && (root->grp_scalar_write || root->grp_assignment_write))))
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1960 fprintf (dump_file, "Marking ");
1961 print_generic_expr (dump_file, root->base, 0);
1962 fprintf (dump_file, " offset: %u, size: %u: ",
1963 (unsigned) root->offset, (unsigned) root->size);
1964 fprintf (dump_file, " to be replaced.\n");
1967 root->grp_to_be_replaced = 1;
1968 sth_created = true;
1969 hole = false;
1971 else if (covered_to < limit)
1972 hole = true;
1974 if (sth_created && !hole)
1976 root->grp_covered = 1;
1977 return true;
1979 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1980 root->grp_unscalarized_data = 1; /* not covered and written to */
1981 if (sth_created)
1982 return true;
1983 return false;
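/* For illustration (hypothetical code, mirroring the table above): in

     struct S { int i; };
     int f (struct S *g) { struct S s; s = *g; return s.i; }

   s is written in an assignment statement and s.i is read once as a
   scalar, which is row "0 1 1 0" of the table, so a scalar replacement
   for s.i is scheduled.  */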
1986 /* Analyze all access trees linked by next_grp by the means of
1987 analyze_access_subtree. */
1988 static bool
1989 analyze_access_trees (struct access *access)
1991 bool ret = false;
1993 while (access)
1995 if (analyze_access_subtree (access, NULL, true))
1996 ret = true;
1997 access = access->next_grp;
2000 return ret;
2003 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2004 with size SIZE would conflict with an already existing one. If exactly such a child
2005 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2007 static bool
2008 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2009 HOST_WIDE_INT size, struct access **exact_match)
2011 struct access *child;
2013 for (child = lacc->first_child; child; child = child->next_sibling)
2015 if (child->offset == norm_offset && child->size == size)
2017 *exact_match = child;
2018 return true;
2021 if (child->offset < norm_offset + size
2022 && child->offset + child->size > norm_offset)
2023 return true;
2026 return false;
2029 /* Create a new child access of PARENT, with all properties just like MODEL
2030 except for its offset and with its grp_write true and grp_read false.
2031 Return the new access or NULL if it cannot be created. Note that this access
2032 is created long after all splicing and sorting, it's not located in any
2033 access vector and is automatically a representative of its group. */
2035 static struct access *
2036 create_artificial_child_access (struct access *parent, struct access *model,
2037 HOST_WIDE_INT new_offset)
2039 struct access *access;
2040 struct access **child;
2041 tree expr = parent->base;
2043 gcc_assert (!model->grp_unscalarizable_region);
2045 access = (struct access *) pool_alloc (access_pool);
2046 memset (access, 0, sizeof (struct access));
2047 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2048 model->type))
2050 access->grp_no_warning = true;
2051 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2052 new_offset, model, NULL, false);
2055 access->base = parent->base;
2056 access->expr = expr;
2057 access->offset = new_offset;
2058 access->size = model->size;
2059 access->type = model->type;
2060 access->grp_write = true;
2061 access->grp_read = false;
2063 child = &parent->first_child;
2064 while (*child && (*child)->offset < new_offset)
2065 child = &(*child)->next_sibling;
2067 access->next_sibling = *child;
2068 *child = access;
2070 return access;
2074 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2075 true if any new subaccess was created. Additionally, if RACC is a scalar
2076 access but LACC is not, change the type of the latter, if possible. */
2078 static bool
2079 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2081 struct access *rchild;
2082 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2083 bool ret = false;
2085 if (is_gimple_reg_type (lacc->type)
2086 || lacc->grp_unscalarizable_region
2087 || racc->grp_unscalarizable_region)
2088 return false;
2090 if (!lacc->first_child && !racc->first_child
2091 && is_gimple_reg_type (racc->type))
2093 tree t = lacc->base;
2095 lacc->type = racc->type;
2096 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2097 racc->type))
2098 lacc->expr = t;
2099 else
2101 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2102 lacc->base, lacc->offset,
2103 racc, NULL, false);
2104 lacc->grp_no_warning = true;
2106 return false;
2109 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2111 struct access *new_acc = NULL;
2112 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2114 if (rchild->grp_unscalarizable_region)
2115 continue;
2117 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2118 &new_acc))
2120 if (new_acc)
2122 rchild->grp_hint = 1;
2123 new_acc->grp_hint |= new_acc->grp_read;
2124 if (rchild->first_child)
2125 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2127 continue;
2130 rchild->grp_hint = 1;
2131 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2132 if (new_acc)
2134 ret = true;
2135 if (racc->first_child)
2136 propagate_subaccesses_across_link (new_acc, rchild);
2140 return ret;
2143 /* Propagate all subaccesses across assignment links. */
2145 static void
2146 propagate_all_subaccesses (void)
2148 while (work_queue_head)
2150 struct access *racc = pop_access_from_work_queue ();
2151 struct assign_link *link;
2153 gcc_assert (racc->first_link);
2155 for (link = racc->first_link; link; link = link->next)
2157 struct access *lacc = link->lacc;
2159 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2160 continue;
2161 lacc = lacc->group_representative;
2162 if (propagate_subaccesses_across_link (lacc, racc)
2163 && lacc->first_link)
2164 add_access_to_work_queue (lacc);
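/* For illustration only (hypothetical variables): in

     struct S { int i; int j; } a, b;
     b.i = 1;     subaccess of b at offset 0
     a = b;       assign link with lacc == a and racc == b
     ... = a.i;

   propagating across the a = b link creates an artificial subaccess of
   a at offset 0 modelled on the one of b, so that the copy can later be
   performed between the scalar replacements of b.i and a.i.  */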
2169 /* Go through all accesses collected throughout the (intraprocedural) analysis
2170 stage, exclude overlapping ones, identify representatives and build trees
2171 out of them, making decisions about scalarization on the way. Return true
2172 iff there are any to-be-scalarized variables after this stage. */
2174 static bool
2175 analyze_all_variable_accesses (void)
2177 int res = 0;
2178 bitmap tmp = BITMAP_ALLOC (NULL);
2179 bitmap_iterator bi;
2180 unsigned i, max_total_scalarization_size;
2182 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2183 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2185 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2186 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2187 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2189 tree var = referenced_var (i);
2191 if (TREE_CODE (var) == VAR_DECL
2192 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2193 <= max_total_scalarization_size)
2194 && type_consists_of_records_p (TREE_TYPE (var)))
2196 completely_scalarize_record (var, var, 0, var);
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2199 fprintf (dump_file, "Will attempt to totally scalarize ");
2200 print_generic_expr (dump_file, var, 0);
2201 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2206 bitmap_copy (tmp, candidate_bitmap);
2207 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2209 tree var = referenced_var (i);
2210 struct access *access;
2212 access = sort_and_splice_var_accesses (var);
2213 if (!access || !build_access_trees (access))
2214 disqualify_candidate (var,
2215 "No or inhibitingly overlapping accesses.");
2218 propagate_all_subaccesses ();
2220 bitmap_copy (tmp, candidate_bitmap);
2221 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2223 tree var = referenced_var (i);
2224 struct access *access = get_first_repr_for_decl (var);
2226 if (analyze_access_trees (access))
2228 res++;
2229 if (dump_file && (dump_flags & TDF_DETAILS))
2231 fprintf (dump_file, "\nAccess trees for ");
2232 print_generic_expr (dump_file, var, 0);
2233 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2234 dump_access_tree (dump_file, access);
2235 fprintf (dump_file, "\n");
2238 else
2239 disqualify_candidate (var, "No scalar replacements to be created.");
2242 BITMAP_FREE (tmp);
2244 if (res)
2246 statistics_counter_event (cfun, "Scalarized aggregates", res);
2247 return true;
2249 else
2250 return false;
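/* For illustration: on a hypothetical 64-bit target with a MOVE_RATIO
   of 4 when optimizing for speed, max_total_scalarization_size above is
   8 * 8 * 4 == 256 bits, so a record such as

     struct rgba { unsigned char r, g, b, a; };    32 bits in total

   is small enough to be totally scalarized if nothing else prevents
   it.  */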
2253 /* Generate statements copying scalar replacements of accesses within a subtree
2254 into or out of AGG. ACCESS, all its children, siblings and their children
2255 are to be processed. AGG is an aggregate type expression (can be a
2256 declaration but does not have to be, it can for example also be a mem_ref or
2257 a series of handled components). TOP_OFFSET is the offset of the processed
2258 subtree which has to be subtracted from offsets of individual accesses to
2259 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2260 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2261 otherwise copy all. GSI is a statement iterator used to place the new
2262 statements. WRITE should be true when the statements should write from AGG
2263 to the replacement and false when vice versa. If INSERT_AFTER is true, new
2264 statements will be added after the current statement in GSI, otherwise they
2265 will be added before it. */
2267 static void
2268 generate_subtree_copies (struct access *access, tree agg,
2269 HOST_WIDE_INT top_offset,
2270 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2271 gimple_stmt_iterator *gsi, bool write,
2272 bool insert_after, location_t loc)
2276 if (chunk_size && access->offset >= start_offset + chunk_size)
2277 return;
2279 if (access->grp_to_be_replaced
2280 && (chunk_size == 0
2281 || access->offset + access->size > start_offset))
2283 tree expr, repl = get_access_replacement (access);
2284 gimple stmt;
2286 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2287 access, gsi, insert_after);
2289 if (write)
2291 if (access->grp_partial_lhs)
2292 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2293 !insert_after,
2294 insert_after ? GSI_NEW_STMT
2295 : GSI_SAME_STMT);
2296 stmt = gimple_build_assign (repl, expr);
2298 else
2300 TREE_NO_WARNING (repl) = 1;
2301 if (access->grp_partial_lhs)
2302 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2303 !insert_after,
2304 insert_after ? GSI_NEW_STMT
2305 : GSI_SAME_STMT);
2306 stmt = gimple_build_assign (expr, repl);
2308 gimple_set_location (stmt, loc);
2310 if (insert_after)
2311 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2312 else
2313 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2314 update_stmt (stmt);
2315 sra_stats.subtree_copies++;
2318 if (access->first_child)
2319 generate_subtree_copies (access->first_child, agg, top_offset,
2320 start_offset, chunk_size, gsi,
2321 write, insert_after, loc);
2323 access = access->next_sibling;
2325 while (access);
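/* For illustration only (hypothetical replacement names): when copying
   the subtree of a totally scalarized

     struct point { int x; int y; } p;

   from the aggregate to the replacements (WRITE is true), the generated
   statements are roughly

     p$x = p.x;
     p$y = p.y;

   and with WRITE false the direction of each assignment is reversed.  */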
2328 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2329 root of the subtree to be processed. GSI is the statement iterator used
2330 for inserting statements which are added after the current statement if
2331 INSERT_AFTER is true or before it otherwise. */
2333 static void
2334 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2335 bool insert_after, location_t loc)
2338 struct access *child;
2340 if (access->grp_to_be_replaced)
2342 gimple stmt;
2344 stmt = gimple_build_assign (get_access_replacement (access),
2345 build_zero_cst (access->type));
2346 if (insert_after)
2347 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2348 else
2349 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2350 update_stmt (stmt);
2351 gimple_set_location (stmt, loc);
2354 for (child = access->first_child; child; child = child->next_sibling)
2355 init_subtree_with_zero (child, gsi, insert_after, loc);
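/* For illustration (reusing the hypothetical replacement names from the
   example above): for an assignment from an empty CONSTRUCTOR such as

     p = {};

   with p totally scalarized, init_subtree_with_zero emits

     p$x = 0;
     p$y = 0;  */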
2358 /* Search for an access representative for the given expression EXPR and
2359 return it or NULL if it cannot be found. */
2361 static struct access *
2362 get_access_for_expr (tree expr)
2364 HOST_WIDE_INT offset, size, max_size;
2365 tree base;
2367 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2368 a different size than the size of its argument and we need the latter
2369 one. */
2370 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2371 expr = TREE_OPERAND (expr, 0);
2373 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2374 if (max_size == -1 || !DECL_P (base))
2375 return NULL;
2377 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2378 return NULL;
2380 return get_var_base_offset_size_access (base, offset, max_size);
2383 /* Replace the expression EXPR with a scalar replacement if there is one and
2384 generate other statements to do type conversion or subtree copying if
2385 necessary. GSI is used to place newly created statements, WRITE is true if
2386 the expression is being written to (it is on a LHS of a statement or output
2387 in an assembly statement). */
2389 static bool
2390 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2392 location_t loc;
2393 struct access *access;
2394 tree type, bfr;
2396 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2398 bfr = *expr;
2399 expr = &TREE_OPERAND (*expr, 0);
2401 else
2402 bfr = NULL_TREE;
2404 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2405 expr = &TREE_OPERAND (*expr, 0);
2406 access = get_access_for_expr (*expr);
2407 if (!access)
2408 return false;
2409 type = TREE_TYPE (*expr);
2411 loc = gimple_location (gsi_stmt (*gsi));
2412 if (access->grp_to_be_replaced)
2414 tree repl = get_access_replacement (access);
2415 /* If we replace a non-register typed access simply use the original
2416 access expression to extract the scalar component afterwards.
2417 This happens if scalarizing a function return value or parameter
2418 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2419 gcc.c-torture/compile/20011217-1.c.
2421 We also want to use this when accessing a complex or vector which can
2422 be accessed as a different type too, potentially creating a need for
2423 type conversion (see PR42196) and when scalarized unions are involved
2424 in assembler statements (see PR42398). */
2425 if (!useless_type_conversion_p (type, access->type))
2427 tree ref;
2429 ref = build_ref_for_model (loc, access->base, access->offset, access,
2430 NULL, false);
2432 if (write)
2434 gimple stmt;
2436 if (access->grp_partial_lhs)
2437 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2438 false, GSI_NEW_STMT);
2439 stmt = gimple_build_assign (repl, ref);
2440 gimple_set_location (stmt, loc);
2441 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2443 else
2445 gimple stmt;
2447 if (access->grp_partial_lhs)
2448 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2449 true, GSI_SAME_STMT);
2450 stmt = gimple_build_assign (ref, repl);
2451 gimple_set_location (stmt, loc);
2452 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2455 else
2456 *expr = repl;
2457 sra_stats.exprs++;
2460 if (access->first_child)
2462 HOST_WIDE_INT start_offset, chunk_size;
2463 if (bfr
2464 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2465 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2467 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2468 start_offset = access->offset
2469 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2471 else
2472 start_offset = chunk_size = 0;
2474 generate_subtree_copies (access->first_child, access->base, 0,
2475 start_offset, chunk_size, gsi, write, write,
2476 loc);
2478 return true;
2481 /* Where scalar replacements of the RHS have been written to when a replacement
2482 of the LHS of an assignment cannot be directly loaded from a replacement of
2483 the RHS. */
2484 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2485 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2486 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2488 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2489 base aggregate if there are unscalarized data or directly to LHS of the
2490 statement that is pointed to by GSI otherwise. */
2492 static enum unscalarized_data_handling
2493 handle_unscalarized_data_in_subtree (struct access *top_racc,
2494 gimple_stmt_iterator *gsi)
2496 if (top_racc->grp_unscalarized_data)
2498 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2499 gsi, false, false,
2500 gimple_location (gsi_stmt (*gsi)));
2501 return SRA_UDH_RIGHT;
2503 else
2505 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2506 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2507 0, 0, gsi, false, false,
2508 gimple_location (gsi_stmt (*gsi)));
2509 return SRA_UDH_LEFT;
2514 /* Try to generate statements to load all sub-replacements in an access subtree
2515 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2516 If that is not possible, refresh the TOP_RACC base aggregate and load the
2517 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2518 copied. NEW_GSI is a stmt iterator used for statement insertions after the
2519 original assignment, OLD_GSI is used to insert statements before the
2520 assignment. *REFRESHED keeps track of whether we have needed to
2521 refresh replacements of the LHS and from which side of the assignment this
2522 takes place. */
2524 static void
2525 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2526 HOST_WIDE_INT left_offset,
2527 gimple_stmt_iterator *old_gsi,
2528 gimple_stmt_iterator *new_gsi,
2529 enum unscalarized_data_handling *refreshed)
2531 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2532 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2534 if (lacc->grp_to_be_replaced)
2536 struct access *racc;
2537 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2538 gimple stmt;
2539 tree rhs;
2541 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2542 if (racc && racc->grp_to_be_replaced)
2544 rhs = get_access_replacement (racc);
2545 if (!useless_type_conversion_p (lacc->type, racc->type))
2546 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2548 else
2550 /* No suitable access on the right hand side, need to load from
2551 the aggregate. See if we have to update it first... */
2552 if (*refreshed == SRA_UDH_NONE)
2553 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2554 old_gsi);
2556 if (*refreshed == SRA_UDH_LEFT)
2557 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2558 new_gsi, true);
2559 else
2560 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2561 new_gsi, true);
2564 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2565 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2566 gimple_set_location (stmt, loc);
2567 update_stmt (stmt);
2568 sra_stats.subreplacements++;
2570 else if (*refreshed == SRA_UDH_NONE
2571 && lacc->grp_read && !lacc->grp_covered)
2572 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2573 old_gsi);
2575 if (lacc->first_child)
2576 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2577 old_gsi, new_gsi, refreshed);
2581 /* Result code for SRA assignment modification. */
2582 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2583 SRA_AM_MODIFIED, /* stmt changed but not
2584 removed */
2585 SRA_AM_REMOVED }; /* stmt eliminated */
2587 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2588 to the assignment and GSI is the statement iterator pointing at it. Returns
2589 the same values as sra_modify_assign. */
2591 static enum assignment_mod_result
2592 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2594 tree lhs = gimple_assign_lhs (*stmt);
2595 struct access *acc;
2596 location_t loc;
2598 acc = get_access_for_expr (lhs);
2599 if (!acc)
2600 return SRA_AM_NONE;
2602 loc = gimple_location (*stmt);
2603 if (VEC_length (constructor_elt,
2604 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2606 /* I have never seen this code path trigger but if it can happen the
2607 following should handle it gracefully. */
2608 if (access_has_children_p (acc))
2609 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2610 true, true, loc);
2611 return SRA_AM_MODIFIED;
2614 if (acc->grp_covered)
2616 init_subtree_with_zero (acc, gsi, false, loc);
2617 unlink_stmt_vdef (*stmt);
2618 gsi_remove (gsi, true);
2619 return SRA_AM_REMOVED;
2621 else
2623 init_subtree_with_zero (acc, gsi, true, loc);
2624 return SRA_AM_MODIFIED;
2628 /* Create and return a new suitable default definition SSA_NAME for RACC which
2629 is an access describing an uninitialized part of an aggregate that is being
2630 loaded. */
2632 static tree
2633 get_repl_default_def_ssa_name (struct access *racc)
2635 tree repl, decl;
2637 decl = get_unrenamed_access_replacement (racc);
2639 repl = gimple_default_def (cfun, decl);
2640 if (!repl)
2642 repl = make_ssa_name (decl, gimple_build_nop ());
2643 set_default_def (decl, repl);
2646 return repl;
2649 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2650 somewhere in it. */
2652 static inline bool
2653 contains_bitfld_comp_ref_p (const_tree ref)
2655 while (handled_component_p (ref))
2657 if (TREE_CODE (ref) == COMPONENT_REF
2658 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2659 return true;
2660 ref = TREE_OPERAND (ref, 0);
2663 return false;
2666 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2667 bit-field field declaration somewhere in it. */
2669 static inline bool
2670 contains_vce_or_bfcref_p (const_tree ref)
2672 while (handled_component_p (ref))
2674 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2675 || (TREE_CODE (ref) == COMPONENT_REF
2676 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2677 return true;
2678 ref = TREE_OPERAND (ref, 0);
2681 return false;
2684 /* Examine both sides of the assignment statement pointed to by STMT, replace
2685 them with a scalar replacement if there is one and generate copying of
2686 replacements if scalarized aggregates have been used in the assignment. GSI
2687 is used to hold generated statements for type conversions and subtree
2688 copying. */
2690 static enum assignment_mod_result
2691 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2693 struct access *lacc, *racc;
2694 tree lhs, rhs;
2695 bool modify_this_stmt = false;
2696 bool force_gimple_rhs = false;
2697 location_t loc;
2698 gimple_stmt_iterator orig_gsi = *gsi;
2700 if (!gimple_assign_single_p (*stmt))
2701 return SRA_AM_NONE;
2702 lhs = gimple_assign_lhs (*stmt);
2703 rhs = gimple_assign_rhs1 (*stmt);
2705 if (TREE_CODE (rhs) == CONSTRUCTOR)
2706 return sra_modify_constructor_assign (stmt, gsi);
2708 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2709 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2710 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2712 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2713 gsi, false);
2714 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2715 gsi, true);
2716 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2719 lacc = get_access_for_expr (lhs);
2720 racc = get_access_for_expr (rhs);
2721 if (!lacc && !racc)
2722 return SRA_AM_NONE;
2724 loc = gimple_location (*stmt);
2725 if (lacc && lacc->grp_to_be_replaced)
2727 lhs = get_access_replacement (lacc);
2728 gimple_assign_set_lhs (*stmt, lhs);
2729 modify_this_stmt = true;
2730 if (lacc->grp_partial_lhs)
2731 force_gimple_rhs = true;
2732 sra_stats.exprs++;
2735 if (racc && racc->grp_to_be_replaced)
2737 rhs = get_access_replacement (racc);
2738 modify_this_stmt = true;
2739 if (racc->grp_partial_lhs)
2740 force_gimple_rhs = true;
2741 sra_stats.exprs++;
2744 if (modify_this_stmt)
2746 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2748 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2749 ??? This should move to fold_stmt which we simply should
2750 call after building a VIEW_CONVERT_EXPR here. */
2751 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2752 && !contains_bitfld_comp_ref_p (lhs)
2753 && !access_has_children_p (lacc))
2755 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2756 gimple_assign_set_lhs (*stmt, lhs);
2758 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2759 && !contains_vce_or_bfcref_p (rhs)
2760 && !access_has_children_p (racc))
2761 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2763 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2765 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2766 rhs);
2767 if (is_gimple_reg_type (TREE_TYPE (lhs))
2768 && TREE_CODE (lhs) != SSA_NAME)
2769 force_gimple_rhs = true;
2774 /* From this point on, the function deals with assignments in between
2775 aggregates when at least one has scalar reductions of some of its
2776 components. There are three possible scenarios: 1) both the LHS and RHS
2777 have to-be-scalarized components, 2) only the RHS does or 3) only the LHS does.
2779 In the first case, we would like to load the LHS components from RHS
2780 components whenever possible. If that is not possible, we would like to
2781 read it directly from the RHS (after updating it by storing in it its own
2782 components). If there are some necessary unscalarized data in the LHS,
2783 those will be loaded by the original assignment too. If neither of these
2784 cases happen, the original statement can be removed. Most of this is done
2785 by load_assign_lhs_subreplacements.
2787 In the second case, we would like to store all RHS scalarized components
2788 directly into LHS and if they cover the aggregate completely, remove the
2789 statement too. In the third case, we want the LHS components to be loaded
2790 directly from the RHS (DSE will remove the original statement if it
2791 becomes redundant).
2793 This is a bit complex but manageable when types match and when unions do
2794 not cause confusion in a way that we cannot really load a component of LHS
2795 from the RHS or vice versa (the access representing this level can have
2796 subaccesses that are accessible only through a different union field at a
2797 higher level - different from the one used in the examined expression).
2798 Unions are fun.
2800 Therefore, I specially handle a fourth case, happening when there is a
2801 specific type cast or it is impossible to locate a scalarized subaccess on
2802 the other side of the expression. If that happens, I simply "refresh" the
2803 RHS by storing its scalarized components in it, leave the original statement
2804 there to do the copying and then load the scalar replacements of the LHS.
2805 This is what the first branch does. */
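  /* For illustration only (hypothetical code): for an assignment through
     a VIEW_CONVERT_EXPR such as

       s = VIEW_CONVERT_EXPR<struct S>(x);

     the branch below makes no attempt to match subaccesses across the
     copy; it flushes the scalar replacements of x back into x before the
     statement and reloads the replacements of s from s after it, leaving
     the aggregate copy itself in place.  */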
2807 if (gimple_has_volatile_ops (*stmt)
2808 || contains_vce_or_bfcref_p (rhs)
2809 || contains_vce_or_bfcref_p (lhs))
2811 if (access_has_children_p (racc))
2812 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2813 gsi, false, false, loc);
2814 if (access_has_children_p (lacc))
2815 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2816 gsi, true, true, loc);
2817 sra_stats.separate_lhs_rhs_handling++;
2819 else
2821 if (access_has_children_p (lacc) && access_has_children_p (racc))
2823 gimple_stmt_iterator orig_gsi = *gsi;
2824 enum unscalarized_data_handling refreshed;
2826 if (lacc->grp_read && !lacc->grp_covered)
2827 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2828 else
2829 refreshed = SRA_UDH_NONE;
2831 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2832 &orig_gsi, gsi, &refreshed);
2833 if (refreshed != SRA_UDH_RIGHT)
2835 gsi_next (gsi);
2836 unlink_stmt_vdef (*stmt);
2837 gsi_remove (&orig_gsi, true);
2838 sra_stats.deleted++;
2839 return SRA_AM_REMOVED;
2842 else
2844 if (racc)
2846 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2848 if (dump_file)
2850 fprintf (dump_file, "Removing load: ");
2851 print_gimple_stmt (dump_file, *stmt, 0, 0);
2854 if (TREE_CODE (lhs) == SSA_NAME)
2856 rhs = get_repl_default_def_ssa_name (racc);
2857 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2858 TREE_TYPE (rhs)))
2859 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2860 TREE_TYPE (lhs), rhs);
2862 else
2864 if (racc->first_child)
2865 generate_subtree_copies (racc->first_child, lhs,
2866 racc->offset, 0, 0, gsi,
2867 false, false, loc);
2869 gcc_assert (*stmt == gsi_stmt (*gsi));
2870 unlink_stmt_vdef (*stmt);
2871 gsi_remove (gsi, true);
2872 sra_stats.deleted++;
2873 return SRA_AM_REMOVED;
2876 else if (racc->first_child)
2877 generate_subtree_copies (racc->first_child, lhs, racc->offset,
2878 0, 0, gsi, false, true, loc);
2880 if (access_has_children_p (lacc))
2881 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2882 0, 0, gsi, true, true, loc);
2886 /* This gimplification must be done after generate_subtree_copies, lest we
2887 insert the subtree copies in the middle of the gimplified sequence. */
2888 if (force_gimple_rhs)
2889 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2890 true, GSI_SAME_STMT);
2891 if (gimple_assign_rhs1 (*stmt) != rhs)
2893 modify_this_stmt = true;
2894 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2895 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2898 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2901 /* Traverse the function body and make all modifications as decided in
2902 analyze_all_variable_accesses. Return true iff the CFG has been
2903 changed. */
2905 static bool
2906 sra_modify_function_body (void)
2908 bool cfg_changed = false;
2909 basic_block bb;
2911 FOR_EACH_BB (bb)
2913 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2914 while (!gsi_end_p (gsi))
2916 gimple stmt = gsi_stmt (gsi);
2917 enum assignment_mod_result assign_result;
2918 bool modified = false, deleted = false;
2919 tree *t;
2920 unsigned i;
2922 switch (gimple_code (stmt))
2924 case GIMPLE_RETURN:
2925 t = gimple_return_retval_ptr (stmt);
2926 if (*t != NULL_TREE)
2927 modified |= sra_modify_expr (t, &gsi, false);
2928 break;
2930 case GIMPLE_ASSIGN:
2931 assign_result = sra_modify_assign (&stmt, &gsi);
2932 modified |= assign_result == SRA_AM_MODIFIED;
2933 deleted = assign_result == SRA_AM_REMOVED;
2934 break;
2936 case GIMPLE_CALL:
2937 /* Operands must be processed before the lhs. */
2938 for (i = 0; i < gimple_call_num_args (stmt); i++)
2940 t = gimple_call_arg_ptr (stmt, i);
2941 modified |= sra_modify_expr (t, &gsi, false);
2944 if (gimple_call_lhs (stmt))
2946 t = gimple_call_lhs_ptr (stmt);
2947 modified |= sra_modify_expr (t, &gsi, true);
2949 break;
2951 case GIMPLE_ASM:
2952 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2954 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2955 modified |= sra_modify_expr (t, &gsi, false);
2957 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2959 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2960 modified |= sra_modify_expr (t, &gsi, true);
2962 break;
2964 default:
2965 break;
2968 if (modified)
2970 update_stmt (stmt);
2971 if (maybe_clean_eh_stmt (stmt)
2972 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2973 cfg_changed = true;
2975 if (!deleted)
2976 gsi_next (&gsi);
2980 return cfg_changed;
2983 /* Generate statements initializing scalar replacements of parts of function
2984 parameters. */
2986 static void
2987 initialize_parameter_reductions (void)
2989 gimple_stmt_iterator gsi;
2990 gimple_seq seq = NULL;
2991 tree parm;
2993 for (parm = DECL_ARGUMENTS (current_function_decl);
2994 parm;
2995 parm = DECL_CHAIN (parm))
2997 VEC (access_p, heap) *access_vec;
2998 struct access *access;
3000 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3001 continue;
3002 access_vec = get_base_access_vector (parm);
3003 if (!access_vec)
3004 continue;
3006 if (!seq)
3008 seq = gimple_seq_alloc ();
3009 gsi = gsi_start (seq);
3012 for (access = VEC_index (access_p, access_vec, 0);
3013 access;
3014 access = access->next_grp)
3015 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3016 EXPR_LOCATION (parm));
3019 if (seq)
3020 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
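/* For illustration (hypothetical parameter, reusing the replacement
   names from the examples above): for

     int f (struct point p) { ... }

   with p scalarized, the sequence inserted on the edge out of the entry
   block is roughly

     p$x = p.x;
     p$y = p.y;  */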
3023 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3024 it reveals there are components of some aggregates to be scalarized, it runs
3025 the required transformations. */
3026 static unsigned int
3027 perform_intra_sra (void)
3029 int ret = 0;
3030 sra_initialize ();
3032 if (!find_var_candidates ())
3033 goto out;
3035 if (!scan_function ())
3036 goto out;
3038 if (!analyze_all_variable_accesses ())
3039 goto out;
3041 if (sra_modify_function_body ())
3042 ret = TODO_update_ssa | TODO_cleanup_cfg;
3043 else
3044 ret = TODO_update_ssa;
3045 initialize_parameter_reductions ();
3047 statistics_counter_event (cfun, "Scalar replacements created",
3048 sra_stats.replacements);
3049 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3050 statistics_counter_event (cfun, "Subtree copy stmts",
3051 sra_stats.subtree_copies);
3052 statistics_counter_event (cfun, "Subreplacement stmts",
3053 sra_stats.subreplacements);
3054 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3055 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3056 sra_stats.separate_lhs_rhs_handling);
3058 out:
3059 sra_deinitialize ();
3060 return ret;
3063 /* Perform early intraprocedural SRA. */
3064 static unsigned int
3065 early_intra_sra (void)
3067 sra_mode = SRA_MODE_EARLY_INTRA;
3068 return perform_intra_sra ();
3071 /* Perform "late" intraprocedural SRA. */
3072 static unsigned int
3073 late_intra_sra (void)
3075 sra_mode = SRA_MODE_INTRA;
3076 return perform_intra_sra ();
3080 static bool
3081 gate_intra_sra (void)
3083 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3087 struct gimple_opt_pass pass_sra_early =
3090 GIMPLE_PASS,
3091 "esra", /* name */
3092 gate_intra_sra, /* gate */
3093 early_intra_sra, /* execute */
3094 NULL, /* sub */
3095 NULL, /* next */
3096 0, /* static_pass_number */
3097 TV_TREE_SRA, /* tv_id */
3098 PROP_cfg | PROP_ssa, /* properties_required */
3099 0, /* properties_provided */
3100 0, /* properties_destroyed */
3101 0, /* todo_flags_start */
3102 TODO_update_ssa
3103 | TODO_ggc_collect
3104 | TODO_verify_ssa /* todo_flags_finish */
3108 struct gimple_opt_pass pass_sra =
3111 GIMPLE_PASS,
3112 "sra", /* name */
3113 gate_intra_sra, /* gate */
3114 late_intra_sra, /* execute */
3115 NULL, /* sub */
3116 NULL, /* next */
3117 0, /* static_pass_number */
3118 TV_TREE_SRA, /* tv_id */
3119 PROP_cfg | PROP_ssa, /* properties_required */
3120 0, /* properties_provided */
3121 0, /* properties_destroyed */
3122 TODO_update_address_taken, /* todo_flags_start */
3123 TODO_update_ssa
3124 | TODO_ggc_collect
3125 | TODO_verify_ssa /* todo_flags_finish */
3130 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3131 parameter. */
3133 static bool
3134 is_unused_scalar_param (tree parm)
3136 tree name;
3137 return (is_gimple_reg (parm)
3138 && (!(name = gimple_default_def (cfun, parm))
3139 || has_zero_uses (name)));
3142 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3143 examine whether there are any direct or otherwise infeasible ones. If so,
3144 return true, otherwise return false. PARM must be a gimple register with a
3145 non-NULL default definition. */
3147 static bool
3148 ptr_parm_has_direct_uses (tree parm)
3150 imm_use_iterator ui;
3151 gimple stmt;
3152 tree name = gimple_default_def (cfun, parm);
3153 bool ret = false;
3155 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3157 int uses_ok = 0;
3158 use_operand_p use_p;
3160 if (is_gimple_debug (stmt))
3161 continue;
3163 /* Valid uses include dereferences on the lhs and the rhs. */
3164 if (gimple_has_lhs (stmt))
3166 tree lhs = gimple_get_lhs (stmt);
3167 while (handled_component_p (lhs))
3168 lhs = TREE_OPERAND (lhs, 0);
3169 if (TREE_CODE (lhs) == MEM_REF
3170 && TREE_OPERAND (lhs, 0) == name
3171 && integer_zerop (TREE_OPERAND (lhs, 1))
3172 && types_compatible_p (TREE_TYPE (lhs),
3173 TREE_TYPE (TREE_TYPE (name))))
3174 uses_ok++;
3176 if (gimple_assign_single_p (stmt))
3178 tree rhs = gimple_assign_rhs1 (stmt);
3179 while (handled_component_p (rhs))
3180 rhs = TREE_OPERAND (rhs, 0);
3181 if (TREE_CODE (rhs) == MEM_REF
3182 && TREE_OPERAND (rhs, 0) == name
3183 && integer_zerop (TREE_OPERAND (rhs, 1))
3184 && types_compatible_p (TREE_TYPE (rhs),
3185 TREE_TYPE (TREE_TYPE (name))))
3186 uses_ok++;
3188 else if (is_gimple_call (stmt))
3190 unsigned i;
3191 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3193 tree arg = gimple_call_arg (stmt, i);
3194 while (handled_component_p (arg))
3195 arg = TREE_OPERAND (arg, 0);
3196 if (TREE_CODE (arg) == MEM_REF
3197 && TREE_OPERAND (arg, 0) == name
3198 && integer_zerop (TREE_OPERAND (arg, 1))
3199 && types_compatible_p (TREE_TYPE (arg),
3200 TREE_TYPE (TREE_TYPE (name))))
3201 uses_ok++;
3205 /* If the number of valid uses does not match the number of
3206 uses in this stmt there is an unhandled use. */
3207 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3208 --uses_ok;
3210 if (uses_ok != 0)
3211 ret = true;
3213 if (ret)
3214 BREAK_FROM_IMM_USE_STMT (ui);
3217 return ret;
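/* For illustration only (hypothetical functions): in

     int f (int *p) { return *p; }

   the only use of p's default definition is a MEM_REF with a zero
   offset and a compatible type, so ptr_parm_has_direct_uses returns
   false.  By contrast, in

     int *g (int *p) { return p; }

   the pointer value itself is used, the use is not matched by any of
   the checks above and the function returns true, disqualifying p.  */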
3220 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3221 them in candidate_bitmap. Note that these do not necessarily include
3222 parameters which are unused and thus can be removed. Return true iff any
3223 such candidate has been found. */
3225 static bool
3226 find_param_candidates (void)
3228 tree parm;
3229 int count = 0;
3230 bool ret = false;
3232 for (parm = DECL_ARGUMENTS (current_function_decl);
3233 parm;
3234 parm = DECL_CHAIN (parm))
3236 tree type = TREE_TYPE (parm);
3238 count++;
3240 if (TREE_THIS_VOLATILE (parm)
3241 || TREE_ADDRESSABLE (parm)
3242 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3243 continue;
3245 if (is_unused_scalar_param (parm))
3247 ret = true;
3248 continue;
3251 if (POINTER_TYPE_P (type))
3253 type = TREE_TYPE (type);
3255 if (TREE_CODE (type) == FUNCTION_TYPE
3256 || TYPE_VOLATILE (type)
3257 || (TREE_CODE (type) == ARRAY_TYPE
3258 && TYPE_NONALIASED_COMPONENT (type))
3259 || !is_gimple_reg (parm)
3260 || is_va_list_type (type)
3261 || ptr_parm_has_direct_uses (parm))
3262 continue;
3264 else if (!AGGREGATE_TYPE_P (type))
3265 continue;
3267 if (!COMPLETE_TYPE_P (type)
3268 || !host_integerp (TYPE_SIZE (type), 1)
3269 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3270 || (AGGREGATE_TYPE_P (type)
3271 && type_internals_preclude_sra_p (type)))
3272 continue;
3274 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3275 ret = true;
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3278 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3279 print_generic_expr (dump_file, parm, 0);
3280 fprintf (dump_file, "\n");
3284 func_param_count = count;
3285 return ret;
3288 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3289 maybe_modified. */
3291 static bool
3292 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3293 void *data)
3295 struct access *repr = (struct access *) data;
3297 repr->grp_maybe_modified = 1;
3298 return true;
3301 /* Analyze what representatives (in linked lists accessible from
3302 REPRESENTATIVES) can be modified by side effects of statements in the
3303 current function. */
3305 static void
3306 analyze_modified_params (VEC (access_p, heap) *representatives)
3308 int i;
3310 for (i = 0; i < func_param_count; i++)
3312 struct access *repr;
3314 for (repr = VEC_index (access_p, representatives, i);
3315 repr;
3316 repr = repr->next_grp)
3318 struct access *access;
3319 bitmap visited;
3320 ao_ref ar;
3322 if (no_accesses_p (repr))
3323 continue;
3324 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3325 || repr->grp_maybe_modified)
3326 continue;
3328 ao_ref_init (&ar, repr->expr);
3329 visited = BITMAP_ALLOC (NULL);
3330 for (access = repr; access; access = access->next_sibling)
3332 /* All accesses are read ones, otherwise grp_maybe_modified would
3333 be trivially set. */
3334 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3335 mark_maybe_modified, repr, &visited);
3336 if (repr->grp_maybe_modified)
3337 break;
3339 BITMAP_FREE (visited);
3344 /* Propagate distances in bb_dereferences in the opposite direction to the
3345 control flow edges, in each step storing the maximum of the current value
3346 and the minimum of all successors. These steps are repeated until the table
3347 stabilizes. Note that BBs which might terminate the function (according to
3348 the final_bbs bitmap) are never updated in this way. */
3350 static void
3351 propagate_dereference_distances (void)
3353 VEC (basic_block, heap) *queue;
3354 basic_block bb;
3356 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3357 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3358 FOR_EACH_BB (bb)
3360 VEC_quick_push (basic_block, queue, bb);
3361 bb->aux = bb;
3364 while (!VEC_empty (basic_block, queue))
3366 edge_iterator ei;
3367 edge e;
3368 bool change = false;
3369 int i;
3371 bb = VEC_pop (basic_block, queue);
3372 bb->aux = NULL;
3374 if (bitmap_bit_p (final_bbs, bb->index))
3375 continue;
3377 for (i = 0; i < func_param_count; i++)
3379 int idx = bb->index * func_param_count + i;
3380 bool first = true;
3381 HOST_WIDE_INT inh = 0;
3383 FOR_EACH_EDGE (e, ei, bb->succs)
3385 int succ_idx = e->dest->index * func_param_count + i;
3387 if (e->src == EXIT_BLOCK_PTR)
3388 continue;
3390 if (first)
3392 first = false;
3393 inh = bb_dereferences [succ_idx];
3395 else if (bb_dereferences [succ_idx] < inh)
3396 inh = bb_dereferences [succ_idx];
3399 if (!first && bb_dereferences[idx] < inh)
3401 bb_dereferences[idx] = inh;
3402 change = true;
3406 if (change && !bitmap_bit_p (final_bbs, bb->index))
3407 FOR_EACH_EDGE (e, ei, bb->preds)
3409 if (e->src->aux)
3410 continue;
3412 e->src->aux = e->src;
3413 VEC_quick_push (basic_block, queue, e->src);
3417 VEC_free (basic_block, heap, queue);
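/* For illustration only (hypothetical code):

     int f (struct S *p, int c)
     {
       if (c)
         return p->i;          dereferences p on this path
       else
         return p->i + 1;      and on this one too
     }

   every path from the entry block dereferences p, so the minimum
   propagated back to the entry block covers p->i and the dereference
   may be moved to the callers.  Had the else branch returned a
   constant, its zero distance would become the minimum and p would be
   marked grp_not_necessarilly_dereferenced.  */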
3420 /* Dump a dereferences TABLE with heading STR to file F. */
3422 static void
3423 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3425 basic_block bb;
3427 fprintf (f, "%s", str);
3428 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3430 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3431 if (bb != EXIT_BLOCK_PTR)
3433 int i;
3434 for (i = 0; i < func_param_count; i++)
3436 int idx = bb->index * func_param_count + i;
3437 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3440 fprintf (f, "\n");
3442 fprintf (f, "\n");
3445 /* Determine which (parts of) parameters passed by reference and not assigned
3446 to are not certainly dereferenced in this function and thus the
3447 dereferencing cannot be safely moved to the caller without potentially
3448 introducing a segfault. Mark such REPRESENTATIVES as
3449 grp_not_necessarilly_dereferenced.
3451 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3452 part, is calculated for each pointer parameter rather than a simple boolean,
3453 in order to handle cases when only a fraction of the whole
3454 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3455 an example).
3457 The maximum dereference distances for each pointer parameter and BB are
3458 already stored in bb_dereferences. This routine simply propagates these
3459 values upwards by propagate_dereference_distances and then compares the
3460 distances of individual parameters in the ENTRY BB to the equivalent
3461 distances of each representative of a (fraction of a) parameter. */
3463 static void
3464 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3466 int i;
3468 if (dump_file && (dump_flags & TDF_DETAILS))
3469 dump_dereferences_table (dump_file,
3470 "Dereference table before propagation:\n",
3471 bb_dereferences);
3473 propagate_dereference_distances ();
3475 if (dump_file && (dump_flags & TDF_DETAILS))
3476 dump_dereferences_table (dump_file,
3477 "Dereference table after propagation:\n",
3478 bb_dereferences);
3480 for (i = 0; i < func_param_count; i++)
3482 struct access *repr = VEC_index (access_p, representatives, i);
3483 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3485 if (!repr || no_accesses_p (repr))
3486 continue;
3490 if ((repr->offset + repr->size) > bb_dereferences[idx])
3491 repr->grp_not_necessarilly_dereferenced = 1;
3492 repr = repr->next_grp;
3494 while (repr);
3498 /* Return the representative access for the parameter declaration PARM if it is
3499 a scalar passed by reference which is not written to and the pointer value
3500 is not used directly. Thus, if it is legal to dereference it in the caller
3501 and we can rule out modifications through aliases, such a parameter should be
3502 turned into one passed by value. Return NULL otherwise. */
3504 static struct access *
3505 unmodified_by_ref_scalar_representative (tree parm)
3507 int i, access_count;
3508 struct access *repr;
3509 VEC (access_p, heap) *access_vec;
3511 access_vec = get_base_access_vector (parm);
3512 gcc_assert (access_vec);
3513 repr = VEC_index (access_p, access_vec, 0);
3514 if (repr->write)
3515 return NULL;
3516 repr->group_representative = repr;
3518 access_count = VEC_length (access_p, access_vec);
3519 for (i = 1; i < access_count; i++)
3521 struct access *access = VEC_index (access_p, access_vec, i);
3522 if (access->write)
3523 return NULL;
3524 access->group_representative = repr;
3525 access->next_sibling = repr->next_sibling;
3526 repr->next_sibling = access;
3529 repr->grp_read = 1;
3530 repr->grp_scalar_ptr = 1;
3531 return repr;
3534 /* Return true iff this access precludes IPA-SRA of the parameter it is
3535 associated with. */
3537 static bool
3538 access_precludes_ipa_sra_p (struct access *access)
3540 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3541 is incompatible assign in a call statement (and possibly even in asm
3542 statements). This can be relaxed by using a new temporary but only for
3543 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3544 intraprocedural SRA we deal with this by keeping the old aggregate around,
3545 something we cannot do in IPA-SRA.) */
3546 if (access->write
3547 && (is_gimple_call (access->stmt)
3548 || gimple_code (access->stmt) == GIMPLE_ASM))
3549 return true;
3551 return false;
3555 /* Sort collected accesses for parameter PARM, identify representatives for
3556 each accessed region and link them together. Return NULL if there are
3557 different but overlapping accesses, return the special ptr value meaning
3558 there are no accesses for this parameter if that is the case, and return the
3559 first representative otherwise. Set *RO_GRP if there is a group of accesses
3560 with only read (i.e. no write) accesses. */
3562 static struct access *
3563 splice_param_accesses (tree parm, bool *ro_grp)
3565 int i, j, access_count, group_count;
3566 int agg_size, total_size = 0;
3567 struct access *access, *res, **prev_acc_ptr = &res;
3568 VEC (access_p, heap) *access_vec;
3570 access_vec = get_base_access_vector (parm);
3571 if (!access_vec)
3572 return &no_accesses_representant;
3573 access_count = VEC_length (access_p, access_vec);
3575 VEC_qsort (access_p, access_vec, compare_access_positions);
3577 i = 0;
3578 total_size = 0;
3579 group_count = 0;
3580 while (i < access_count)
3582 bool modification;
3583 tree a1_alias_type;
3584 access = VEC_index (access_p, access_vec, i);
3585 modification = access->write;
3586 if (access_precludes_ipa_sra_p (access))
3587 return NULL;
3588 a1_alias_type = reference_alias_ptr_type (access->expr);
3590 /* Access is about to become group representative unless we find some
3591 nasty overlap which would preclude us from breaking this parameter
3592 apart. */
3594 j = i + 1;
3595 while (j < access_count)
3597 struct access *ac2 = VEC_index (access_p, access_vec, j);
3598 if (ac2->offset != access->offset)
3600 /* All or nothing law for parameters. */
3601 if (access->offset + access->size > ac2->offset)
3602 return NULL;
3603 else
3604 break;
3606 else if (ac2->size != access->size)
3607 return NULL;
3609 if (access_precludes_ipa_sra_p (ac2)
3610 || (ac2->type != access->type
3611 && (TREE_ADDRESSABLE (ac2->type)
3612 || TREE_ADDRESSABLE (access->type)))
3613 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3614 return NULL;
3616 modification |= ac2->write;
3617 ac2->group_representative = access;
3618 ac2->next_sibling = access->next_sibling;
3619 access->next_sibling = ac2;
3620 j++;
3623 group_count++;
3624 access->grp_maybe_modified = modification;
3625 if (!modification)
3626 *ro_grp = true;
3627 *prev_acc_ptr = access;
3628 prev_acc_ptr = &access->next_grp;
3629 total_size += access->size;
3630 i = j;
3633 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3634 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3635 else
3636 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3637 if (total_size >= agg_size)
3638 return NULL;
3640 gcc_assert (group_count > 0);
3641 return res;
3644 /* Decide whether parameters with representative accesses given by REPR should
3645 be reduced into components. */
3647 static int
3648 decide_one_param_reduction (struct access *repr)
3650 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3651 bool by_ref;
3652 tree parm;
3654 parm = repr->base;
3655 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3656 gcc_assert (cur_parm_size > 0);
3658 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3660 by_ref = true;
3661 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3663 else
3665 by_ref = false;
3666 agg_size = cur_parm_size;
3669 if (dump_file)
3671 struct access *acc;
3672 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3673 print_generic_expr (dump_file, parm, 0);
3674 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3675 for (acc = repr; acc; acc = acc->next_grp)
3676 dump_access (dump_file, acc, true);
3679 total_size = 0;
3680 new_param_count = 0;
3682 for (; repr; repr = repr->next_grp)
3684 gcc_assert (parm == repr->base);
3686 /* Taking the address of a non-addressable field is verboten. */
3687 if (by_ref && repr->non_addressable)
3688 return 0;
3690 if (!by_ref || (!repr->grp_maybe_modified
3691 && !repr->grp_not_necessarilly_dereferenced))
3692 total_size += repr->size;
3693 else
3694 total_size += cur_parm_size;
3696 new_param_count++;
3699 gcc_assert (new_param_count > 0);
3701 if (optimize_function_for_size_p (cfun))
3702 parm_size_limit = cur_parm_size;
3703 else
3704 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3705 * cur_parm_size);
3707 if (total_size < agg_size
3708 && total_size <= parm_size_limit)
3710 if (dump_file)
3711 fprintf (dump_file, " ....will be split into %i components\n",
3712 new_param_count);
3713 return new_param_count;
3715 else
3716 return 0;
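/* For illustration only (hypothetical signature): for

     struct S { int i; double d; char buf[100]; };
     int f (struct S *p) { ... }

   where only p->i and p->d are read (never modified and certainly
   dereferenced), total_size is 96 bits while agg_size is the full size
   of struct S; with the default ipa-sra-ptr-growth-factor of 2 the
   limit is 128 bits, so the function above returns 2 and the parameter
   can be split into an int and a double passed by value.  */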
3719 /* The order of the following enums is important, we need to do extra work for
3720 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3721 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3722 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3724 /* Identify representatives of all accesses to all candidate parameters for
3725 IPA-SRA. Return result based on what representatives have been found. */
3727 static enum ipa_splicing_result
3728 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3730 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3731 tree parm;
3732 struct access *repr;
3734 *representatives = VEC_alloc (access_p, heap, func_param_count);
3736 for (parm = DECL_ARGUMENTS (current_function_decl);
3737 parm;
3738 parm = DECL_CHAIN (parm))
3740 if (is_unused_scalar_param (parm))
3742 VEC_quick_push (access_p, *representatives,
3743 &no_accesses_representant);
3744 if (result == NO_GOOD_ACCESS)
3745 result = UNUSED_PARAMS;
3747 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3748 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3749 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3751 repr = unmodified_by_ref_scalar_representative (parm);
3752 VEC_quick_push (access_p, *representatives, repr);
3753 if (repr)
3754 result = UNMODIF_BY_REF_ACCESSES;
3756 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3758 bool ro_grp = false;
3759 repr = splice_param_accesses (parm, &ro_grp);
3760 VEC_quick_push (access_p, *representatives, repr);
3762 if (repr && !no_accesses_p (repr))
3764 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3766 if (ro_grp)
3767 result = UNMODIF_BY_REF_ACCESSES;
3768 else if (result < MODIF_BY_REF_ACCESSES)
3769 result = MODIF_BY_REF_ACCESSES;
3771 else if (result < BY_VAL_ACCESSES)
3772 result = BY_VAL_ACCESSES;
3774 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3775 result = UNUSED_PARAMS;
3777 else
3778 VEC_quick_push (access_p, *representatives, NULL);
3781 if (result == NO_GOOD_ACCESS)
3783 VEC_free (access_p, heap, *representatives);
3784 *representatives = NULL;
3785 return NO_GOOD_ACCESS;
3788 return result;
3791 /* Return the index of BASE in PARMS. Abort if it is not found. */
3793 static inline int
3794 get_param_index (tree base, VEC(tree, heap) *parms)
3796 int i, len;
3798 len = VEC_length (tree, parms);
3799 for (i = 0; i < len; i++)
3800 if (VEC_index (tree, parms, i) == base)
3801 return i;
3802 gcc_unreachable ();
3805 /* Convert the decisions made at the representative level into compact
3806 parameter adjustments. REPRESENTATIVES are pointers to first
3807 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
3808 final number of adjustments. */
3810 static ipa_parm_adjustment_vec
3811 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3812 int adjustments_count)
3814 VEC (tree, heap) *parms;
3815 ipa_parm_adjustment_vec adjustments;
3816 tree parm;
3817 int i;
3819 gcc_assert (adjustments_count > 0);
3820 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3821 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3822 parm = DECL_ARGUMENTS (current_function_decl);
3823 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3825 struct access *repr = VEC_index (access_p, representatives, i);
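/* No representative means the parameter is left intact and simply copied;
   the special no-accesses representative means it is unused and can be
   removed altogether.  */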
3827 if (!repr || no_accesses_p (repr))
3829 struct ipa_parm_adjustment *adj;
3831 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3832 memset (adj, 0, sizeof (*adj));
3833 adj->base_index = get_param_index (parm, parms);
3834 adj->base = parm;
3835 if (!repr)
3836 adj->copy_param = 1;
3837 else
3838 adj->remove_param = 1;
3840 else
3842 struct ipa_parm_adjustment *adj;
3843 int index = get_param_index (parm, parms);
3845 for (; repr; repr = repr->next_grp)
3847 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3848 memset (adj, 0, sizeof (*adj));
3849 gcc_assert (repr->base == parm);
3850 adj->base_index = index;
3851 adj->base = repr->base;
3852 adj->type = repr->type;
3853 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
3854 adj->offset = repr->offset;
3855 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3856 && (repr->grp_maybe_modified
3857 || repr->grp_not_necessarilly_dereferenced));
3862 VEC_free (tree, heap, parms);
3863 return adjustments;
3866 /* Analyze the collected accesses and produce a plan for what to do with the
3867 parameters, in the form of a vector of adjustments; NULL means no changes. */
3869 static ipa_parm_adjustment_vec
3870 analyze_all_param_accesses (void)
3872 enum ipa_splicing_result repr_state;
3873 bool proceed = false;
3874 int i, adjustments_count = 0;
3875 VEC (access_p, heap) *representatives;
3876 ipa_parm_adjustment_vec adjustments;
3878 repr_state = splice_all_param_accesses (&representatives);
3879 if (repr_state == NO_GOOD_ACCESS)
3880 return NULL;
3882 /* If there are any parameters passed by reference which are not modified
3883 directly, we need to check whether they can be modified indirectly. */
3884 if (repr_state == UNMODIF_BY_REF_ACCESSES)
3886 analyze_caller_dereference_legality (representatives);
3887 analyze_modified_params (representatives);
3890 for (i = 0; i < func_param_count; i++)
3892 struct access *repr = VEC_index (access_p, representatives, i);
3894 if (repr && !no_accesses_p (repr))
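	  /* A scalar passed by reference can become a by-value scalar only if
	     it is certainly dereferenced (so the caller can do it instead)
	     and never modified through the pointer.  */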
3896 if (repr->grp_scalar_ptr)
3898 adjustments_count++;
3899 if (repr->grp_not_necessarilly_dereferenced
3900 || repr->grp_maybe_modified)
3901 VEC_replace (access_p, representatives, i, NULL);
3902 else
3904 proceed = true;
3905 sra_stats.scalar_by_ref_to_by_val++;
3908 else
3910 int new_components = decide_one_param_reduction (repr);
3912 if (new_components == 0)
3914 VEC_replace (access_p, representatives, i, NULL);
3915 adjustments_count++;
3917 else
3919 adjustments_count += new_components;
3920 sra_stats.aggregate_params_reduced++;
3921 sra_stats.param_reductions_created += new_components;
3922 proceed = true;
3926 else
3928 if (no_accesses_p (repr))
3930 proceed = true;
3931 sra_stats.deleted_unused_parameters++;
3933 adjustments_count++;
3937 if (!proceed && dump_file)
3938 fprintf (dump_file, "NOT proceeding to change params.\n");
3940 if (proceed)
3941 adjustments = turn_representatives_into_adjustments (representatives,
3942 adjustments_count);
3943 else
3944 adjustments = NULL;
3946 VEC_free (access_p, heap, representatives);
3947 return adjustments;
3950 /* If a parameter replacement identified by ADJ does not yet exist in the form
3951 of a declaration, create and record it; otherwise return the previously
3952 created one. */
3954 static tree
3955 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3957 tree repl;
3958 if (!adj->new_ssa_base)
3960 char *pretty_name = make_fancy_name (adj->base);
3962 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
3963 DECL_NAME (repl) = get_identifier (pretty_name);
3964 obstack_free (&name_obstack, pretty_name);
3966 get_var_ann (repl);
3967 add_referenced_var (repl);
3968 adj->new_ssa_base = repl;
3970 else
3971 repl = adj->new_ssa_base;
3972 return repl;
3975 /* Find the first adjustment for a particular parameter BASE in a vector of
3976 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
3977 adjustment. */
3979 static struct ipa_parm_adjustment *
3980 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3982 int i, len;
3984 len = VEC_length (ipa_parm_adjustment_t, adjustments);
3985 for (i = 0; i < len; i++)
3987 struct ipa_parm_adjustment *adj;
3989 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3990 if (!adj->copy_param && adj->base == base)
3991 return adj;
3994 return NULL;
3997 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
3998 removed because its value is not used, replace the SSA_NAME, together with
3999 all of its uses, with one based on a newly created VAR_DECL, and return
4000 true. ADJUSTMENTS is a pointer to an adjustments vector. */
4002 static bool
4003 replace_removed_params_ssa_names (gimple stmt,
4004 ipa_parm_adjustment_vec adjustments)
4006 struct ipa_parm_adjustment *adj;
4007 tree lhs, decl, repl, name;
4009 if (gimple_code (stmt) == GIMPLE_PHI)
4010 lhs = gimple_phi_result (stmt);
4011 else if (is_gimple_assign (stmt))
4012 lhs = gimple_assign_lhs (stmt);
4013 else if (is_gimple_call (stmt))
4014 lhs = gimple_call_lhs (stmt);
4015 else
4016 gcc_unreachable ();
4018 if (TREE_CODE (lhs) != SSA_NAME)
4019 return false;
4020 decl = SSA_NAME_VAR (lhs);
4021 if (TREE_CODE (decl) != PARM_DECL)
4022 return false;
4024 adj = get_adjustment_for_base (adjustments, decl);
4025 if (!adj)
4026 return false;
4028 repl = get_replaced_param_substitute (adj);
4029 name = make_ssa_name (repl, stmt);
4031 if (dump_file)
4033 fprintf (dump_file, "replacing an SSA name of a removed param ");
4034 print_generic_expr (dump_file, lhs, 0);
4035 fprintf (dump_file, " with ");
4036 print_generic_expr (dump_file, name, 0);
4037 fprintf (dump_file, "\n");
4040 if (is_gimple_assign (stmt))
4041 gimple_assign_set_lhs (stmt, name);
4042 else if (is_gimple_call (stmt))
4043 gimple_call_set_lhs (stmt, name);
4044 else
4045 gimple_phi_set_result (stmt, name);
4047 replace_uses_by (lhs, name);
4048 release_ssa_name (lhs);
4049 return true;
4052 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4053 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4054 specifies whether the function should care about type incompatibility
4055 between the current and new expressions. If it is false, the function will
4056 leave incompatibility issues to the caller. Return true iff the expression
4057 was modified. */
4059 static bool
4060 sra_ipa_modify_expr (tree *expr, bool convert,
4061 ipa_parm_adjustment_vec adjustments)
4063 int i, len;
4064 struct ipa_parm_adjustment *adj, *cand = NULL;
4065 HOST_WIDE_INT offset, size, max_size;
4066 tree base, src;
4068 len = VEC_length (ipa_parm_adjustment_t, adjustments);
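  /* Look through wrappers that extract only a part of a scalar; if we
     substitute the base, the result will need an explicit conversion.  */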
4070 if (TREE_CODE (*expr) == BIT_FIELD_REF
4071 || TREE_CODE (*expr) == IMAGPART_EXPR
4072 || TREE_CODE (*expr) == REALPART_EXPR)
4074 expr = &TREE_OPERAND (*expr, 0);
4075 convert = true;
4078 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4079 if (!base || size == -1 || max_size == -1)
4080 return false;
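  /* For a MEM_REF, fold its constant offset into OFFSET and continue with the
     pointer it dereferences.  */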
4082 if (TREE_CODE (base) == MEM_REF)
4084 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4085 base = TREE_OPERAND (base, 0);
4088 base = get_ssa_base_param (base);
4089 if (!base || TREE_CODE (base) != PARM_DECL)
4090 return false;
4092 for (i = 0; i < len; i++)
4094 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4096 if (adj->base == base
4097 && (adj->offset == offset || adj->remove_param))
4099 cand = adj;
4100 break;
4103 if (!cand || cand->copy_param || cand->remove_param)
4104 return false;
4106 if (cand->by_ref)
4107 src = build_simple_mem_ref (cand->reduction);
4108 else
4109 src = cand->reduction;
4111 if (dump_file && (dump_flags & TDF_DETAILS))
4113 fprintf (dump_file, "About to replace expr ");
4114 print_generic_expr (dump_file, *expr, 0);
4115 fprintf (dump_file, " with ");
4116 print_generic_expr (dump_file, src, 0);
4117 fprintf (dump_file, "\n");
4120 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4122 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4123 *expr = vce;
4125 else
4126 *expr = src;
4127 return true;
4130 /* If the statement pointed to by STMT_PTR contains any expressions that need
4131 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4132 any potential type incompatibilities (GSI is used to accommodate conversion
4133 statements and must point to the statement). Return true iff the statement
4134 was modified. */
4136 static bool
4137 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4138 ipa_parm_adjustment_vec adjustments)
4140 gimple stmt = *stmt_ptr;
4141 tree *lhs_p, *rhs_p;
4142 bool any;
4144 if (!gimple_assign_single_p (stmt))
4145 return false;
4147 rhs_p = gimple_assign_rhs1_ptr (stmt);
4148 lhs_p = gimple_assign_lhs_ptr (stmt);
4150 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4151 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4152 if (any)
4154 tree new_rhs = NULL_TREE;
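      /* If the replacement made the types of the LHS and RHS incompatible, a
	 conversion (or, for constructors, a compatible replacement value)
	 must be materialized.  */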
4156 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4158 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4160 /* V_C_Es of constructors can cause trouble (PR 42714). */
4161 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4162 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4163 else
4164 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4166 else
4167 new_rhs = fold_build1_loc (gimple_location (stmt),
4168 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4169 *rhs_p);
4171 else if (REFERENCE_CLASS_P (*rhs_p)
4172 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4173 && !is_gimple_reg (*lhs_p))
4174 /* This can happen when an assignment between two single-field
4175 structures is turned into an assignment between two pointers to
4176 scalars (PR 42237). */
4177 new_rhs = *rhs_p;
4179 if (new_rhs)
4181 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4182 true, GSI_SAME_STMT);
4184 gimple_assign_set_rhs_from_tree (gsi, tmp);
4187 return true;
4190 return false;
4193 /* Traverse the function body and perform all modifications described in
4194 ADJUSTMENTS. Return true iff the CFG has been changed. */
4196 static bool
4197 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4199 bool cfg_changed = false;
4200 basic_block bb;
4202 FOR_EACH_BB (bb)
4204 gimple_stmt_iterator gsi;
4206 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4207 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4209 gsi = gsi_start_bb (bb);
4210 while (!gsi_end_p (gsi))
4212 gimple stmt = gsi_stmt (gsi);
4213 bool modified = false;
4214 tree *t;
4215 unsigned i;
4217 switch (gimple_code (stmt))
4219 case GIMPLE_RETURN:
4220 t = gimple_return_retval_ptr (stmt);
4221 if (*t != NULL_TREE)
4222 modified |= sra_ipa_modify_expr (t, true, adjustments);
4223 break;
4225 case GIMPLE_ASSIGN:
4226 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4227 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4228 break;
4230 case GIMPLE_CALL:
4231 /* Operands must be processed before the lhs. */
4232 for (i = 0; i < gimple_call_num_args (stmt); i++)
4234 t = gimple_call_arg_ptr (stmt, i);
4235 modified |= sra_ipa_modify_expr (t, true, adjustments);
4238 if (gimple_call_lhs (stmt))
4240 t = gimple_call_lhs_ptr (stmt);
4241 modified |= sra_ipa_modify_expr (t, false, adjustments);
4242 modified |= replace_removed_params_ssa_names (stmt,
4243 adjustments);
4245 break;
4247 case GIMPLE_ASM:
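	  /* ASM inputs are uses and may receive a conversion; outputs are
	     definitions and are replaced without one.  */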
4248 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4250 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4251 modified |= sra_ipa_modify_expr (t, true, adjustments);
4253 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4255 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4256 modified |= sra_ipa_modify_expr (t, false, adjustments);
4258 break;
4260 default:
4261 break;
4264 if (modified)
4266 update_stmt (stmt);
4267 if (maybe_clean_eh_stmt (stmt)
4268 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4269 cfg_changed = true;
4271 gsi_next (&gsi);
4275 return cfg_changed;
4278 /* Call gimple_debug_bind_reset_value on all debug statements describing
4279 gimple register parameters that are being removed or replaced. */
4281 static void
4282 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4284 int i, len;
4286 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4287 for (i = 0; i < len; i++)
4289 struct ipa_parm_adjustment *adj;
4290 imm_use_iterator ui;
4291 gimple stmt;
4292 tree name;
4294 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4295 if (adj->copy_param || !is_gimple_reg (adj->base))
4296 continue;
4297 name = gimple_default_def (cfun, adj->base);
4298 if (!name)
4299 continue;
4300 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4302 /* All other users must have been removed by
4303 ipa_sra_modify_function_body. */
4304 gcc_assert (is_gimple_debug (stmt));
4305 gimple_debug_bind_reset_value (stmt);
4306 update_stmt (stmt);
4311 /* Return true iff some caller of NODE passes fewer actual arguments than
4312 there are formal parameters in the current function. */
4314 static bool
4315 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4316 void *data ATTRIBUTE_UNUSED)
4318 struct cgraph_edge *cs;
4319 for (cs = node->callers; cs; cs = cs->next_caller)
4320 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4321 return true;
4323 return false;
4326 /* Convert all call statements in callers of NODE according to the adjustments in DATA. */
4328 static bool
4329 convert_callers_for_node (struct cgraph_node *node,
4330 void *data)
4332 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4333 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4334 struct cgraph_edge *cs;
4336 for (cs = node->callers; cs; cs = cs->next_caller)
4338 current_function_decl = cs->caller->decl;
4339 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4341 if (dump_file)
4342 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4343 cs->caller->uid, cs->callee->uid,
4344 cgraph_node_name (cs->caller),
4345 cgraph_node_name (cs->callee));
4347 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4349 pop_cfun ();
4352 for (cs = node->callers; cs; cs = cs->next_caller)
4353 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4354 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4355 compute_inline_parameters (cs->caller, true);
4356 BITMAP_FREE (recomputed_callers);
4358 return true;
4361 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4363 static void
4364 convert_callers (struct cgraph_node *node, tree old_decl,
4365 ipa_parm_adjustment_vec adjustments)
4367 tree old_cur_fndecl = current_function_decl;
4368 basic_block this_block;
4370 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4371 adjustments, false);
4373 current_function_decl = old_cur_fndecl;
4375 if (!encountered_recursive_call)
4376 return;
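  /* Recursive calls in the body of the new version still refer to the old
     function; redirect them to the new decl and adjust their arguments too.  */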
4378 FOR_EACH_BB (this_block)
4380 gimple_stmt_iterator gsi;
4382 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4384 gimple stmt = gsi_stmt (gsi);
4385 tree call_fndecl;
4386 if (gimple_code (stmt) != GIMPLE_CALL)
4387 continue;
4388 call_fndecl = gimple_call_fndecl (stmt);
4389 if (call_fndecl == old_decl)
4391 if (dump_file)
4392 fprintf (dump_file, "Adjusting recursive call\n");
4393 gimple_call_set_fndecl (stmt, node->decl);
4394 ipa_modify_call_arguments (NULL, stmt, adjustments);
4399 return;
4402 /* Perform all the modifications required by IPA-SRA for NODE to have parameters
4403 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4405 static bool
4406 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4408 struct cgraph_node *new_node;
4409 bool cfg_changed;
4410 VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);
4412 rebuild_cgraph_edges ();
4413 pop_cfun ();
4414 current_function_decl = NULL_TREE;
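  /* Create the new version of the function, redirect all pre-existing callers
     to it, and then rewrite its body in place.  */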
4416 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4417 NULL, NULL, "isra");
4418 current_function_decl = new_node->decl;
4419 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4421 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4422 cfg_changed = ipa_sra_modify_function_body (adjustments);
4423 sra_ipa_reset_debug_stmts (adjustments);
4424 convert_callers (new_node, node->decl, adjustments);
4425 cgraph_make_node_local (new_node);
4426 return cfg_changed;
4429 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4430 its attributes; return true otherwise. NODE is the cgraph node of the
4431 current function. */
4433 static bool
4434 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4436 if (!cgraph_node_can_be_local_p (node))
4438 if (dump_file)
4439 fprintf (dump_file, "Function not local to this compilation unit.\n");
4440 return false;
4443 if (!node->local.can_change_signature)
4445 if (dump_file)
4446 fprintf (dump_file, "Function can not change signature.\n");
4447 return false;
4450 if (!tree_versionable_function_p (node->decl))
4452 if (dump_file)
4453 fprintf (dump_file, "Function is not versionable.\n");
4454 return false;
4457 if (DECL_VIRTUAL_P (current_function_decl))
4459 if (dump_file)
4460 fprintf (dump_file, "Function is a virtual method.\n");
4461 return false;
4464 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4465 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4467 if (dump_file)
4468 fprintf (dump_file, "Function too big to be made truly local.\n");
4469 return false;
4472 if (!node->callers)
4474 if (dump_file)
4475 fprintf (dump_file,
4476 "Function has no callers in this compilation unit.\n");
4477 return false;
4480 if (cfun->stdarg)
4482 if (dump_file)
4483 fprintf (dump_file, "Function uses stdarg.\n");
4484 return false;
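  /* Type attributes such as format or nonnull refer to specific parameter
     positions and would be invalidated by changing the signature.  */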
4487 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4488 return false;
4490 return true;
4493 /* Perform early interprocedural SRA. */
4495 static unsigned int
4496 ipa_early_sra (void)
4498 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4499 ipa_parm_adjustment_vec adjustments;
4500 int ret = 0;
4502 if (!ipa_sra_preliminary_function_checks (node))
4503 return 0;
4505 sra_initialize ();
4506 sra_mode = SRA_MODE_EARLY_IPA;
4508 if (!find_param_candidates ())
4510 if (dump_file)
4511 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4512 goto simple_out;
4515 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4516 NULL, true))
4518 if (dump_file)
4519 fprintf (dump_file, "There are callers with insufficient number of "
4520 "arguments.\n");
4521 goto simple_out;
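  /* bb_dereferences holds, for each candidate parameter and each basic block,
     the amount of the parameter that is certainly dereferenced there; it is
     later used to prove that dereferences can be safely moved to callers.  */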
4524 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4525 func_param_count
4526 * last_basic_block_for_function (cfun));
4527 final_bbs = BITMAP_ALLOC (NULL);
4529 scan_function ();
4530 if (encountered_apply_args)
4532 if (dump_file)
4533 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4534 goto out;
4537 if (encountered_unchangable_recursive_call)
4539 if (dump_file)
4540 fprintf (dump_file, "Function calls itself with insufficient "
4541 "number of arguments.\n");
4542 goto out;
4545 adjustments = analyze_all_param_accesses ();
4546 if (!adjustments)
4547 goto out;
4548 if (dump_file)
4549 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4551 if (modify_function (node, adjustments))
4552 ret = TODO_update_ssa | TODO_cleanup_cfg;
4553 else
4554 ret = TODO_update_ssa;
4555 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4557 statistics_counter_event (cfun, "Unused parameters deleted",
4558 sra_stats.deleted_unused_parameters);
4559 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4560 sra_stats.scalar_by_ref_to_by_val);
4561 statistics_counter_event (cfun, "Aggregate parameters broken up",
4562 sra_stats.aggregate_params_reduced);
4563 statistics_counter_event (cfun, "Aggregate parameter components created",
4564 sra_stats.param_reductions_created);
4566 out:
4567 BITMAP_FREE (final_bbs);
4568 free (bb_dereferences);
4569 simple_out:
4570 sra_deinitialize ();
4571 return ret;
4574 /* Return true if early IPA-SRA should be performed. */
4575 static bool
4576 ipa_early_sra_gate (void)
4578 return flag_ipa_sra && dbg_cnt (eipa_sra);
4581 struct gimple_opt_pass pass_early_ipa_sra =
4584 GIMPLE_PASS,
4585 "eipa_sra", /* name */
4586 ipa_early_sra_gate, /* gate */
4587 ipa_early_sra, /* execute */
4588 NULL, /* sub */
4589 NULL, /* next */
4590 0, /* static_pass_number */
4591 TV_IPA_SRA, /* tv_id */
4592 0, /* properties_required */
4593 0, /* properties_provided */
4594 0, /* properties_destroyed */
4595 0, /* todo_flags_start */
4596 TODO_dump_cgraph /* todo_flags_finish */