/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
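/* For illustration, a hypothetical example of what intra-SRA can do (the
   function and variable names here are made up and not part of GCC):

     struct S { int i; float f; };

     int
     foo (struct S *p)
     {
       struct S s;
       s = *p;
       return s.i;
     }

   Stage 2 records accesses for s (the whole aggregate, from "s = *p") and
   for s.i; stage 3 picks s.i as a scalar to be replaced; stage 4 may then
   rewrite the body to roughly

     s$i_1 = p_2(D)->i;
     return s$i_1;

   so that the aggregate s and its stack slot can eventually go away.  */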
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
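/* For illustration, an assumed example of such an access tree (layout for a
   32-bit int, not taken from a real testcase): for

     struct S { struct { int a; int b; } inner; int c; } s;

   with accesses to s, s.inner, s.inner.b and s.c, the representatives would
   form

     s                  offset 0,  size 96
     +-- s.inner        offset 0,  size 64
     |   +-- s.inner.b  offset 32, size 32
     +-- s.c            offset 64, size 32

   where every child is "within" its parent and siblings are sorted by
   offset.  */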
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access? */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field? */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue? */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely? */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set? */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified? */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
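/* A minimal usage sketch of the two functions above (a and b stand for two
   hypothetical group representatives); because the queue is a stack, b comes
   out before a:

     add_access_to_work_queue (a);
     add_access_to_work_queue (b);
     while (work_queue_head)
       {
         struct access *acc = pop_access_from_work_queue ();
         ... first iteration processes b, the second processes a ...
       }
*/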
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!host_integerp (DECL_SIZE (fld), 1))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}
/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
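/* For example (a hypothetical type, assuming a 32-bit int and a 64-bit
   long):

     struct in  { int x; int y; };
     struct out { struct in i; long l; };

   completely_scalarize_record on a variable of type struct out descends
   into i recursively and creates total-scalarization accesses at bit
   offsets 0 and 32 (for i.x and i.y) and at 64 (for l), each carrying a
   COMPONENT_REF expression built on top of REF.  */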
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
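/* For instance, if a and b are two hypothetical candidate aggregates and the
   statement is

     a = b;

   the code above allocates an assign_link with lacc pointing to the access
   of a and racc to the access of b and attaches it to b's access, so that
   subaccesses of b (say b.f) can later be propagated across the link into
   matching subaccesses of a.  */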
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function. There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
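/* As an assumed example of the resulting order: for three accesses into the
   same variable, compare_access_positions sorts them as

     <offset 0,  size 64>   the whole aggregate
     <offset 0,  size 32>   first field, scalar type
     <offset 32, size 32>   second field, scalar type

   i.e. by growing offset, with the bigger (enclosing) access first and
   scalar types before aggregate types at the same offset and size.  */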
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
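/* For example (a hypothetical access expression): for a.inner[3].f the two
   helpers above produce the name "a$inner$3$f", which becomes the DECL_NAME
   of the replacement variable and makes dumps and debug information easier
   to read.  */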
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  /* If prev_base were always an originally performed access
     we can extract more optimistic alignment information
     by looking at the access mode.  That would constrain the
     alignment of base + base_offset which we would need to
     adjust according to offset.  */
  if (!get_pointer_alignment_1 (base, &align, &misalign))
    {
      gcc_assert (misalign == 0);
      if (TREE_CODE (prev_base) == MEM_REF
          || TREE_CODE (prev_base) == TARGET_MEM_REF)
        align = TYPE_ALIGN (TREE_TYPE (prev_base));
    }
  misalign += (double_int_sext (tree_to_double_int (off),
                                TYPE_PRECISION (TREE_TYPE (off))).low
               * BITS_PER_UNIT);
  misalign = misalign & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
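/* For example (a hypothetical variable s of an aggregate type with an int
   stored four bytes into it), a call such as

     ref = build_ref_for_offset (loc, s, 4 * BITS_PER_UNIT,
                                 integer_type_node, NULL, false);

   would produce a reference equivalent to MEM[(int *)&s + 4B], i.e. a
   MEM_REF of the address of s with a constant byte offset.  */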
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !host_integerp (tr_pos, 1))
                continue;
              pos = TREE_INT_CST_LOW (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = TREE_INT_CST_LOW (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
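/* For example (hypothetical): for a variable s of type
   struct { int a; int b; } and OFFSET 32 with an int EXP_TYPE, the function
   above turns *RES from s into s.b, which is the form we want to show in a
   user visible warning instead of an equivalent MEM_REF.  */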
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected. */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;
  const char *msg;

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type))
        {
          reject (var, "not aggregate");
          continue;
        }
      if (needs_to_live_in_memory (var))
        {
          reject (var, "needs to live in memory");
          continue;
        }
      if (TREE_THIS_VOLATILE (var))
        {
          reject (var, "is volatile");
          continue;
        }
      if (!COMPLETE_TYPE_P (type))
        {
          reject (var, "has incomplete type");
          continue;
        }
      if (!host_integerp (TYPE_SIZE (type), 1))
        {
          reject (var, "type size not fixed");
          continue;
        }
      if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
        {
          reject (var, "type size is zero");
          continue;
        }
      if (type_internals_preclude_sra_p (type, &msg))
        {
          reject (var, msg);
          continue;
        }
      if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
             we also want to schedule it rather late.  Thus we ignore it in
             the early pass. */
          (sra_mode == SRA_MODE_EARLY_INTRA
           && is_va_list_type (type)))
        {
          reject (var, "is va_list");
          continue;
        }

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
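/* For instance (hypothetical accesses): if s.i is read as a scalar twice and
   written once, the three accesses share their <offset, size> pair and are
   spliced into a single group here; its representative gets both grp_read
   and grp_write and, because of the two scalar reads, also
   multiple_scalar_reads and therefore grp_hint.  */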
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  add_referenced_var (repl);
  if (!access->grp_partial_lhs
      && rename)
    mark_sym_for_renaming (repl);

  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
        DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
           && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
         as DECL_DEBUG_EXPR isn't considered when looking for still
         used SSA_NAMEs and thus they could be freed.  All debug info
         generation cares is whether something is constant or variable
         and that get_ref_base_and_extent works properly on the
         expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
        switch (TREE_CODE (d))
          {
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
            if (TREE_OPERAND (d, 1)
                && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
              TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
            if (TREE_OPERAND (d, 3)
                && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
              TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
            break;
          case COMPONENT_REF:
            if (TREE_OPERAND (d, 2)
                && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
              TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
            break;
          default:
            break;
          }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
        TREE_NO_WARNING (repl) = 1;
      else
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |    Written to in an assignment statement
   |    |
   |    |    Access read as scalar _once_
   |    |    |
   |    |    |    Read in an assignment statement
   |    |    |    |
   |    |    |    |    Scalarize    Comment
-----------------------------------------------------------------------------
   0    0    0    0                 No access for the scalar
   0    0    0    1                 No access for the scalar
   0    0    1    0    No           Single read - won't help
   0    0    1    1    No           The same case
   0    1    0    0                 No access for the scalar
   0    1    0    1                 No access for the scalar
   0    1    1    0    Yes          s = *g; return s.i;
   0    1    1    1    Yes          The same case as above
   1    0    0    0    No           Won't help
   1    0    0    1    Yes          s.i = 1; *g = s;
   1    0    1    0    Yes          s.i = 5; g = s.i;
   1    0    1    1    Yes          The same case as above
   1    1    0    0    No           Won't help.
   1    1    0    1    Yes          s.i = 1; *g = s;
   1    1    1    0    Yes          s = *g; return s.i;
   1    1    1    1    Yes          Any of the above yeses  */
2046 analyze_access_subtree (struct access
*root
, struct access
*parent
,
2047 bool allow_replacements
)
2049 struct access
*child
;
2050 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2051 HOST_WIDE_INT covered_to
= root
->offset
;
2052 bool scalar
= is_gimple_reg_type (root
->type
);
2053 bool hole
= false, sth_created
= false;
2057 if (parent
->grp_read
)
2059 if (parent
->grp_assignment_read
)
2060 root
->grp_assignment_read
= 1;
2061 if (parent
->grp_write
)
2062 root
->grp_write
= 1;
2063 if (parent
->grp_assignment_write
)
2064 root
->grp_assignment_write
= 1;
2065 if (parent
->grp_total_scalarization
)
2066 root
->grp_total_scalarization
= 1;
2069 if (root
->grp_unscalarizable_region
)
2070 allow_replacements
= false;
2072 if (allow_replacements
&& expr_with_var_bounded_array_refs_p (root
->expr
))
2073 allow_replacements
= false;
2075 for (child
= root
->first_child
; child
; child
= child
->next_sibling
)
2077 hole
|= covered_to
< child
->offset
;
2078 sth_created
|= analyze_access_subtree (child
, root
,
2079 allow_replacements
&& !scalar
);
2081 root
->grp_unscalarized_data
|= child
->grp_unscalarized_data
;
2082 root
->grp_total_scalarization
&= child
->grp_total_scalarization
;
2083 if (child
->grp_covered
)
2084 covered_to
+= child
->size
;
2089 if (allow_replacements
&& scalar
&& !root
->first_child
2091 || ((root
->grp_scalar_read
|| root
->grp_assignment_read
)
2092 && (root
->grp_scalar_write
|| root
->grp_assignment_write
))))
2094 bool new_integer_type
;
2095 /* Always create access replacements that cover the whole access.
2096 For integral types this means the precision has to match.
2097 Avoid assumptions based on the integral type kind, too. */
2098 if (INTEGRAL_TYPE_P (root
->type
)
2099 && (TREE_CODE (root
->type
) != INTEGER_TYPE
2100 || TYPE_PRECISION (root
->type
) != root
->size
)
2101 /* But leave bitfield accesses alone. */
2102 && (TREE_CODE (root
->expr
) != COMPONENT_REF
2103 || !DECL_BIT_FIELD (TREE_OPERAND (root
->expr
, 1))))
2105 tree rt
= root
->type
;
2106 gcc_assert ((root
->offset
% BITS_PER_UNIT
) == 0
2107 && (root
->size
% BITS_PER_UNIT
) == 0);
2108 root
->type
= build_nonstandard_integer_type (root
->size
,
2109 TYPE_UNSIGNED (rt
));
2110 root
->expr
= build_ref_for_offset (UNKNOWN_LOCATION
,
2111 root
->base
, root
->offset
,
2112 root
->type
, NULL
, false);
2113 new_integer_type
= true;
2116 new_integer_type
= false;
2118 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2120 fprintf (dump_file
, "Marking ");
2121 print_generic_expr (dump_file
, root
->base
, 0);
2122 fprintf (dump_file
, " offset: %u, size: %u ",
2123 (unsigned) root
->offset
, (unsigned) root
->size
);
2124 fprintf (dump_file
, " to be replaced%s.\n",
2125 new_integer_type
? " with an integer": "");
2128 root
->grp_to_be_replaced
= 1;
2134 if (covered_to
< limit
)
2137 root
->grp_total_scalarization
= 0;
2141 && (!hole
|| root
->grp_total_scalarization
))
2143 root
->grp_covered
= 1;
2146 if (root
->grp_write
|| TREE_CODE (root
->base
) == PARM_DECL
)
2147 root
->grp_unscalarized_data
= 1; /* not covered and written to */
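/* A concrete illustration of two rows of the table above (hypothetical user
   code, not from GCC's testsuite).  Given

     struct S { int i; float f; };

     int  f1 (struct S *g) { struct S s; s = *g; return s.i; }
     void f2 (struct S *g) { struct S s; s.i = 1; *g = s; }

   in f1 the access s.i matches row "0 1 1 0" (written to in an assignment,
   read once as a scalar), so a replacement for s.i lets the aggregate copy
   s = *g be split into scalar loads; in f2 the same access matches row
   "1 0 0 1" and the scalar replacement feeds the aggregate store *g = s.  */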
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
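/* Illustrative sketch (hypothetical code): given two candidate aggregates

     struct S { int i; int j; } a, b;
     ...
     a.i = 123;          creates a subaccess of a at offset 0
     b = a;              an assign link with RACC = a, LACC = b
     use (b.i);

   the function above creates an artificial child of b's access tree at the
   normalized offset of a.i, so that the copy b = a can later be replaced by
   copies between the individual scalar replacements.  */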
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = referenced_var (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
		<= max_total_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
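/* Numeric illustration (target-dependent values assumed): on a 64-bit target
   with UNITS_PER_WORD of 8 and a MOVE_RATIO of, say, 4 when optimizing for
   speed, the cap computed above is 8 * 8 * 4 = 256 bits.  Then for

     struct small { int x; int y; };     64 bits  - total scalarization tried
     struct big   { double d[8]; };      512 bits - rejected as too big

   the first record is attempted as a whole while the second is only
   scalarized where individual accesses warrant it.  */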
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, otherwise they
   will be added before the statement.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  while (access)
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gimple stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  sra_stats.subtree_copies++;
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
}
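/* Illustrative sketch (schematic GIMPLE, replacement names hypothetical):
   for an access tree over an aggregate p with scalarized children p.i and
   p.f, calling the function above with WRITE true emits statements along
   the lines of

     SR.1 = p.i;
     SR.2 = p.f;

   while WRITE false emits the reverse copies p.i = SR.1; p.f = SR.2;
   placed before or after the statement at GSI as INSERT_AFTER dictates.  */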
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is
   the root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, access->base, access->offset, access,
				     NULL, false);

	  if (write)
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
	{
	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
	  start_offset = access->offset
	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
				     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
			       gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
			       0, 0, gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignment this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
				 HOST_WIDE_INT left_offset,
				 gimple_stmt_iterator *old_gsi,
				 gimple_stmt_iterator *new_gsi,
				 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
	  gimple stmt;
	  tree rhs;

	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
						true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first...  */
	      if (*refreshed == SRA_UDH_NONE)
		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
								  old_gsi);

	      if (*refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
					   new_gsi, true);
	      else
		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
					   new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, loc);
	  sra_stats.subreplacements++;
	}
      else if (*refreshed == SRA_UDH_NONE
	       && lacc->grp_read && !lacc->grp_covered)
	*refreshed = handle_unscalarized_data_in_subtree (top_racc,
							  old_gsi);
      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
					 old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED };  /* stmt eliminated */

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  if (gimple_clobber_p (*stmt))
    {
      /* Remove clobbers of fully scalarized variables, otherwise
	 do nothing.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (*stmt);
	  gsi_remove (gsi, true);
	  release_defs (*stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_NONE;
    }

  loc = gimple_location (*stmt);
  if (VEC_length (constructor_elt,
		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      release_defs (*stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  tree repl, decl;

  decl = get_unrenamed_access_replacement (racc);

  repl = gimple_default_def (cfun, decl);
  if (!repl)
    {
      repl = make_ssa_name (decl, gimple_build_nop ());
      set_default_def (decl, repl);
    }

  return repl;
}
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

static inline bool
contains_bitfld_comp_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static inline bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_comp_ref_p (lhs)
	      && !access_has_children_p (lacc))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (*stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs)
		   && !access_has_children_p (racc))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */

  if (modify_this_stmt
      || gimple_has_volatile_ops (*stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
				 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (*stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  gimple_stmt_iterator orig_gsi = *gsi;
	  enum unscalarized_data_handling refreshed;

	  if (lacc->grp_read && !lacc->grp_covered)
	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
	  else
	    refreshed = SRA_UDH_NONE;

	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
					   &orig_gsi, gsi, &refreshed);
	  if (refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (gsi);
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (&orig_gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, *stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (*stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				     gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
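/* Schematic illustration of the first scenario above (hypothetical input):
   when both sides of

     d = s;

   have fully scalarized components i and f, load_assign_lhs_subreplacements
   emits copies along the lines of

     SR.d_i = SR.s_i;
     SR.d_f = SR.s_f;

   and, if no refresh of the RHS aggregate was needed, the original aggregate
   assignment is removed entirely (the SRA_AM_REMOVED path).  */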
/* Traverse the function body and make all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (&stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      VEC (access_p, heap) *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = VEC_index (access_p, access_vec, 0);
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
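/* Illustration (schematic GIMPLE, replacement names hypothetical): for

     int foo (struct S s) { return s.i + s.j; }

   with s.i and s.j scalarized, the sequence inserted on the single edge out
   of the entry block is roughly

     SR.1 = s.i;
     SR.2 = s.j;

   after which the rewritten function body uses only SR.1 and SR.2.  */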
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}

static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_intra_sra,			/* gate */
  early_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_intra_sra,			/* gate */
  late_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = gimple_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = gimple_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
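/* Hypothetical examples of the distinction made above:

     int  f (int *p) { return *p; }     only dereferenced - no direct use
     int *g (int *p) { return p; }      pointer value used directly

   In f the sole use of p's default definition is a well-formed MEM_REF, so
   p can remain an IPA-SRA candidate; in g the use of the pointer itself is
   direct, ptr_parm_has_direct_uses returns true and p is disqualified.  */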
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (VEC (access_p, heap) *representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = VEC_index (access_p, representatives, i);
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  VEC (basic_block, heap) *queue;
  basic_block bb;

  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, queue, bb);
      bb->aux = bb;
    }

  while (!VEC_empty (basic_block, queue))
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = VEC_pop (basic_block, queue);
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR)
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    VEC_quick_push (basic_block, queue, e->src);
	  }
    }

  VEC_free (basic_block, heap, queue);
}
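/* Worked illustration (hypothetical function): in

     int f (int *p, int c)
     {
       if (c)
	 return *p;
       return 0;
     }

   the block containing *p records a dereference distance of 32 bits for p,
   but the "return 0" path records none, so the minimum over successors that
   the propagation above pushes towards the entry block stays 0 and p is
   later marked grp_not_necessarilly_dereferenced - hoisting the load into
   callers could introduce a fault the original program never had.  */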
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the accessed
   part, is calculated for each pointer parameter rather than a simple
   boolean, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereference.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = VEC_index (access_p, access_vec, 0);
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = VEC_length (access_p, access_vec);
  for (i = 1; i < access_count; i++)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this access precludes IPA-SRA of the parameter it is
   associated with.  */

static bool
access_precludes_ipa_sra_p (struct access *access)
{
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = VEC_length (access_p, access_vec);

  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = VEC_index (access_p, access_vec, i);
      modification = access->write;
      if (access_precludes_ipa_sra_p (access))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2)
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
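/* Numeric illustration (assuming the default ipa-sra-ptr-growth-factor of 2
   and a 64-bit pointer): for a by-reference parameter pointing to a 128-bit
   struct of which only two 32-bit fields are read (and certainly
   dereferenced), cur_parm_size = 64, agg_size = 128, total_size = 64 and
   parm_size_limit = 2 * 64 = 128.  Since 64 < 128 and 64 <= 128, the
   parameter is split into two components; had the whole aggregate been
   accessed, total_size would reach agg_size and the reduction would be
   rejected.  */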
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  *representatives = VEC_alloc (access_p, heap, func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  VEC_quick_push (access_p, *representatives,
			  &no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  VEC_quick_push (access_p, *representatives, repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  VEC_quick_push (access_p, *representatives, repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	VEC_quick_push (access_p, *representatives, NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      VEC_free (access_p, heap, *representatives);
      *representatives = NULL;
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, VEC(tree, heap) *parms)
{
  int i, len;

  len = VEC_length (tree, parms);
  for (i = 0; i < len; i++)
    if (VEC_index (tree, parms, i) == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
				       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment *adj;

	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	  memset (adj, 0, sizeof (*adj));
	  adj->base_index = get_param_index (parm, parms);
	  adj->base = parm;
	  if (!repr)
	    adj->copy_param = 1;
	  else
	    adj->remove_param = 1;
	}
      else
	{
	  struct ipa_parm_adjustment *adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (adj, 0, sizeof (*adj));
	      gcc_assert (repr->base == parm);
	      adj->base_index = index;
	      adj->base = repr->base;
	      adj->type = repr->type;
	      adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj->offset = repr->offset;
	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			     && (repr->grp_maybe_modified
				 || repr->grp_not_necessarilly_dereferenced));
	    }
	}
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		VEC_replace (access_p, representatives, i, NULL);
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  VEC_replace (access_p, representatives, i, NULL);
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}
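/* End-to-end illustration of the plan produced above (hypothetical source):
   for

     int f (struct S *p) { return p->i; }

   where only p->i is read, never written, certainly dereferenced and not
   modifiable through aliases, the adjustments turn the by-reference
   parameter into a by-value scalar, so callers are rewritten from f (&s)
   to f (s.i) and the new body is effectively

     int f (int i) { return i; }  */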
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
	return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL, together with all of its uses, and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
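/* A sketch of the intended effect (hypothetical GIMPLE, illustration only):
   a definition of an SSA name of a removed parameter UNUSED such as

     unused_5 = PHI <unused_3(2), unused_4(3)>

   gets its result re-created on the substitute VAR_DECL, e.g. as ISR.9_6,
   and replace_uses_by then rewrites every use of unused_5 to ISR.9_6, so
   the defined name of the removed PARM_DECL no longer appears in the
   body.  */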
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
		     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base &&
	  (adj->offset == offset || adj->remove_param))
	{
	  cand = adj;
	  break;
	}
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
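/* For illustration (hypothetical names): if parameter P had its component at
   offset 0 reduced to a new parameter ISRA.7, a matching expression such as
   p_1(D)->x is rewritten to ISRA.7 when the component is now passed by
   value, or to roughly *ISRA.7 when cand->by_ref is set and the component
   is still passed behind a pointer.  */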
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
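/* A sketch of the type-mismatch handling above (hypothetical GIMPLE): when
   replacing operands turns

     s = *p_1(D);

   into an assignment between types that merely share a representation, the
   RHS is wrapped as

     s = VIEW_CONVERT_EXPR<struct S>(ISRA.4);

   and force_gimple_operand_gsi inserts whatever statements are needed to
   keep the result valid GIMPLE.  */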
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
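/* Note on the EH handling above: replacing a memory reference with a scalar
   can make a statement that previously could throw no longer throw.  If
   purging the now-dead EH edges changes the CFG, the result propagates up
   through modify_function so that ipa_early_sra can schedule
   TODO_cleanup_cfg (see below).  */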
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
	continue;
      name = gimple_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		DECL_MODE (vexpr) = DECL_MODE (adj->base);
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_referenced_var (copy);
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy)
	    = BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
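/* For illustration (hypothetical dump fragment): for a removed parameter P
   the code above may leave something like

     # DEBUG D#1 s=> p
     # DEBUG p => D#1

   at the start of the function, allowing a debugger to show the value the
   caller passed instead of merely reporting P as optimized away.  */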
/* Return false iff all callers have at least as many actual arguments as
   there are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
					 void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
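/* Such callers can exist e.g. with pre-ISO prototypes (a contrived example):

     int f ();
     int g (void) { return f (); }         <-- zero actual arguments
     int f (int a, int b) { return a + b; }

   Removing or splitting F's parameters would make the already dubious call
   in G impossible to adjust, so IPA-SRA backs out in that case.  */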
/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->symbol.decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
		 cs->caller->uid, cs->callee->uid,
		 xstrdup (cgraph_node_name (cs->caller)),
		 xstrdup (cgraph_node_name (cs->callee)));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
			       adjustments, false);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->symbol.decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }

  return;
}
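/* A sketch of a call-site adjustment (hypothetical GIMPLE): if ADJUSTMENTS
   reduced a by-reference aggregate parameter to the scalar field x, a call

     foo (&s, i_1);

   is rewritten by ipa_modify_call_arguments to something like

     tmp_2 = s.x;
     foo.isra.0 (tmp_2, i_1);

   Recursive calls are handled separately above because the body currently
   being modified is the new clone itself.  */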
/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
					 false, NULL, NULL, "isra");
  current_function_decl = new_node->symbol.decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->symbol.decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
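/* Note on the ordering above: the clone is created first, its formal
   parameter list is rewritten next, its body is fixed up before the call
   arguments are adjusted, and only then is the new node made local.  The
   "isra" string passed to cgraph_function_versioning is what produces the
   familiar foo.isra.N symbol names in optimized output.  */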
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->symbol.decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
				   not_all_callers_have_enough_arguments_p,
				   NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_accesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
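/* End-to-end illustration (a made-up example): with -fipa-sra, a local
   function

     static int get_x (struct point *p) { return p->x; }

   can be rewritten to the equivalent of

     static int get_x.isra.0 (int x) { return x; }

   with every caller loading p->x itself, removing a memory access from the
   callee and often enabling further scalar optimizations there.  */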
/* Return true iff early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}
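/* The pass is gated on -fipa-sra and on the "eipa_sra" debug counter, which
   should make it possible to bisect miscompilations with a counter limit
   such as -fdbg-cnt=eipa_sra:N.  Its dump, requested with
   -fdump-tree-eipa_sra, contains the "Adjusting call" and parameter
   adjustment messages printed above.  */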
struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",				/* name */
  ipa_early_sra_gate,			/* gate */
  ipa_early_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_SRA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_symtab			/* todo_flags_finish */
 }
};