/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
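/* As a purely illustrative sketch (not taken from any particular testcase),
   a function such as

     struct S { int i; float f; };

     int
     foo (void)
     {
       struct S s;
       s.i = 1;
       s.f = 2.0f;
       return s.i;
     }

   may, after SRA, effectively operate on independent scalars s$i and s$f
   instead of the aggregate s, allowing the scalar optimizers to propagate
   the constant 1 into the return statement.  */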
76 #include "coretypes.h"
77 #include "alloc-pool.h"
82 #include "tree-flow.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
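/* For illustration (a hypothetical layout, not lifted from a testcase):
   given

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   an access to the whole of "in" has offset 0 and size 64 (on a target with
   32-bit int), and accesses to "in.a" (offset 0, size 32) and "in.b"
   (offset 32, size 32) become its children in the access tree, because both
   lie entirely within it.  */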
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;

  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;
/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used, and need to be used, as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;
/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
322 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
323 representative fields are dumped, otherwise those which only describe the
324 individual access are. */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}
458 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
459 in ACCESS. Return NULL if it cannot be found. */
461 static struct access
*
462 find_access_in_subtree (struct access
*access
, HOST_WIDE_INT offset
,
465 while (access
&& (access
->offset
!= offset
|| access
->size
!= size
))
467 struct access
*child
= access
->first_child
;
469 while (child
&& (child
->offset
+ child
->size
<= offset
))
470 child
= child
->next_sibling
;
477 /* Return the first group representative for DECL or NULL if none exists. */
479 static struct access
*
480 get_first_repr_for_decl (tree base
)
482 VEC (access_p
, heap
) *access_vec
;
484 access_vec
= get_base_access_vector (base
);
488 return VEC_index (access_p
, access_vec
, 0);
491 /* Find an access representative for the variable BASE and given OFFSET and
492 SIZE. Requires that access trees have already been built. Return NULL if
493 it cannot be found. */
495 static struct access
*
496 get_var_base_offset_size_access (tree base
, HOST_WIDE_INT offset
,
499 struct access
*access
;
501 access
= get_first_repr_for_decl (base
);
502 while (access
&& (access
->offset
+ access
->size
<= offset
))
503 access
= access
->next_grp
;
507 return find_access_in_subtree (access
, offset
, size
);
510 /* Add LINK to the linked list of assign links of RACC. */
512 add_link_to_rhs (struct access
*racc
, struct assign_link
*link
)
514 gcc_assert (link
->racc
== racc
);
516 if (!racc
->first_link
)
518 gcc_assert (!racc
->last_link
);
519 racc
->first_link
= link
;
522 racc
->last_link
->next
= link
;
524 racc
->last_link
= link
;
/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
531 relink_to_new_repr (struct access
*new_racc
, struct access
*old_racc
)
533 if (!old_racc
->first_link
)
535 gcc_assert (!old_racc
->last_link
);
539 if (new_racc
->first_link
)
541 gcc_assert (!new_racc
->last_link
->next
);
542 gcc_assert (!old_racc
->last_link
|| !old_racc
->last_link
->next
);
544 new_racc
->last_link
->next
= old_racc
->first_link
;
545 new_racc
->last_link
= old_racc
->last_link
;
549 gcc_assert (!new_racc
->last_link
);
551 new_racc
->first_link
= old_racc
->first_link
;
552 new_racc
->last_link
= old_racc
->last_link
;
554 old_racc
->first_link
= old_racc
->last_link
= NULL
;
557 /* Add ACCESS to the work queue (which is actually a stack). */
560 add_access_to_work_queue (struct access
*access
)
562 if (!access
->grp_queued
)
564 gcc_assert (!access
->next_queued
);
565 access
->next_queued
= work_queue_head
;
566 access
->grp_queued
= 1;
567 work_queue_head
= access
;
571 /* Pop an access from the work queue, and return it, assuming there is one. */
573 static struct access
*
574 pop_access_from_work_queue (void)
576 struct access
*access
= work_queue_head
;
578 work_queue_head
= access
->next_queued
;
579 access
->next_queued
= NULL
;
580 access
->grp_queued
= 0;
585 /* Allocate necessary structures. */
588 sra_initialize (void)
590 candidate_bitmap
= BITMAP_ALLOC (NULL
);
591 should_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
592 cannot_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
593 gcc_obstack_init (&name_obstack
);
594 access_pool
= create_alloc_pool ("SRA accesses", sizeof (struct access
), 16);
595 link_pool
= create_alloc_pool ("SRA links", sizeof (struct assign_link
), 16);
596 base_access_vec
= pointer_map_create ();
597 memset (&sra_stats
, 0, sizeof (sra_stats
));
598 encountered_apply_args
= false;
599 encountered_recursive_call
= false;
600 encountered_unchangable_recursive_call
= false;
603 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
606 delete_base_accesses (const void *key ATTRIBUTE_UNUSED
, void **value
,
607 void *data ATTRIBUTE_UNUSED
)
609 VEC (access_p
, heap
) *access_vec
;
610 access_vec
= (VEC (access_p
, heap
) *) *value
;
611 VEC_free (access_p
, heap
, access_vec
);
616 /* Deallocate all general structures. */
619 sra_deinitialize (void)
621 BITMAP_FREE (candidate_bitmap
);
622 BITMAP_FREE (should_scalarize_away_bitmap
);
623 BITMAP_FREE (cannot_scalarize_away_bitmap
);
624 free_alloc_pool (access_pool
);
625 free_alloc_pool (link_pool
);
626 obstack_free (&name_obstack
, NULL
);
628 pointer_map_traverse (base_access_vec
, delete_base_accesses
, NULL
);
629 pointer_map_destroy (base_access_vec
);
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
635 disqualify_candidate (tree decl
, const char *reason
)
637 bitmap_clear_bit (candidate_bitmap
, DECL_UID (decl
));
639 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
641 fprintf (dump_file
, "! Disqualifying ");
642 print_generic_expr (dump_file
, decl
, 0);
643 fprintf (dump_file
, " - %s\n", reason
);
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
656 switch (TREE_CODE (type
))
660 case QUAL_UNION_TYPE
:
661 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
662 if (TREE_CODE (fld
) == FIELD_DECL
)
664 tree ft
= TREE_TYPE (fld
);
666 if (TREE_THIS_VOLATILE (fld
))
668 *msg
= "volatile structure field";
671 if (!DECL_FIELD_OFFSET (fld
))
673 *msg
= "no structure field offset";
676 if (!DECL_SIZE (fld
))
678 *msg
= "zero structure field size";
681 if (!host_integerp (DECL_FIELD_OFFSET (fld
), 1))
683 *msg
= "structure field offset not fixed";
686 if (!host_integerp (DECL_SIZE (fld
), 1))
688 *msg
= "structure field size not fixed";
691 if (AGGREGATE_TYPE_P (ft
)
692 && int_bit_position (fld
) % BITS_PER_UNIT
!= 0)
694 *msg
= "structure field is bit field";
698 if (AGGREGATE_TYPE_P (ft
) && type_internals_preclude_sra_p (ft
, msg
))
705 et
= TREE_TYPE (type
);
707 if (TYPE_VOLATILE (et
))
709 *msg
= "element type is volatile";
713 if (AGGREGATE_TYPE_P (et
) && type_internals_preclude_sra_p (et
, msg
))
723 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
724 base variable if it is. Return T if it is not an SSA_NAME. */
727 get_ssa_base_param (tree t
)
729 if (TREE_CODE (t
) == SSA_NAME
)
731 if (SSA_NAME_IS_DEFAULT_DEF (t
))
732 return SSA_NAME_VAR (t
);
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially final
   one.  */
744 mark_parm_dereference (tree base
, HOST_WIDE_INT dist
, gimple stmt
)
746 basic_block bb
= gimple_bb (stmt
);
747 int idx
, parm_index
= 0;
750 if (bitmap_bit_p (final_bbs
, bb
->index
))
753 for (parm
= DECL_ARGUMENTS (current_function_decl
);
754 parm
&& parm
!= base
;
755 parm
= DECL_CHAIN (parm
))
758 gcc_assert (parm_index
< func_param_count
);
760 idx
= bb
->index
* func_param_count
+ parm_index
;
761 if (bb_dereferences
[idx
] < dist
)
762 bb_dereferences
[idx
] = dist
;
765 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
766 the three fields. Also add it to the vector of accesses corresponding to
767 the base. Finally, return the new access. */
769 static struct access
*
770 create_access_1 (tree base
, HOST_WIDE_INT offset
, HOST_WIDE_INT size
)
772 VEC (access_p
, heap
) *vec
;
773 struct access
*access
;
776 access
= (struct access
*) pool_alloc (access_pool
);
777 memset (access
, 0, sizeof (struct access
));
779 access
->offset
= offset
;
782 slot
= pointer_map_contains (base_access_vec
, base
);
784 vec
= (VEC (access_p
, heap
) *) *slot
;
786 vec
= VEC_alloc (access_p
, heap
, 32);
788 VEC_safe_push (access_p
, heap
, vec
, access
);
790 *((struct VEC (access_p
,heap
) **)
791 pointer_map_insert (base_access_vec
, base
)) = vec
;
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */
799 static struct access
*
800 create_access (tree expr
, gimple stmt
, bool write
)
802 struct access
*access
;
803 HOST_WIDE_INT offset
, size
, max_size
;
805 bool ptr
, unscalarizable_region
= false;
807 base
= get_ref_base_and_extent (expr
, &offset
, &size
, &max_size
);
809 if (sra_mode
== SRA_MODE_EARLY_IPA
810 && TREE_CODE (base
) == MEM_REF
)
812 base
= get_ssa_base_param (TREE_OPERAND (base
, 0));
820 if (!DECL_P (base
) || !bitmap_bit_p (candidate_bitmap
, DECL_UID (base
)))
823 if (sra_mode
== SRA_MODE_EARLY_IPA
)
825 if (size
< 0 || size
!= max_size
)
827 disqualify_candidate (base
, "Encountered a variable sized access.");
830 if (TREE_CODE (expr
) == COMPONENT_REF
831 && DECL_BIT_FIELD (TREE_OPERAND (expr
, 1)))
833 disqualify_candidate (base
, "Encountered a bit-field access.");
836 gcc_checking_assert ((offset
% BITS_PER_UNIT
) == 0);
839 mark_parm_dereference (base
, offset
+ size
, stmt
);
843 if (size
!= max_size
)
846 unscalarizable_region
= true;
850 disqualify_candidate (base
, "Encountered an unconstrained access.");
855 access
= create_access_1 (base
, offset
, size
);
857 access
->type
= TREE_TYPE (expr
);
858 access
->write
= write
;
859 access
->grp_unscalarizable_region
= unscalarizable_region
;
862 if (TREE_CODE (expr
) == COMPONENT_REF
863 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr
, 1)))
864 access
->non_addressable
= 1;
870 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
871 register types or (recursively) records with only these two kinds of fields.
872 It also returns false if any of these records contains a bit-field. */
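/* For example (illustrative only), the predicate below would accept

     struct point { int x; int y; };
     struct segment { struct point a; struct point b; };

   but would reject

     struct flags { int bits : 3; };

   because of the bit-field, as well as any non-RECORD_TYPE such as an array
   or a union.  */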
875 type_consists_of_records_p (tree type
)
879 if (TREE_CODE (type
) != RECORD_TYPE
)
882 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
883 if (TREE_CODE (fld
) == FIELD_DECL
)
885 tree ft
= TREE_TYPE (fld
);
887 if (DECL_BIT_FIELD (fld
))
890 if (!is_gimple_reg_type (ft
)
891 && !type_consists_of_records_p (ft
))
898 /* Create total_scalarization accesses for all scalar type fields in DECL that
899 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
900 must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */
905 completely_scalarize_record (tree base
, tree decl
, HOST_WIDE_INT offset
,
908 tree fld
, decl_type
= TREE_TYPE (decl
);
910 for (fld
= TYPE_FIELDS (decl_type
); fld
; fld
= DECL_CHAIN (fld
))
911 if (TREE_CODE (fld
) == FIELD_DECL
)
913 HOST_WIDE_INT pos
= offset
+ int_bit_position (fld
);
914 tree ft
= TREE_TYPE (fld
);
915 tree nref
= build3 (COMPONENT_REF
, TREE_TYPE (fld
), ref
, fld
,
918 if (is_gimple_reg_type (ft
))
920 struct access
*access
;
923 size
= tree_low_cst (DECL_SIZE (fld
), 1);
924 access
= create_access_1 (base
, pos
, size
);
927 access
->grp_total_scalarization
= 1;
928 /* Accesses for intraprocedural SRA can have their stmt NULL. */
931 completely_scalarize_record (base
, fld
, pos
, nref
);
935 /* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
937 type_consists_of_records_p. */
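/* Illustration (hypothetical): for

     struct point { int x; int y; } p;

   completely_scalarize_var creates a total_scalarization access for p as a
   whole (offset 0, size 64 with 32-bit int) and, through
   completely_scalarize_record, one access for each of p.x (offset 0) and
   p.y (offset 32), so that p can later be replaced by two independent
   scalars.  */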
940 completely_scalarize_var (tree var
)
942 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (var
), 1);
943 struct access
*access
;
945 access
= create_access_1 (var
, 0, size
);
947 access
->type
= TREE_TYPE (var
);
948 access
->grp_total_scalarization
= 1;
950 completely_scalarize_record (var
, var
, 0, var
);
953 /* Search the given tree for a declaration by skipping handled components and
954 exclude it from the candidates. */
957 disqualify_base_of_expr (tree t
, const char *reason
)
959 t
= get_base_address (t
);
960 if (sra_mode
== SRA_MODE_EARLY_IPA
961 && TREE_CODE (t
) == MEM_REF
)
962 t
= get_ssa_base_param (TREE_OPERAND (t
, 0));
965 disqualify_candidate (t
, reason
);
968 /* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */
972 static struct access
*
973 build_access_from_expr_1 (tree expr
, gimple stmt
, bool write
)
975 struct access
*ret
= NULL
;
978 if (TREE_CODE (expr
) == BIT_FIELD_REF
979 || TREE_CODE (expr
) == IMAGPART_EXPR
980 || TREE_CODE (expr
) == REALPART_EXPR
)
982 expr
= TREE_OPERAND (expr
, 0);
988 /* We need to dive through V_C_Es in order to get the size of its parameter
989 and not the result type. Ada produces such statements. We are also
990 capable of handling the topmost V_C_E but not any of those buried in other
991 handled components. */
992 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
993 expr
= TREE_OPERAND (expr
, 0);
995 if (contains_view_convert_expr_p (expr
))
997 disqualify_base_of_expr (expr
, "V_C_E under a different handled "
1002 switch (TREE_CODE (expr
))
1005 if (TREE_CODE (TREE_OPERAND (expr
, 0)) != ADDR_EXPR
1006 && sra_mode
!= SRA_MODE_EARLY_IPA
)
1014 case ARRAY_RANGE_REF
:
1015 ret
= create_access (expr
, stmt
, write
);
1022 if (write
&& partial_ref
&& ret
)
1023 ret
->grp_partial_lhs
= 1;
1028 /* Scan expression EXPR and create access structures for all accesses to
1029 candidates for scalarization. Return true if any access has been inserted.
1030 STMT must be the statement from which the expression is taken, WRITE must be
1031 true if the expression is a store and false otherwise. */
1034 build_access_from_expr (tree expr
, gimple stmt
, bool write
)
1036 struct access
*access
;
1038 access
= build_access_from_expr_1 (expr
, stmt
, write
);
/* This means the aggregate is accessed as a whole in a way other than an
1042 assign statement and thus cannot be removed even if we had a scalar
1043 replacement for everything. */
1044 if (cannot_scalarize_away_bitmap
)
1045 bitmap_set_bit (cannot_scalarize_away_bitmap
, DECL_UID (access
->base
));
1051 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1052 modes in which it matters, return true iff they have been disqualified. RHS
1053 may be NULL, in that case ignore it. If we scalarize an aggregate in
1054 intra-SRA we may need to add statements after each statement. This is not
1055 possible if a statement unconditionally has to end the basic block. */
1057 disqualify_ops_if_throwing_stmt (gimple stmt
, tree lhs
, tree rhs
)
1059 if ((sra_mode
== SRA_MODE_EARLY_INTRA
|| sra_mode
== SRA_MODE_INTRA
)
1060 && (stmt_can_throw_internal (stmt
) || stmt_ends_bb_p (stmt
)))
1062 disqualify_base_of_expr (lhs
, "LHS of a throwing stmt.");
1064 disqualify_base_of_expr (rhs
, "RHS of a throwing stmt.");
1070 /* Return true if EXP is a memory reference less aligned than ALIGN. This is
1071 invoked only on strict-alignment targets. */
1074 tree_non_aligned_mem_p (tree exp
, unsigned int align
)
1076 unsigned int exp_align
;
1078 if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
1079 exp
= TREE_OPERAND (exp
, 0);
1081 if (TREE_CODE (exp
) == SSA_NAME
|| is_gimple_min_invariant (exp
))
1084 /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1085 compute an explicit alignment. Pretend that dereferenced pointers
1086 are always aligned on strict-alignment targets. */
1087 if (TREE_CODE (exp
) == MEM_REF
|| TREE_CODE (exp
) == TARGET_MEM_REF
)
1088 exp_align
= get_object_or_type_alignment (exp
);
1090 exp_align
= get_object_alignment (exp
);
1092 if (exp_align
< align
)
1098 /* Return true if EXP is a memory reference less aligned than what the access
1099 ACC would require. This is invoked only on strict-alignment targets. */
1102 tree_non_aligned_mem_for_access_p (tree exp
, struct access
*acc
)
1104 unsigned int acc_align
;
1106 /* The alignment of the access is that of its expression. However, it may
1107 have been artificially increased, e.g. by a local alignment promotion,
1108 so we cap it to the alignment of the type of the base, on the grounds
1109 that valid sub-accesses cannot be more aligned than that. */
1110 acc_align
= get_object_alignment (acc
->expr
);
1111 if (acc
->base
&& acc_align
> TYPE_ALIGN (TREE_TYPE (acc
->base
)))
1112 acc_align
= TYPE_ALIGN (TREE_TYPE (acc
->base
));
1114 return tree_non_aligned_mem_p (exp
, acc_align
);
/* Scan expressions occurring in STMT, create access structures for all accesses
1118 to candidates for scalarization and remove those candidates which occur in
1119 statements or expressions that prevent them from being split apart. Return
1120 true if any access has been inserted. */
1123 build_accesses_from_assign (gimple stmt
)
1126 struct access
*lacc
, *racc
;
1128 if (!gimple_assign_single_p (stmt
)
1129 /* Scope clobbers don't influence scalarization. */
1130 || gimple_clobber_p (stmt
))
1133 lhs
= gimple_assign_lhs (stmt
);
1134 rhs
= gimple_assign_rhs1 (stmt
);
1136 if (disqualify_ops_if_throwing_stmt (stmt
, lhs
, rhs
))
1139 racc
= build_access_from_expr_1 (rhs
, stmt
, false);
1140 lacc
= build_access_from_expr_1 (lhs
, stmt
, true);
1144 lacc
->grp_assignment_write
= 1;
1145 if (STRICT_ALIGNMENT
&& tree_non_aligned_mem_for_access_p (rhs
, lacc
))
1146 lacc
->grp_unscalarizable_region
= 1;
1151 racc
->grp_assignment_read
= 1;
1152 if (should_scalarize_away_bitmap
&& !gimple_has_volatile_ops (stmt
)
1153 && !is_gimple_reg_type (racc
->type
))
1154 bitmap_set_bit (should_scalarize_away_bitmap
, DECL_UID (racc
->base
));
1155 if (STRICT_ALIGNMENT
&& tree_non_aligned_mem_for_access_p (lhs
, racc
))
1156 racc
->grp_unscalarizable_region
= 1;
1160 && (sra_mode
== SRA_MODE_EARLY_INTRA
|| sra_mode
== SRA_MODE_INTRA
)
1161 && !lacc
->grp_unscalarizable_region
1162 && !racc
->grp_unscalarizable_region
1163 && AGGREGATE_TYPE_P (TREE_TYPE (lhs
))
/* FIXME: Turn the following line into an assert after PR 40058 is
   fixed.  */
1166 && lacc
->size
== racc
->size
1167 && useless_type_conversion_p (lacc
->type
, racc
->type
))
1169 struct assign_link
*link
;
1171 link
= (struct assign_link
*) pool_alloc (link_pool
);
1172 memset (link
, 0, sizeof (struct assign_link
));
1177 add_link_to_rhs (racc
, link
);
1180 return lacc
|| racc
;
1183 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1187 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED
, tree op
,
1188 void *data ATTRIBUTE_UNUSED
)
1190 op
= get_base_address (op
);
1193 disqualify_candidate (op
, "Non-scalarizable GIMPLE_ASM operand.");
1198 /* Return true iff callsite CALL has at least as many actual arguments as there
1199 are formal parameters of the function currently processed by IPA-SRA. */
1202 callsite_has_enough_arguments_p (gimple call
)
1204 return gimple_call_num_args (call
) >= (unsigned) func_param_count
;
1207 /* Scan function and look for interesting expressions and create access
1208 structures for them. Return true iff any access is created. */
1211 scan_function (void)
1218 gimple_stmt_iterator gsi
;
1219 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1221 gimple stmt
= gsi_stmt (gsi
);
1225 if (final_bbs
&& stmt_can_throw_external (stmt
))
1226 bitmap_set_bit (final_bbs
, bb
->index
);
1227 switch (gimple_code (stmt
))
1230 t
= gimple_return_retval (stmt
);
1232 ret
|= build_access_from_expr (t
, stmt
, false);
1234 bitmap_set_bit (final_bbs
, bb
->index
);
1238 ret
|= build_accesses_from_assign (stmt
);
1242 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
1243 ret
|= build_access_from_expr (gimple_call_arg (stmt
, i
),
1246 if (sra_mode
== SRA_MODE_EARLY_IPA
)
1248 tree dest
= gimple_call_fndecl (stmt
);
1249 int flags
= gimple_call_flags (stmt
);
1253 if (DECL_BUILT_IN_CLASS (dest
) == BUILT_IN_NORMAL
1254 && DECL_FUNCTION_CODE (dest
) == BUILT_IN_APPLY_ARGS
)
1255 encountered_apply_args
= true;
1256 if (cgraph_get_node (dest
)
1257 == cgraph_get_node (current_function_decl
))
1259 encountered_recursive_call
= true;
1260 if (!callsite_has_enough_arguments_p (stmt
))
1261 encountered_unchangable_recursive_call
= true;
1266 && (flags
& (ECF_CONST
| ECF_PURE
)) == 0)
1267 bitmap_set_bit (final_bbs
, bb
->index
);
1270 t
= gimple_call_lhs (stmt
);
1271 if (t
&& !disqualify_ops_if_throwing_stmt (stmt
, t
, NULL
))
1272 ret
|= build_access_from_expr (t
, stmt
, true);
1276 walk_stmt_load_store_addr_ops (stmt
, NULL
, NULL
, NULL
,
1279 bitmap_set_bit (final_bbs
, bb
->index
);
1281 for (i
= 0; i
< gimple_asm_ninputs (stmt
); i
++)
1283 t
= TREE_VALUE (gimple_asm_input_op (stmt
, i
));
1284 ret
|= build_access_from_expr (t
, stmt
, false);
1286 for (i
= 0; i
< gimple_asm_noutputs (stmt
); i
++)
1288 t
= TREE_VALUE (gimple_asm_output_op (stmt
, i
));
1289 ret
|= build_access_from_expr (t
, stmt
, true);
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */
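/* For instance (illustrative only), accesses with (offset, size, type) of
   (0, 64, struct S), (0, 32, int) and (32, 32, float) would be ordered as

     (0, 64, struct S), (0, 32, int), (32, 32, float)

   because smaller offsets come first and, at equal offsets, bigger accesses
   precede smaller ones.  */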
1307 compare_access_positions (const void *a
, const void *b
)
1309 const access_p
*fp1
= (const access_p
*) a
;
1310 const access_p
*fp2
= (const access_p
*) b
;
1311 const access_p f1
= *fp1
;
1312 const access_p f2
= *fp2
;
1314 if (f1
->offset
!= f2
->offset
)
1315 return f1
->offset
< f2
->offset
? -1 : 1;
1317 if (f1
->size
== f2
->size
)
1319 if (f1
->type
== f2
->type
)
1321 /* Put any non-aggregate type before any aggregate type. */
1322 else if (!is_gimple_reg_type (f1
->type
)
1323 && is_gimple_reg_type (f2
->type
))
1325 else if (is_gimple_reg_type (f1
->type
)
1326 && !is_gimple_reg_type (f2
->type
))
1328 /* Put any complex or vector type before any other scalar type. */
1329 else if (TREE_CODE (f1
->type
) != COMPLEX_TYPE
1330 && TREE_CODE (f1
->type
) != VECTOR_TYPE
1331 && (TREE_CODE (f2
->type
) == COMPLEX_TYPE
1332 || TREE_CODE (f2
->type
) == VECTOR_TYPE
))
1334 else if ((TREE_CODE (f1
->type
) == COMPLEX_TYPE
1335 || TREE_CODE (f1
->type
) == VECTOR_TYPE
)
1336 && TREE_CODE (f2
->type
) != COMPLEX_TYPE
1337 && TREE_CODE (f2
->type
) != VECTOR_TYPE
)
1339 /* Put the integral type with the bigger precision first. */
1340 else if (INTEGRAL_TYPE_P (f1
->type
)
1341 && INTEGRAL_TYPE_P (f2
->type
))
1342 return TYPE_PRECISION (f2
->type
) - TYPE_PRECISION (f1
->type
);
1343 /* Put any integral type with non-full precision last. */
1344 else if (INTEGRAL_TYPE_P (f1
->type
)
1345 && (TREE_INT_CST_LOW (TYPE_SIZE (f1
->type
))
1346 != TYPE_PRECISION (f1
->type
)))
1348 else if (INTEGRAL_TYPE_P (f2
->type
)
1349 && (TREE_INT_CST_LOW (TYPE_SIZE (f2
->type
))
1350 != TYPE_PRECISION (f2
->type
)))
1352 /* Stabilize the sort. */
1353 return TYPE_UID (f1
->type
) - TYPE_UID (f2
->type
);
/* We want the bigger accesses first, thus the opposite operator in the next
   line:  */
1358 return f1
->size
> f2
->size
? -1 : 1;
/* Append a name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */
1366 make_fancy_decl_name (tree decl
)
1370 tree name
= DECL_NAME (decl
);
1372 obstack_grow (&name_obstack
, IDENTIFIER_POINTER (name
),
1373 IDENTIFIER_LENGTH (name
));
1376 sprintf (buffer
, "D%u", DECL_UID (decl
));
1377 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1381 /* Helper for make_fancy_name. */
1384 make_fancy_name_1 (tree expr
)
1391 make_fancy_decl_name (expr
);
1395 switch (TREE_CODE (expr
))
1398 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1399 obstack_1grow (&name_obstack
, '$');
1400 make_fancy_decl_name (TREE_OPERAND (expr
, 1));
1404 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1405 obstack_1grow (&name_obstack
, '$');
/* Arrays with only one element may not have a constant as their
   index.  */
1408 index
= TREE_OPERAND (expr
, 1);
1409 if (TREE_CODE (index
) != INTEGER_CST
)
1411 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
, TREE_INT_CST_LOW (index
));
1412 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1416 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1420 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1421 if (!integer_zerop (TREE_OPERAND (expr
, 1)))
1423 obstack_1grow (&name_obstack
, '$');
1424 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
,
1425 TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)));
1426 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1433 gcc_unreachable (); /* we treat these as scalars. */
1440 /* Create a human readable name for replacement variable of ACCESS. */
1443 make_fancy_name (tree expr
)
1445 make_fancy_name_1 (expr
);
1446 obstack_1grow (&name_obstack
, '\0');
1447 return XOBFINISH (&name_obstack
, char *);
1450 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1451 EXP_TYPE at the given OFFSET. If BASE is something for which
1452 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1453 to insert new statements either before or below the current one as specified
1454 by INSERT_AFTER. This function is not capable of handling bitfields. */
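/* As a hedged illustration (hypothetical, assuming a DECL "s" of a struct
   type with an int member at byte offset 4): a call with OFFSET == 32 and
   EXP_TYPE == int would typically produce a reference that dumps as

     MEM[(int *)&s + 4B]

   i.e. a MEM_REF of the address of the base with the byte offset encoded in
   its second, constant operand.  */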
1457 build_ref_for_offset (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1458 tree exp_type
, gimple_stmt_iterator
*gsi
,
1461 tree prev_base
= base
;
1463 HOST_WIDE_INT base_offset
;
1465 gcc_checking_assert (offset
% BITS_PER_UNIT
== 0);
1467 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1469 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1470 offset such as array[var_index]. */
1476 gcc_checking_assert (gsi
);
1477 tmp
= create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base
)), NULL
);
1478 add_referenced_var (tmp
);
1479 tmp
= make_ssa_name (tmp
, NULL
);
1480 addr
= build_fold_addr_expr (unshare_expr (prev_base
));
1481 STRIP_USELESS_TYPE_CONVERSION (addr
);
1482 stmt
= gimple_build_assign (tmp
, addr
);
1483 gimple_set_location (stmt
, loc
);
1484 SSA_NAME_DEF_STMT (tmp
) = stmt
;
1486 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
1488 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1491 off
= build_int_cst (reference_alias_ptr_type (prev_base
),
1492 offset
/ BITS_PER_UNIT
);
1495 else if (TREE_CODE (base
) == MEM_REF
)
1497 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1498 base_offset
+ offset
/ BITS_PER_UNIT
);
1499 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1500 base
= unshare_expr (TREE_OPERAND (base
, 0));
1504 off
= build_int_cst (reference_alias_ptr_type (base
),
1505 base_offset
+ offset
/ BITS_PER_UNIT
);
1506 base
= build_fold_addr_expr (unshare_expr (base
));
1509 return fold_build2_loc (loc
, MEM_REF
, exp_type
, base
, off
);
1512 DEF_VEC_ALLOC_P_STACK (tree
);
1513 #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
1515 /* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the type of MODEL.  In case this is a chain of references
   to a component, the function will replicate the chain of COMPONENT_REFs of
1518 the expression of MODEL to access it. GSI and INSERT_AFTER have the same
1519 meaning as in build_ref_for_offset. */
1522 build_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1523 struct access
*model
, gimple_stmt_iterator
*gsi
,
1526 tree type
= model
->type
, t
;
1527 VEC(tree
,stack
) *cr_stack
= NULL
;
1529 if (TREE_CODE (model
->expr
) == COMPONENT_REF
)
1531 tree expr
= model
->expr
;
1533 /* Create a stack of the COMPONENT_REFs so later we can walk them in
1534 order from inner to outer. */
1535 cr_stack
= VEC_alloc (tree
, stack
, 6);
1538 tree field
= TREE_OPERAND (expr
, 1);
1539 tree cr_offset
= component_ref_field_offset (expr
);
1540 HOST_WIDE_INT bit_pos
1541 = tree_low_cst (cr_offset
, 1) * BITS_PER_UNIT
1542 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1544 /* We can be called with a model different from the one associated
1545 with BASE so we need to avoid going up the chain too far. */
1546 if (offset
- bit_pos
< 0)
1550 VEC_safe_push (tree
, stack
, cr_stack
, expr
);
1552 expr
= TREE_OPERAND (expr
, 0);
1553 type
= TREE_TYPE (expr
);
1554 } while (TREE_CODE (expr
) == COMPONENT_REF
);
1557 t
= build_ref_for_offset (loc
, base
, offset
, type
, gsi
, insert_after
);
1559 if (TREE_CODE (model
->expr
) == COMPONENT_REF
)
1564 /* Now replicate the chain of COMPONENT_REFs from inner to outer. */
1565 FOR_EACH_VEC_ELT_REVERSE (tree
, cr_stack
, i
, expr
)
1567 tree field
= TREE_OPERAND (expr
, 1);
1568 t
= fold_build3_loc (loc
, COMPONENT_REF
, TREE_TYPE (field
), t
, field
,
1569 TREE_OPERAND (expr
, 2));
1572 VEC_free (tree
, stack
, cr_stack
);
1578 /* Construct a memory reference consisting of component_refs and array_refs to
1579 a part of an aggregate *RES (which is of type TYPE). The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed; it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations,
1584 build_ref_for_model or build_ref_for_offset should be used instead. */
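/* Illustration (hypothetical): with TYPE being

     struct S { int a; int b; };

   and *RES referring to a variable s of that type, a request for OFFSET == 32
   and EXP_TYPE == int would rewrite *RES to the COMPONENT_REF s.b, which is
   what we would rather show in a warning than a raw offset.  */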
1587 build_user_friendly_ref_for_offset (tree
*res
, tree type
, HOST_WIDE_INT offset
,
1593 tree tr_size
, index
, minidx
;
1594 HOST_WIDE_INT el_size
;
1596 if (offset
== 0 && exp_type
1597 && types_compatible_p (exp_type
, type
))
1600 switch (TREE_CODE (type
))
1603 case QUAL_UNION_TYPE
:
1605 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1607 HOST_WIDE_INT pos
, size
;
1608 tree expr
, *expr_ptr
;
1610 if (TREE_CODE (fld
) != FIELD_DECL
)
1613 pos
= int_bit_position (fld
);
1614 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| pos
== 0);
1615 tr_size
= DECL_SIZE (fld
);
1616 if (!tr_size
|| !host_integerp (tr_size
, 1))
1618 size
= tree_low_cst (tr_size
, 1);
1624 else if (pos
> offset
|| (pos
+ size
) <= offset
)
1627 expr
= build3 (COMPONENT_REF
, TREE_TYPE (fld
), *res
, fld
,
1630 if (build_user_friendly_ref_for_offset (expr_ptr
, TREE_TYPE (fld
),
1631 offset
- pos
, exp_type
))
1640 tr_size
= TYPE_SIZE (TREE_TYPE (type
));
1641 if (!tr_size
|| !host_integerp (tr_size
, 1))
1643 el_size
= tree_low_cst (tr_size
, 1);
1645 minidx
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
1646 if (TREE_CODE (minidx
) != INTEGER_CST
|| el_size
== 0)
1648 index
= build_int_cst (TYPE_DOMAIN (type
), offset
/ el_size
);
1649 if (!integer_zerop (minidx
))
1650 index
= int_const_binop (PLUS_EXPR
, index
, minidx
);
1651 *res
= build4 (ARRAY_REF
, TREE_TYPE (type
), *res
, index
,
1652 NULL_TREE
, NULL_TREE
);
1653 offset
= offset
% el_size
;
1654 type
= TREE_TYPE (type
);
/* Return true iff TYPE is the stdarg va_list type.  */
1672 is_va_list_type (tree type
)
1674 return TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (va_list_type_node
);
1677 /* Print message to dump file why a variable was rejected. */
1680 reject (tree var
, const char *msg
)
1682 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1684 fprintf (dump_file
, "Rejected (%d): %s: ", DECL_UID (var
), msg
);
1685 print_generic_expr (dump_file
, var
, 0);
1686 fprintf (dump_file
, "\n");
1690 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
   those variables whose type is suitable for scalarization.  */
1694 find_var_candidates (void)
1697 referenced_var_iterator rvi
;
1701 FOR_EACH_REFERENCED_VAR (cfun
, var
, rvi
)
1703 if (TREE_CODE (var
) != VAR_DECL
&& TREE_CODE (var
) != PARM_DECL
)
1705 type
= TREE_TYPE (var
);
1707 if (!AGGREGATE_TYPE_P (type
))
1709 reject (var
, "not aggregate");
1712 if (needs_to_live_in_memory (var
))
1714 reject (var
, "needs to live in memory");
1717 if (TREE_THIS_VOLATILE (var
))
1719 reject (var
, "is volatile");
1722 if (!COMPLETE_TYPE_P (type
))
1724 reject (var
, "has incomplete type");
1727 if (!host_integerp (TYPE_SIZE (type
), 1))
1729 reject (var
, "type size not fixed");
1732 if (tree_low_cst (TYPE_SIZE (type
), 1) == 0)
1734 reject (var
, "type size is zero");
1737 if (type_internals_preclude_sra_p (type
, &msg
))
1742 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
   we also want to schedule it rather late.  Thus we ignore it in the
   early pass.  */
1745 (sra_mode
== SRA_MODE_EARLY_INTRA
1746 && is_va_list_type (type
)))
1748 reject (var
, "is va_list");
1752 bitmap_set_bit (candidate_bitmap
, DECL_UID (var
));
1754 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1756 fprintf (dump_file
, "Candidate (%d): ", DECL_UID (var
));
1757 print_generic_expr (dump_file
, var
, 0);
1758 fprintf (dump_file
, "\n");
1766 /* Sort all accesses for the given variable, check for partial overlaps and
1767 return NULL if there are any. If there are none, pick a representative for
1768 each combination of offset and size and create a linked list out of them.
1769 Return the pointer to the first representative and make sure it is the first
1770 one in the vector of accesses. */
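/* For example (illustrative only), if the vector for a variable contains
   accesses with (offset, size) pairs

     (0, 32), (0, 32), (32, 32)

   the two accesses at offset 0 are spliced into a single group with a common
   representative, and the result is a linked list of two representatives,
   (0, 32) -> (32, 32).  */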
1772 static struct access
*
1773 sort_and_splice_var_accesses (tree var
)
1775 int i
, j
, access_count
;
1776 struct access
*res
, **prev_acc_ptr
= &res
;
1777 VEC (access_p
, heap
) *access_vec
;
1779 HOST_WIDE_INT low
= -1, high
= 0;
1781 access_vec
= get_base_access_vector (var
);
1784 access_count
= VEC_length (access_p
, access_vec
);
1786 /* Sort by <OFFSET, SIZE>. */
1787 VEC_qsort (access_p
, access_vec
, compare_access_positions
);
1790 while (i
< access_count
)
1792 struct access
*access
= VEC_index (access_p
, access_vec
, i
);
1793 bool grp_write
= access
->write
;
1794 bool grp_read
= !access
->write
;
1795 bool grp_scalar_write
= access
->write
1796 && is_gimple_reg_type (access
->type
);
1797 bool grp_scalar_read
= !access
->write
1798 && is_gimple_reg_type (access
->type
);
1799 bool grp_assignment_read
= access
->grp_assignment_read
;
1800 bool grp_assignment_write
= access
->grp_assignment_write
;
1801 bool multiple_scalar_reads
= false;
1802 bool total_scalarization
= access
->grp_total_scalarization
;
1803 bool grp_partial_lhs
= access
->grp_partial_lhs
;
1804 bool first_scalar
= is_gimple_reg_type (access
->type
);
1805 bool unscalarizable_region
= access
->grp_unscalarizable_region
;
1807 if (first
|| access
->offset
>= high
)
1810 low
= access
->offset
;
1811 high
= access
->offset
+ access
->size
;
1813 else if (access
->offset
> low
&& access
->offset
+ access
->size
> high
)
1816 gcc_assert (access
->offset
>= low
1817 && access
->offset
+ access
->size
<= high
);
1820 while (j
< access_count
)
1822 struct access
*ac2
= VEC_index (access_p
, access_vec
, j
);
1823 if (ac2
->offset
!= access
->offset
|| ac2
->size
!= access
->size
)
1828 grp_scalar_write
= (grp_scalar_write
1829 || is_gimple_reg_type (ac2
->type
));
1834 if (is_gimple_reg_type (ac2
->type
))
1836 if (grp_scalar_read
)
1837 multiple_scalar_reads
= true;
1839 grp_scalar_read
= true;
1842 grp_assignment_read
|= ac2
->grp_assignment_read
;
1843 grp_assignment_write
|= ac2
->grp_assignment_write
;
1844 grp_partial_lhs
|= ac2
->grp_partial_lhs
;
1845 unscalarizable_region
|= ac2
->grp_unscalarizable_region
;
1846 total_scalarization
|= ac2
->grp_total_scalarization
;
1847 relink_to_new_repr (access
, ac2
);
1849 /* If there are both aggregate-type and scalar-type accesses with
1850 this combination of size and offset, the comparison function
1851 should have put the scalars first. */
1852 gcc_assert (first_scalar
|| !is_gimple_reg_type (ac2
->type
));
1853 ac2
->group_representative
= access
;
1859 access
->group_representative
= access
;
1860 access
->grp_write
= grp_write
;
1861 access
->grp_read
= grp_read
;
1862 access
->grp_scalar_read
= grp_scalar_read
;
1863 access
->grp_scalar_write
= grp_scalar_write
;
1864 access
->grp_assignment_read
= grp_assignment_read
;
1865 access
->grp_assignment_write
= grp_assignment_write
;
1866 access
->grp_hint
= multiple_scalar_reads
|| total_scalarization
;
1867 access
->grp_total_scalarization
= total_scalarization
;
1868 access
->grp_partial_lhs
= grp_partial_lhs
;
1869 access
->grp_unscalarizable_region
= unscalarizable_region
;
1870 if (access
->first_link
)
1871 add_access_to_work_queue (access
);
1873 *prev_acc_ptr
= access
;
1874 prev_acc_ptr
= &access
->next_grp
;
1877 gcc_assert (res
== VEC_index (access_p
, access_vec
, 0));
1881 /* Create a variable for the given ACCESS which determines the type, name and a
1882 few other properties. Return the variable declaration and store it also to
1883 ACCESS->replacement. */
1886 create_access_replacement (struct access
*access
, bool rename
)
1890 repl
= create_tmp_var (access
->type
, "SR");
1891 add_referenced_var (repl
);
1893 mark_sym_for_renaming (repl
);
1895 if (!access
->grp_partial_lhs
1896 && (TREE_CODE (access
->type
) == COMPLEX_TYPE
1897 || TREE_CODE (access
->type
) == VECTOR_TYPE
))
1898 DECL_GIMPLE_REG_P (repl
) = 1;
1900 DECL_SOURCE_LOCATION (repl
) = DECL_SOURCE_LOCATION (access
->base
);
1901 DECL_ARTIFICIAL (repl
) = 1;
1902 DECL_IGNORED_P (repl
) = DECL_IGNORED_P (access
->base
);
1904 if (DECL_NAME (access
->base
)
1905 && !DECL_IGNORED_P (access
->base
)
1906 && !DECL_ARTIFICIAL (access
->base
))
1908 char *pretty_name
= make_fancy_name (access
->expr
);
1909 tree debug_expr
= unshare_expr (access
->expr
), d
;
1911 DECL_NAME (repl
) = get_identifier (pretty_name
);
1912 obstack_free (&name_obstack
, pretty_name
);
1914 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1915 as DECL_DEBUG_EXPR isn't considered when looking for still
1916 used SSA_NAMEs and thus they could be freed. All debug info
	 generation cares about is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  */
1920 for (d
= debug_expr
; handled_component_p (d
); d
= TREE_OPERAND (d
, 0))
1921 switch (TREE_CODE (d
))
1924 case ARRAY_RANGE_REF
:
1925 if (TREE_OPERAND (d
, 1)
1926 && TREE_CODE (TREE_OPERAND (d
, 1)) == SSA_NAME
)
1927 TREE_OPERAND (d
, 1) = SSA_NAME_VAR (TREE_OPERAND (d
, 1));
1928 if (TREE_OPERAND (d
, 3)
1929 && TREE_CODE (TREE_OPERAND (d
, 3)) == SSA_NAME
)
1930 TREE_OPERAND (d
, 3) = SSA_NAME_VAR (TREE_OPERAND (d
, 3));
1933 if (TREE_OPERAND (d
, 2)
1934 && TREE_CODE (TREE_OPERAND (d
, 2)) == SSA_NAME
)
1935 TREE_OPERAND (d
, 2) = SSA_NAME_VAR (TREE_OPERAND (d
, 2));
1940 SET_DECL_DEBUG_EXPR (repl
, debug_expr
);
1941 DECL_DEBUG_EXPR_IS_FROM (repl
) = 1;
1942 if (access
->grp_no_warning
)
1943 TREE_NO_WARNING (repl
) = 1;
1945 TREE_NO_WARNING (repl
) = TREE_NO_WARNING (access
->base
);
1948 TREE_NO_WARNING (repl
) = 1;
1952 fprintf (dump_file
, "Created a replacement for ");
1953 print_generic_expr (dump_file
, access
->base
, 0);
1954 fprintf (dump_file
, " offset: %u, size: %u: ",
1955 (unsigned) access
->offset
, (unsigned) access
->size
);
1956 print_generic_expr (dump_file
, repl
, 0);
1957 fprintf (dump_file
, "\n");
1959 sra_stats
.replacements
++;
1964 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1967 get_access_replacement (struct access
*access
)
1969 gcc_assert (access
->grp_to_be_replaced
);
1971 if (!access
->replacement_decl
)
1972 access
->replacement_decl
= create_access_replacement (access
, true);
1973 return access
->replacement_decl
;
1976 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
1977 not mark it for renaming. */
1980 get_unrenamed_access_replacement (struct access
*access
)
1982 gcc_assert (!access
->grp_to_be_replaced
);
1984 if (!access
->replacement_decl
)
1985 access
->replacement_decl
= create_access_replacement (access
, false);
1986 return access
->replacement_decl
;
1990 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */
1996 build_access_subtree (struct access
**access
)
1998 struct access
*root
= *access
, *last_child
= NULL
;
1999 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2001 *access
= (*access
)->next_grp
;
2002 while (*access
&& (*access
)->offset
+ (*access
)->size
<= limit
)
2005 root
->first_child
= *access
;
2007 last_child
->next_sibling
= *access
;
2008 last_child
= *access
;
2010 if (!build_access_subtree (access
))
2014 if (*access
&& (*access
)->offset
< limit
)
2020 /* Build a tree of access representatives, ACCESS is the pointer to the first
2021 one, others are linked in a list by the next_grp field. Return false iff
2022 some accesses partially overlap. */
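/* Illustration (hypothetical): representatives with (offset, size) pairs

     (0, 64), (0, 32), (32, 32), (64, 32)

   yield two trees: one rooted at (0, 64) with children (0, 32) and (32, 32),
   and a second root (64, 32); the roots stay linked by next_grp.  */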
2025 build_access_trees (struct access
*access
)
2029 struct access
*root
= access
;
2031 if (!build_access_subtree (&access
))
2033 root
->next_grp
= access
;
/* Return true if expr contains some ARRAY_REFs into variable bounded
   arrays.  */
2042 expr_with_var_bounded_array_refs_p (tree expr
)
2044 while (handled_component_p (expr
))
2046 if (TREE_CODE (expr
) == ARRAY_REF
2047 && !host_integerp (array_ref_low_bound (expr
), 0))
2049 expr
= TREE_OPERAND (expr
, 0);
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |    Written to in an assignment statement
   |    |
   |    |    Access read as scalar _once_
   |    |    |
   |    |    |    Read in an assignment statement
   |    |    |    |
   |    |    |    |    Scalarize    Comment
-----------------------------------------------------------------------------
   0    0    0    0                 No access for the scalar
   0    0    0    1                 No access for the scalar
   0    0    1    0    No           Single read - won't help
   0    0    1    1    No           The same case as above
   0    1    0    0                 No access for the scalar
   0    1    0    1                 No access for the scalar
   0    1    1    0    Yes          s = *g; return s.i;
   0    1    1    1    Yes          The same case as above
   1    0    0    0    No           Won't help
   1    0    0    1    Yes          s.i = 1; *g = s;
   1    0    1    0    Yes          s.i = 5; g = s.i;
   1    0    1    1    Yes          The same case as above
   1    1    0    0    No           Won't help.
   1    1    0    1    Yes          s.i = 1; *g = s;
   1    1    1    0    Yes          s = *g; return s.i;
   1    1    1    1    Yes          Any of the above yeses  */
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements
      && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      bool new_integer_type;
      if (TREE_CODE (root->type) == ENUMERAL_TYPE)
	{
	  tree rt = root->type;
	  root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
						       TYPE_UNSIGNED (rt));
	  new_integer_type = true;
	}
      else
	new_integer_type = false;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced%s.\n",
		   new_integer_type ? " with an integer": "");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else
    {
      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (sth_created
      && (!hole || root->grp_total_scalarization))
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
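
/* Illustration only (hypothetical, never compiled): row "0 1 1 0" of the table
   above.  s.i is written in an assignment statement and then read as a scalar
   exactly once, so analyze_access_subtree schedules a replacement for it.  */
#if 0
struct illus_s { int i; float f; };

static int
illus_row_0110 (struct illus_s *g)
{
  struct illus_s s;
  s = *g;         /* written to in an assignment statement */
  return s.i;     /* read as a scalar once */
}
#endif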
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  break;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
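
/* Illustration only (hypothetical, never compiled): propagation across an
   assign link.  The scalar access src.i on the right hand side of the
   aggregate copy is propagated to dst, which receives an artificial child
   access at the corresponding offset so the copy can later be performed
   scalar by scalar.  */
#if 0
struct illus_pair { int i; int j; };

static int
illus_propagation (struct illus_pair src)
{
  struct illus_pair dst;
  src.i = 5;      /* scalar access in the RHS aggregate */
  dst = src;      /* assign link from src to dst */
  return dst.i;   /* uses the propagated subaccess */
}
#endif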
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = referenced_var (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
		<= max_total_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
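
/* Illustration only (hypothetical, never compiled): a record consisting only
   of scalar fields.  If its size is below the MOVE_RATIO-derived limit
   computed above and nothing disqualifies it, the variable is totally
   scalarized even though t.b is never accessed individually.  */
#if 0
struct illus_small { int a; int b; };

static int
illus_total_scalarization (struct illus_small *p)
{
  struct illus_small t;
  t = *p;         /* aggregate copy makes total scalarization attractive */
  return t.a;
}
#endif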
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  while (access)
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gimple stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
}
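
/* Illustration only (a hypothetical sketch with made-up replacement names):
   before a statement that consumes the whole aggregate, the generated subtree
   copies amount to flushing the replacements back, roughly as if

     s.i = SR_s_i;
     s.f = SR_s_f;
     use_aggregate (s);

   had been written; with WRITE true the direction of the copies is
   reversed.  */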
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)

{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, access->base, access->offset, access,
				     NULL, false);

	  if (write)
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
	{
	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
	  start_offset = access->offset
	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT };/* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
				     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
			       gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
			       0, 0, gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignment this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
				 HOST_WIDE_INT left_offset,
				 gimple_stmt_iterator *old_gsi,
				 gimple_stmt_iterator *new_gsi,
				 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
	  gimple stmt;
	  tree rhs;

	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
						true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (*refreshed == SRA_UDH_NONE)
		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
								  old_gsi);

	      if (*refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
					   new_gsi, true);
	      else
		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
					   new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else if (*refreshed == SRA_UDH_NONE
	       && lacc->grp_read && !lacc->grp_covered)
	*refreshed = handle_unscalarized_data_in_subtree (top_racc,
							  old_gsi);

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
					 old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (VEC_length (constructor_elt,
		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
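
/* Illustration only (hypothetical, never compiled): an initializer like the
   one below is gimplified into an assignment of an empty CONSTRUCTOR.  When
   the access tree covers the whole variable, the statement is removed and all
   scalar replacements are zero-initialized by init_subtree_with_zero
   instead.  */
#if 0
struct illus_z { int i; int j; };

static int
illus_ctor_assign (void)
{
  struct illus_z z = { 0 };   /* becomes an empty-CONSTRUCTOR store */
  return z.i + z.j;
}
#endif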
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  tree repl, decl;

  decl = get_unrenamed_access_replacement (racc);

  repl = gimple_default_def (cfun, decl);
  if (!repl)
    {
      repl = make_ssa_name (decl, gimple_build_nop ());
      set_default_def (decl, repl);
    }

  return repl;
}
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

static inline bool
contains_bitfld_comp_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static inline bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_comp_ref_p (lhs)
	      && !access_has_children_p (lacc))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (*stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs)
		   && !access_has_children_p (racc))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */

  if (modify_this_stmt
      || gimple_has_volatile_ops (*stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
				 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;
    }
  else
    {
      if (access_has_children_p (lacc) && access_has_children_p (racc))
	{
	  gimple_stmt_iterator orig_gsi = *gsi;
	  enum unscalarized_data_handling refreshed;

	  if (lacc->grp_read && !lacc->grp_covered)
	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
	  else
	    refreshed = SRA_UDH_NONE;

	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
					   &orig_gsi, gsi, &refreshed);
	  if (refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (gsi);
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (&orig_gsi, true);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (racc)
	    {
	      if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "Removing load: ");
		      print_gimple_stmt (dump_file, *stmt, 0, 0);
		    }

		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      rhs = get_repl_default_def_ssa_name (racc);
		      if (!useless_type_conversion_p (TREE_TYPE (lhs),
						      TREE_TYPE (rhs)))
			rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
					       TREE_TYPE (lhs), rhs);
		    }
		  else
		    {
		      if (racc->first_child)
			generate_subtree_copies (racc->first_child, lhs,
						 racc->offset, 0, 0, gsi,
						 false, false, loc);

		      gcc_assert (*stmt == gsi_stmt (*gsi));
		      unlink_stmt_vdef (*stmt);
		      gsi_remove (gsi, true);
		      sra_stats.deleted++;
		      return SRA_AM_REMOVED;
		    }
		}
	      else if (racc->first_child)
		generate_subtree_copies (racc->first_child, lhs, racc->offset,
					 0, 0, gsi, false, true, loc);
	    }
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}
    }

  /* This gimplification must be done after generate_subtree_copies, lest we
     insert the subtree copies in the middle of the gimplified sequence.  */
  if (force_gimple_rhs)
    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  if (gimple_assign_rhs1 (*stmt) != rhs)
    {
      modify_this_stmt = true;
      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
      gcc_assert (*stmt == gsi_stmt (orig_gsi));
    }

  return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
}
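
/* Illustration only (a hypothetical sketch with made-up replacement names):
   when both sides of "dst = src;" are scalarized, the branch above loads
   matching replacements directly, roughly

     SR_dst_i = SR_src_i;
     SR_dst_f = SR_src_f;

   and deletes the original statement when no unscalarized data remain on the
   right hand side.  */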
/* Traverse the function body and carry out all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (&stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      VEC (access_p, heap) *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      if (!seq)
	{
	  seq = gimple_seq_alloc ();
	  gsi = gsi_start (seq);
	}

      for (access = VEC_index (access_p, access_vec, 0);
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_intra_sra,			/* gate */
  early_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_intra_sra,			/* gate */
  late_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = gimple_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = gimple_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
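
/* Illustration only (hypothetical, never compiled): the first function below
   only dereferences its argument at offset zero, which
   ptr_parm_has_direct_uses accepts; the second passes the pointer value
   itself on, a direct use that disqualifies the parameter.  */
#if 0
static int
illus_deref_only (int *p)
{
  return *p;              /* dereference - not a direct use */
}

extern void illus_escape (int *);

static int
illus_direct_use (int *p)
{
  illus_escape (p);       /* direct use of the pointer value */
  return *p;
}
#endif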
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (VEC (access_p, heap) *representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = VEC_index (access_p, representatives, i);
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the functions (according to
   final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  VEC (basic_block, heap) *queue;
  basic_block bb;

  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, queue, bb);
      bb->aux = bb;
    }

  while (!VEC_empty (basic_block, queue))
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = VEC_pop (basic_block, queue);
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR)
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    VEC_quick_push (basic_block, queue, e->src);
	  }
    }

  VEC_free (basic_block, heap, queue);
}
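
/* Illustration only (a hypothetical sketch of the dataflow above): if every
   successor of a block dereferences parameter 0 through at least 32 bits, the
   block inherits MAX (its current value, MIN over successors) = 32; blocks in
   final_bbs keep their own values because the function may terminate there
   before any successor executes.  */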
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   For each pointer parameter, the maximum dereferenced "distance," i.e. the
   offset + size of the accessed part, is calculated instead of a simple
   boolean, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereference.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
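
/* Illustration only (hypothetical, never compiled): the dereference of p below
   is guarded by a condition, so the distance propagated to the ENTRY block
   stays zero and the representative is marked
   grp_not_necessarilly_dereferenced; hoisting the load into callers could
   fault whenever cond is false.  */
#if 0
static int
illus_conditional_deref (int cond, int *p)
{
  if (cond)
    return *p;            /* not certainly executed */
  return 0;
}
#endif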
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = VEC_index (access_p, access_vec, 0);
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = VEC_length (access_p, access_vec);
  for (i = 1; i < access_count; i++)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this access precludes IPA-SRA of the parameter it is
   associated with.  */

static bool
access_precludes_ipa_sra_p (struct access *access)
{
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  if (STRICT_ALIGNMENT
      && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = VEC_length (access_p, access_vec);

  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = VEC_index (access_p, access_vec, i);
      modification = access->write;
      if (access_precludes_ipa_sra_p (access))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2)
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
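
/* Illustration only (a hypothetical computation): for a 128-bit aggregate
   passed by reference from which only two 32-bit fields are read, total_size
   is 64, which is smaller than agg_size (128) and, when optimizing for speed,
   below the PARAM_IPA_SRA_PTR_GROWTH_FACTOR based limit, so the parameter is
   split into two components.  */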
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  *representatives = VEC_alloc (access_p, heap, func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  VEC_quick_push (access_p, *representatives,
			  &no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  VEC_quick_push (access_p, *representatives, repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  VEC_quick_push (access_p, *representatives, repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	VEC_quick_push (access_p, *representatives, NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      VEC_free (access_p, heap, *representatives);
      *representatives = NULL;
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, VEC(tree, heap) *parms)
{
  int i, len;

  len = VEC_length (tree, parms);
  for (i = 0; i < len; i++)
    if (VEC_index (tree, parms, i) == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
				       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment *adj;

	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	  memset (adj, 0, sizeof (*adj));
	  adj->base_index = get_param_index (parm, parms);
	  adj->base = parm;
	  if (!repr)
	    adj->copy_param = 1;
	  else
	    adj->remove_param = 1;
	}
      else
	{
	  struct ipa_parm_adjustment *adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (adj, 0, sizeof (*adj));
	      gcc_assert (repr->base == parm);
	      adj->base_index = index;
	      adj->base = repr->base;
	      adj->type = repr->type;
	      adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj->offset = repr->offset;
	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			     && (repr->grp_maybe_modified
				 || repr->grp_not_necessarilly_dereferenced));
	    }
	}
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}
/* Analyze the collected accesses and produce a plan what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		VEC_replace (access_p, representatives, i, NULL);
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  VEC_replace (access_p, representatives, i, NULL);
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
	return adj;
    }

  return NULL;
}

/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL, together with all of its uses, and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
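
/* For instance (a sketch with invented SSA names): if parameter N is being
   removed but a statement still defines one of its SSA names,

     n_7 = PHI <n_4(3), n_5(4)>

   becomes, with the substitute VAR_DECL printed as ISR.8,

     ISR.8_9 = PHI <n_4(3), n_5(4)>

   and every use of n_7 is redirected to ISR.8_9 before n_7 is released.  */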

/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
		     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base &&
	  (adj->offset == offset || adj->remove_param))
	{
	  cand = adj;
	  break;
	}
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
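
/* An illustrative before/after (identifiers invented): with an adjustment
   reducing parameter P to a scalar replacement ISRA.1 at offset 0,

     p_2(D)->x   ==>   ISRA.1          (passed by value)
     p_2(D)->x   ==>   *ISRA.1         (cand->by_ref still set)

   A VIEW_CONVERT_EXPR is wrapped around the result only when CONVERT is
   set and the old and new types do not match trivially.  */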

/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
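
/* A sketch of the PR 42714 situation handled above (types invented): an
   empty-CONSTRUCTOR assignment such as

     s = {};

   whose LHS was just rewritten to a replacement of register type cannot
   simply be wrapped in a VIEW_CONVERT_EXPR; instead the RHS is rebuilt:
   build_zero_cst for register types, an empty constructor of the new
   type otherwise.  */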

/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
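
/* A worked example of the traversal (identifiers invented): for a call
   statement in the modified function

     s.x = bar (s.y);

   the argument s.y is rewritten before the LHS s.x, so the statement can
   end up as

     ISRA.2 = bar (ISRA.3);

   after which update_stmt is called and, if the rewritten statement can
   no longer throw, dead EH edges are purged, possibly changing the CFG.  */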

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
	continue;
      name = gimple_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		DECL_MODE (vexpr) = DECL_MODE (adj->base);
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_referenced_var (copy);
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy)
	    = BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
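
/* The generated debug statements look conceptually like this (a sketch,
   with D#1 standing for the DEBUG_EXPR_DECL created above and "parm" for
   a removed parameter):

     # DEBUG D#1 s=> parm      debug source bind inserted at function entry
     # DEBUG parm => D#1       debug binds now refer to D#1

   so a debugger can still show the value of the removed parameter.  */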

/* Return false iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
					 void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
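
/* Such callers can exist in pre-ISO C (an illustrative sketch):

     int f ();                  old-style declaration visible to the caller
     void g (void) { f (); }    call passes no arguments

   while the definition of f has a formal parameter.  Removing or
   splitting that parameter would break g's call, so the whole
   transformation is abandoned for f.  */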

/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
		 cs->caller->uid, cs->callee->uid,
		 cgraph_node_name (cs->caller),
		 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
			       adjustments, false);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
					 NULL, NULL, "isra");
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
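
/* In dumps and symbol tables the clone materialized above appears under
   an "isra" suffix, e.g. (illustrative names)

     f (struct S * p)             original node
     f.isra.0 (int ISRA.4)        versioned node with adjusted formals

   The new node is made local so that later IPA passes may treat it
   freely.  */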

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
				   not_all_callers_have_enough_arguments_p,
				   NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

/* Return true iff early IPA-SRA shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",			/* name */
  ipa_early_sra_gate,		/* gate */
  ipa_early_sra,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_IPA_SRA,			/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_cgraph		/* todo_flags_finish */
 }
};