/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
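
/* As a deliberately simplified sketch of the transformation (the variable
   names such as SR_s_a are made up for illustration and do not reflect the
   actual replacement naming scheme), the pass can turn

     struct S { int a; float b; } s;
     s.a = 1;
     s.b = 2.0f;
     use (s.a, s.b);

   into roughly

     int SR_s_a = 1;
     float SR_s_b = 2.0f;
     use (SR_s_a, SR_s_b);

   after which the aggregate s may become dead and the new scalars are
   exposed to the subsequent scalar optimizers.  */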
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-flow.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
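
/* As a rough illustration of the trees described above (only a sketch of the
   idea, not a dump of an actual run), for

     struct S { struct T { int x; int y; } t; int z; } s;

   where s, s.t and s.t.x are all accessed, the representative for s would be
   the tree root, the representative for s.t its first child and that for
   s.t.x in turn a child of s.t; an access to s.z would become a sibling of
   s.t, linked via next_sibling.  */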
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;
/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
             "grp_read = %d, grp_hint = %d, grp_assignment_read = %d,"
             "grp_covered = %d, grp_unscalarizable_region = %d, "
             "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
             "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_write, access->total_scalarization,
             access->grp_read, access->grp_hint, access->grp_assignment_read,
             access->grp_covered, access->grp_unscalarizable_region,
             access->grp_unscalarized_data, access->grp_partial_lhs,
             access->grp_to_be_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld)
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
                || !host_integerp (DECL_SIZE (fld), 1))
              return true;

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
        return type_internals_preclude_sra_p (et);
      else
        return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */

static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
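
/* For illustration, accesses with (offset, size) pairs of (0, 64), (32, 32)
   and (0, 32) would be ordered by this function as (0, 64), (0, 32),
   (32, 32): for equal offsets the bigger access comes first, so that an
   enclosing access always precedes the accesses contained within it (a
   made-up example, not output of any particular run).  */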
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */

    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
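
/* To illustrate the intent of build_ref_for_offset with a made-up example
   rather than a verified dump: for a declared aggregate "struct S { int a;
   int b; } s", an OFFSET of 32 on a target with 8-bit units and an int
   EXP_TYPE, the built reference would be equivalent to the gimple memory
   reference MEM[(int *)&s + 4B], i.e. a dereference of the address of s at a
   constant byte offset.  */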
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type;

      offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
                              TREE_OPERAND (model->expr, 1), NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = tree_low_cst (tr_size, 1);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx, 0);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
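
/* By contrast with build_ref_for_offset, for TYPE being
   "struct { int a[4]; }" with 32-bit int and an OFFSET of 64, this function
   would extend *RES with the component and array references ".a[2]" instead
   of producing a MEM_REF (again a hypothetical illustration of the intended
   output, chosen because such expressions are what users recognize in
   warnings).  */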
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
          || needs_to_live_in_memory (var)
          || TREE_THIS_VOLATILE (var)
          || !COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || type_internals_preclude_sra_p (type)
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
             we also want to schedule it rather late.  Thus we ignore it in
             the early pass.  */
          || (sra_mode == SRA_MODE_EARLY_INTRA
              && is_va_list_type (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            grp_write = true;
          else
            {
              if (grp_read)
                multiple_reads = true;
              else
                grp_read = true;
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
          || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
         as DECL_DEBUG_EXPR isn't considered when looking for still
         used SSA_NAMEs and thus they could be freed.  All debug info
         generation cares is whether something is constant or variable
         and that get_ref_base_and_extent works properly on the
         expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
        switch (TREE_CODE (d))
          {
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
            if (TREE_OPERAND (d, 1)
                && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
              TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
            if (TREE_OPERAND (d, 3)
                && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
              TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
            break;
          case COMPONENT_REF:
            if (TREE_OPERAND (d, 2)
                && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
              TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
            break;
          default:
            break;
          }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
        TREE_NO_WARNING (repl) = 1;
      else
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
                        enum mark_read_status mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read == SRA_MR_ASSIGN_READ)
    {
      root->grp_read = 1;
      root->grp_assignment_read = 1;
    }
  if (mark_read == SRA_MR_READ)
    root->grp_read = 1;
  else if (root->grp_assignment_read)
    mark_read = SRA_MR_ASSIGN_READ;
  else if (root->grp_read)
    mark_read = SRA_MR_READ;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
        hole = true;
      else
        covered_to += child->size;

      sth_created |= analyze_access_subtree (child,
                                             allow_replacements && !scalar,
                                             mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
          || (root->grp_write && (direct_read || root->grp_assignment_read))))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Marking ");
          print_generic_expr (dump_file, root->base, 0);
          fprintf (dump_file, " offset: %u, size: %u: ",
                   (unsigned) root->offset, (unsigned) root->size);
          fprintf (dump_file, " to be replaced.\n");
        }

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
        ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
                              HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
        {
          *exact_match = child;
          return true;
        }

      if (child->offset < norm_offset + size
          && child->offset + child->size > norm_offset)
        return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
                                HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
                                           model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
                                  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (!lacc->first_child && !racc->first_child
      && is_gimple_reg_type (racc->type))
    {
      tree t = lacc->base;

      lacc->type = racc->type;
      if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
                                              racc->type))
        lacc->expr = t;
      else
        {
          lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
                                            lacc->base, lacc->offset,
                                            racc, NULL, false);
          lacc->grp_no_warning = true;
        }
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
        continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
                                        &new_acc))
        {
          if (new_acc)
            {
              rchild->grp_hint = 1;
              new_acc->grp_hint |= new_acc->grp_read;
              if (rchild->first_child)
                ret |= propagate_subaccesses_across_link (new_acc, rchild);
            }
          continue;
        }

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
        {
          ret = true;
          if (racc->first_child)
            propagate_subaccesses_across_link (new_acc, rchild);
        }
    }

  return ret;
}
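
/* To make the propagation above concrete: for candidate aggregates "a" and
   "b" of the same structure type and a statement "a = b;", if a subaccess
   such as one for b.f exists on the right hand side, a corresponding
   artificial subaccess of "a" can be created at the same relative offset, so
   that the assignment can later be rewritten as copies of the individual
   scalar replacements (an informal sketch of the code above, not a trace of
   an actual run).  */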
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
        {
          struct access *lacc = link->lacc;

          if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
            continue;
          lacc = lacc->group_representative;
          if (propagate_subaccesses_across_link (lacc, racc)
              && lacc->first_link)
            add_access_to_work_queue (lacc);
        }
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
        && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
        tree var = referenced_var (i);

        if (TREE_CODE (var) == VAR_DECL
            && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
                <= max_total_scalarization_size)
            && type_consists_of_records_p (TREE_TYPE (var)))
          {
            completely_scalarize_record (var, var, 0, var);
            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "Will attempt to totally scalarize ");
                print_generic_expr (dump_file, var, 0);
                fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
              }
          }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
        disqualify_candidate (var,
                              "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
        {
          res++;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\nAccess trees for ");
              print_generic_expr (dump_file, var, 0);
              fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
              dump_access_tree (dump_file, access);
              fprintf (dump_file, "\n");
            }
        }
      else
        disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
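/* For a sense of scale (illustrative figures only; MOVE_RATIO is
   target-dependent): on a 64-bit target with UNITS_PER_WORD == 8 and a
   MOVE_RATIO of 4, max_total_scalarization_size works out to
   8 * 8 * 4 = 256 bits, so a hypothetical

     struct P { struct { int x; int y; } a, b; };

   of 128 bits consisting only of records falls under the limit and is a
   candidate for total scalarization.  */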
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, otherwise they
   will be added before it.  */

static void
generate_subtree_copies (struct access *access, tree agg,
                         HOST_WIDE_INT top_offset,
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
                         gimple_stmt_iterator *gsi, bool write,
                         bool insert_after, location_t loc)
{
  while (access)
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
        return;

      if (access->grp_to_be_replaced
          && (chunk_size == 0
              || access->offset + access->size > start_offset))
        {
          tree expr, repl = get_access_replacement (access);
          gimple stmt;

          expr = build_ref_for_model (loc, agg, access->offset - top_offset,
                                      access, gsi, insert_after);

          if (write)
            {
              if (access->grp_partial_lhs)
                expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (repl, expr);
            }
          else
            {
              TREE_NO_WARNING (repl) = 1;
              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (expr, repl);
            }
          gimple_set_location (stmt, loc);

          if (insert_after)
            gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
          else
            gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          update_stmt (stmt);
          sra_stats.subtree_copies++;
        }

      if (access->first_child)
        generate_subtree_copies (access->first_child, agg, top_offset,
                                 start_offset, chunk_size, gsi,
                                 write, insert_after, loc);

      access = access->next_sibling;
    }
}
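/* A minimal sketch of the output of generate_subtree_copies (hypothetical
   names): for an aggregate "s" with scalar replacements s$i at offset 0 and
   s$f at offset 32, a call with WRITE true inserts

     s$i = s.i;
     s$f = s.f;

   while with WRITE false the assignments run in the opposite direction,
   flushing the replacements back into the aggregate.  */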
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
                        bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
                                  fold_convert (access->type,
                                                integer_zero_node));
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
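/* Sketch (hypothetical): for an access tree of "s" with replacements s$i
   (an int) and s$f (a float), the recursion above expands a zeroing
   constructor assignment such as "s = (struct S){};" into

     s$i = 0;
     s$f = 0.0;

   one statement per to-be-replaced access.  */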
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
         access expression to extract the scalar component afterwards.
         This happens if scalarizing a function return value or parameter
         like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
         gcc.c-torture/compile/20011217-1.c.

         We also want to use this when accessing a complex or vector which can
         be accessed as a different type too, potentially creating a need for
         type conversion (see PR42196) and when scalarized unions are involved
         in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
        {
          tree ref;

          ref = build_ref_for_model (loc, access->base, access->offset, access,
                                     NULL, false);

          if (write)
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
                                                false, GSI_NEW_STMT);
              stmt = gimple_build_assign (repl, ref);
              gimple_set_location (stmt, loc);
              gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              stmt = gimple_build_assign (ref, repl);
              gimple_set_location (stmt, loc);
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
            }
        }
      else
        *expr = repl;
      sra_stats.exprs++;
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
          && host_integerp (TREE_OPERAND (bfr, 1), 1)
          && host_integerp (TREE_OPERAND (bfr, 2), 1))
        {
          chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
          start_offset = access->offset
            + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
        }
      else
        start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
                               start_offset, chunk_size, gsi, write, write,
                               loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
                                     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
                               gsi, false, false,
                               gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
                               0, 0, gsi, false, false,
                               gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignments this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
                                 HOST_WIDE_INT left_offset,
                                 gimple_stmt_iterator *old_gsi,
                                 gimple_stmt_iterator *new_gsi,
                                 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      if (lacc->grp_to_be_replaced)
        {
          struct access *racc;
          HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
          gimple stmt;
          tree rhs;

          racc = find_access_in_subtree (top_racc, offset, lacc->size);
          if (racc && racc->grp_to_be_replaced)
            {
              rhs = get_access_replacement (racc);
              if (!useless_type_conversion_p (lacc->type, racc->type))
                rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
            }
          else
            {
              /* No suitable access on the right hand side, need to load from
                 the aggregate.  See if we have to update it first...  */
              if (*refreshed == SRA_UDH_NONE)
                *refreshed = handle_unscalarized_data_in_subtree (top_racc,
                                                                  old_gsi);

              if (*refreshed == SRA_UDH_LEFT)
                rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
                                           new_gsi, true);
              else
                rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
                                           new_gsi, true);
            }

          stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
          gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
          gimple_set_location (stmt, loc);
          update_stmt (stmt);
          sra_stats.subreplacements++;
        }
      else if (*refreshed == SRA_UDH_NONE
               && lacc->grp_read && !lacc->grp_covered)
        *refreshed = handle_unscalarized_data_in_subtree (top_racc,
                                                          old_gsi);
      if (lacc->first_child)
        load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
                                         old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
                             SRA_AM_MODIFIED,  /* stmt changed but not
                                                  eliminated */
                             SRA_AM_REMOVED }; /* stmt eliminated */

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (VEC_length (constructor_elt,
                  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
         following should handle it gracefully.  */
      if (access_has_children_p (acc))
        generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
                                 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  tree repl, decl;

  decl = get_unrenamed_access_replacement (racc);

  repl = gimple_default_def (cfun, decl);
  if (!repl)
    {
      repl = make_ssa_name (decl, gimple_build_nop ());
      set_default_def (decl, repl);
    }

  return repl;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
                                          gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
                                           gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        {
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
             ???  This should move to fold_stmt which we simply should
             call after building a VIEW_CONVERT_EXPR here.  */
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
              && !access_has_children_p (lacc))
            {
              lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
                                          gsi, false);
              gimple_assign_set_lhs (*stmt, lhs);
            }
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
                   && !contains_view_convert_expr_p (rhs)
                   && !access_has_children_p (racc))
            rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
                                        gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
            {
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
                                     rhs);
              if (is_gimple_reg_type (TREE_TYPE (lhs))
                  && TREE_CODE (lhs) != SSA_NAME)
                force_gimple_rhs = true;
            }
        }
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).
     Unions are fun.

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */
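  /* A rough, hypothetical illustration of the first scenario: with

       struct S { int i; int j; } a, b;

     where all four components are scalarized, "a = b;" can be replaced
     entirely by

       a$i = b$i;
       a$j = b$j;

     and the original aggregate copy is removed.  In the fourth case, by
     contrast, the RHS aggregate is first refreshed from its own
     replacements, the original copy is kept to do the copying, and the LHS
     replacements are then re-loaded from it.  */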
  if (gimple_has_volatile_ops (*stmt)
      || contains_view_convert_expr_p (rhs)
      || contains_view_convert_expr_p (lhs))
    {
      if (access_has_children_p (racc))
        generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
                                 gsi, false, false, loc);
      if (access_has_children_p (lacc))
        generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
                                 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;
    }
  else
    {
      if (access_has_children_p (lacc) && access_has_children_p (racc))
        {
          gimple_stmt_iterator orig_gsi = *gsi;
          enum unscalarized_data_handling refreshed;

          if (lacc->grp_read && !lacc->grp_covered)
            refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
          else
            refreshed = SRA_UDH_NONE;

          load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
                                           &orig_gsi, gsi, &refreshed);
          if (refreshed != SRA_UDH_RIGHT)
            {
              gsi_next (gsi);
              unlink_stmt_vdef (*stmt);
              gsi_remove (&orig_gsi, true);
              sra_stats.deleted++;
              return SRA_AM_REMOVED;
            }
        }
      else
        {
          if (racc)
            {
              if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
                {
                  if (dump_file)
                    {
                      fprintf (dump_file, "Removing load: ");
                      print_gimple_stmt (dump_file, *stmt, 0, 0);
                    }

                  if (TREE_CODE (lhs) == SSA_NAME)
                    {
                      rhs = get_repl_default_def_ssa_name (racc);
                      if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                      TREE_TYPE (rhs)))
                        rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                               TREE_TYPE (lhs), rhs);
                    }
                  else
                    {
                      if (racc->first_child)
                        generate_subtree_copies (racc->first_child, lhs,
                                                 racc->offset, 0, 0, gsi,
                                                 false, false, loc);

                      gcc_assert (*stmt == gsi_stmt (*gsi));
                      unlink_stmt_vdef (*stmt);
                      gsi_remove (gsi, true);
                      sra_stats.deleted++;
                      return SRA_AM_REMOVED;
                    }
                }
              else if (racc->first_child)
                generate_subtree_copies (racc->first_child, lhs, racc->offset,
                                         0, 0, gsi, false, true, loc);
            }
          if (access_has_children_p (lacc))
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
                                     0, 0, gsi, true, true, loc);
        }
    }

  /* This gimplification must be done after generate_subtree_copies, lest we
     insert the subtree copies in the middle of the gimplified sequence.  */
  if (force_gimple_rhs)
    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (gimple_assign_rhs1 (*stmt) != rhs)
    {
      modify_this_stmt = true;
      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
      gcc_assert (*stmt == gsi_stmt (orig_gsi));
    }

  return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
}
/* Traverse the function body and perform all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum assignment_mod_result assign_result;
          bool modified = false, deleted = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_modify_expr (t, &gsi, false);
              break;

            case GIMPLE_ASSIGN:
              assign_result = sra_modify_assign (&stmt, &gsi);
              modified |= assign_result == SRA_AM_MODIFIED;
              deleted = assign_result == SRA_AM_REMOVED;
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_modify_expr (t, &gsi, false);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_modify_expr (t, &gsi, true);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_modify_expr (t, &gsi, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_modify_expr (t, &gsi, true);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          if (!deleted)
            gsi_next (&gsi);
        }
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      VEC (access_p, heap) *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
        continue;

      if (!seq)
        {
          seq = gimple_seq_alloc ();
          gsi = gsi_start (seq);
        }

      for (access = VEC_index (access_p, access_vec, 0);
           access;
           access = access->next_grp)
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
                                 EXPR_LOCATION (parm));
    }

  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
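/* Sketch of the effect (hypothetical): for

     int f (struct S s) { return s.i; }

   whose parameter component s.i is scalarized into s$i, a statement
   "s$i = s.i;" is emitted on the single edge leaving the entry block, so
   the replacement is initialized from the incoming parameter aggregate
   before any use.  */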
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */

static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
                            sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
                            sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
                            sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
                            sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}

/* Perform early intraprocedural SRA.  */

static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */

static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}

static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",                               /* name */
  gate_intra_sra,                       /* gate */
  early_intra_sra,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",                                /* name */
  gate_intra_sra,                       /* gate */
  late_intra_sra,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_SRA,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_update_address_taken,            /* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
          && (!(name = gimple_default_def (cfun, parm))
              || has_zero_uses (name)));
}

/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = gimple_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
        continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          while (handled_component_p (lhs))
            lhs = TREE_OPERAND (lhs, 0);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_OPERAND (lhs, 0) == name
              && integer_zerop (TREE_OPERAND (lhs, 1))
              && types_compatible_p (TREE_TYPE (lhs),
                                     TREE_TYPE (TREE_TYPE (name))))
            uses_ok++;
        }
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          while (handled_component_p (rhs))
            rhs = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (rhs) == MEM_REF
              && TREE_OPERAND (rhs, 0) == name
              && integer_zerop (TREE_OPERAND (rhs, 1))
              && types_compatible_p (TREE_TYPE (rhs),
                                     TREE_TYPE (TREE_TYPE (name))))
            uses_ok++;
        }
      else if (is_gimple_call (stmt))
        {
          unsigned i;
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              while (handled_component_p (arg))
                arg = TREE_OPERAND (arg, 0);
              if (TREE_CODE (arg) == MEM_REF
                  && TREE_OPERAND (arg, 0) == name
                  && integer_zerop (TREE_OPERAND (arg, 1))
                  && types_compatible_p (TREE_TYPE (arg),
                                         TREE_TYPE (TREE_TYPE (name))))
                uses_ok++;
            }
        }

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
        --uses_ok;

      if (uses_ok != 0)
        ret = true;

      if (ret)
        BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
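/* For illustration (hypothetical): in

     int g (int *p) { return *p; }

   the parameter is only dereferenced, so it has no direct uses.  In

     int h (int *p) { record (p); return *p; }

   passing the pointer itself to record is a direct use, so
   ptr_parm_has_direct_uses returns true and p cannot be considered for
   IPA-SRA.  */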
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);

      count++;

      if (TREE_THIS_VOLATILE (parm)
          || TREE_ADDRESSABLE (parm)
          || (!is_gimple_reg_type (type) && is_va_list_type (type)))
        continue;

      if (is_unused_scalar_param (parm))
        {
          ret = true;
          continue;
        }

      if (POINTER_TYPE_P (type))
        {
          type = TREE_TYPE (type);

          if (TREE_CODE (type) == FUNCTION_TYPE
              || TYPE_VOLATILE (type)
              || (TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
              || !is_gimple_reg (parm)
              || is_va_list_type (type)
              || ptr_parm_has_direct_uses (parm))
            continue;
        }
      else if (!AGGREGATE_TYPE_P (type))
        continue;

      if (!COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || (AGGREGATE_TYPE_P (type)
              && type_internals_preclude_sra_p (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
          print_generic_expr (dump_file, parm, 0);
          fprintf (dump_file, "\n");
        }
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
                     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}

/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (VEC (access_p, heap) *representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = VEC_index (access_p, representatives, i);
           repr;
           repr = repr->next_grp)
        {
          struct access *access;
          bitmap visited;
          ao_ref ar;

          if (no_accesses_p (repr))
            continue;
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
              || repr->grp_maybe_modified)
            continue;

          ao_ref_init (&ar, repr->expr);
          visited = BITMAP_ALLOC (NULL);
          for (access = repr; access; access = access->next_sibling)
            {
              /* All accesses are read ones, otherwise grp_maybe_modified would
                 be trivially set.  */
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
                                  mark_maybe_modified, repr, &visited);
              if (repr->grp_maybe_modified)
                break;
            }
          BITMAP_FREE (visited);
        }
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the functions (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  VEC (basic_block, heap) *queue;
  basic_block bb;

  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, queue, bb);
      bb->aux = bb;
    }

  while (!VEC_empty (basic_block, queue))
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = VEC_pop (basic_block, queue);
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
        continue;

      for (i = 0; i < func_param_count; i++)
        {
          int idx = bb->index * func_param_count + i;
          bool first = true;
          HOST_WIDE_INT inh = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              int succ_idx = e->dest->index * func_param_count + i;

              if (e->src == EXIT_BLOCK_PTR)
                continue;

              if (first)
                {
                  first = false;
                  inh = bb_dereferences [succ_idx];
                }
              else if (bb_dereferences [succ_idx] < inh)
                inh = bb_dereferences [succ_idx];
            }

          if (!first && bb_dereferences[idx] < inh)
            {
              bb_dereferences[idx] = inh;
              change = true;
            }
        }

      if (change && !bitmap_bit_p (final_bbs, bb->index))
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            if (e->src->aux)
              continue;

            e->src->aux = e->src;
            VEC_quick_push (basic_block, queue, e->src);
          }
    }

  VEC_free (basic_block, heap, queue);
}
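/* A small worked example (hypothetical): if parameter 0 is dereferenced up
   to offset + size 64 in both successors of a block BB while BB itself only
   reaches 32, the step above raises bb_dereferences for BB to
   MAX (32, MIN (64, 64)) = 64.  If one successor dereferences nothing, the
   minimum over the successors is 0 and BB keeps its own value of 32.  */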
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
        {
          int i;
          for (i = 0; i < func_param_count; i++)
            {
              int idx = bb->index * func_param_count + i;
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
            }
        }
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the accessed
   part, is calculated for each pointer parameter rather than a simple boolean,
   in order to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table before propagation:\n",
                             bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table after propagation:\n",
                             bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
        continue;

      do
        {
          if ((repr->offset + repr->size) > bb_dereferences[idx])
            repr->grp_not_necessarilly_dereferenced = 1;
          repr = repr->next_grp;
        }
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = VEC_index (access_p, access_vec, 0);
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = VEC_length (access_p, access_vec);
  for (i = 1; i < access_count; i++)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      if (access->write)
        return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this access precludes IPA-SRA of the parameter it is
   associated with.  */

static bool
access_precludes_ipa_sra_p (struct access *access)
{
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
          || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = VEC_length (access_p, access_vec);

  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
         compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      access = VEC_index (access_p, access_vec, i);
      modification = access->write;
      if (access_precludes_ipa_sra_p (access))
        return NULL;

      /* Access is about to become group representative unless we find some
         nasty overlap which would preclude us from breaking this parameter
         apart.  */

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset)
            {
              /* All or nothing law for parameters.  */
              if (access->offset + access->size > ac2->offset)
                return NULL;
              else
                break;
            }
          else if (ac2->size != access->size)
            return NULL;

          if (access_precludes_ipa_sra_p (ac2))
            return NULL;

          modification |= ac2->write;
          ac2->group_representative = access;
          ac2->next_sibling = access->next_sibling;
          access->next_sibling = ac2;
          j++;
        }

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
        *ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);
      new_param_count++;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
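/* Worked example (hypothetical numbers): for a by-reference parameter
   pointing to a 128-bit aggregate of which two independent 32-bit fields
   are read (and neither is maybe-modified nor not-necessarily-dereferenced),
   total_size is 64.  With a 64-bit pointer (cur_parm_size == 64) and
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR == 2, parm_size_limit is 128; since
   64 < 128 and 64 <= 128 both hold, the parameter is split into two
   components.  */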
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                           MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  *representatives = VEC_alloc (access_p, heap, func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          VEC_quick_push (access_p, *representatives,
                          &no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          VEC_quick_push (access_p, *representatives, repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          VEC_quick_push (access_p, *representatives, repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        VEC_quick_push (access_p, *representatives, NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      VEC_free (access_p, heap, *representatives);
      *representatives = NULL;
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, VEC(tree, heap) *parms)
{
  int i, len;

  len = VEC_length (tree, parms);
  for (i = 0; i < len; i++)
    if (VEC_index (tree, parms, i) == base)
      return i;
  gcc_unreachable ();
  return -1;
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
                                       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment *adj;

          adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
          memset (adj, 0, sizeof (*adj));
          adj->base_index = get_param_index (parm, parms);
          adj->base = parm;
          if (!repr)
            adj->copy_param = 1;
          else
            adj->remove_param = 1;
        }
      else
        {
          struct ipa_parm_adjustment *adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
              memset (adj, 0, sizeof (*adj));
              gcc_assert (repr->base == parm);
              adj->base_index = index;
              adj->base = repr->base;
              adj->type = repr->type;
              adj->offset = repr->offset;
              adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                             && (repr->grp_maybe_modified
                                 || repr->grp_not_necessarilly_dereferenced));
            }
        }
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                VEC_replace (access_p, representatives, i, NULL);
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  VEC_replace (access_p, representatives, i, NULL);
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      get_var_ann (repl);
      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL together with all of its uses and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function will
   leave incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
                     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base &&
          (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
        continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
        {
          /* All other users must have been removed by
             ipa_sra_modify_function_body.  */
          gcc_assert (is_gimple_debug (stmt));
          gimple_debug_bind_reset_value (stmt);
          update_stmt (stmt);
        }
    }
}
/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
      compute_inline_parameters (cs->caller);
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}
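/* Sketch of a caller-side rewrite (hypothetical): if "int f (struct S *p)"
   is reduced so that only the member p->i is passed by value, a call

     r = f (&s);

   in each caller is adjusted by ipa_modify_call_arguments into roughly

     r = f.isra.0 (s.i);

   and recursive calls within the new function body are redirected the same
   way.  */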
/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  struct cgraph_edge *cs;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) * redirect_callers;
  int node_callers;

  node_callers = 0;
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    node_callers++;
  redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

  rebuild_cgraph_edges ();
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
					 NULL, NULL, "isra");
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
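/* Note that the "isra" suffix passed to cgraph_function_versioning above is
   what makes the new versions show up as e.g. foo.isra.0 in dumps and in the
   symbol table; the exact number is assigned by the cloning machinery.  */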
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (adjustments == NULL)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
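/* End-to-end illustration of what a successful run of this pass achieves
   (types and names invented for the example):

     struct S { int a, b, c; };
     static int foo (struct S *p) { return p->a + p->c; }

   becomes, conceptually,

     static int foo.isra.0 (int a, int c) { return a + c; }

   with every caller rewritten to load the two used fields and pass them by
   value, removing both the indirection and the unused component.  */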
/* Return true iff early interprocedural SRA shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",				/* name */
  ipa_early_sra_gate,			/* gate */
  ipa_early_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_SRA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph	/* todo_flags_finish */
 }
};