/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
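
/* As an illustrative sketch (not taken from any particular testcase), a
   function containing

     struct S { int i; float f; } s;
     s.i = 1;
     s.f = 2.0f;
     return s.i;

   can, after SRA, operate on independent scalars along the lines of

     int SR_s_i;
     float SR_s_f;
     SR_s_i = 1;
     SR_s_f = 2.0f;
     return SR_s_i;

   where the replacement variable names are made up for this example; see
   create_access_replacement below for how they are really built.  */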
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "flags.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
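
/* For instance, for a hypothetical candidate of type

     struct { struct { int a; int b; } inner; int c; }

   accessed both as whole sub-objects and as individual fields, the
   representatives could form a tree like

     <0, 96>   (the whole variable)
       <0, 64>   (inner)
         <0, 32>   (inner.a)
         <32, 32>  (inner.b)
       <64, 32>  (c)

   with each node written as <offset, size> in bits; the exact numbers
   depend on the target's type layout.  */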
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access write access? */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely? */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;
/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;
/* Bitmap of candidates.  */
static bitmap candidate_bitmap;
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used as a whole and need to be).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;
/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;
/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;
/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;
/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;
/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
	     "grp_read = %d, grp_hint = %d, "
	     "grp_covered = %d, grp_unscalarizable_region = %d, "
	     "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
	     "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_write, access->total_scalarization,
	     access->grp_read, access->grp_hint,
	     access->grp_covered, access->grp_unscalarizable_region,
	     access->grp_unscalarized_data, access->grp_partial_lhs,
	     access->grp_to_be_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld)
		|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
		|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
		|| !host_integerp (DECL_SIZE (fld), 1))
	      return true;

	    if (AGGREGATE_TYPE_P (ft)
		&& type_internals_preclude_sra_p (ft))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
	return type_internals_preclude_sra_p (et);
      else
	return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = TREE_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
	pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}
/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = NULL_TREE;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
	{
	  disqualify_candidate (base,
				"Encountered an access not aligned to a byte.");
	  return NULL;
	}

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records has a zero-size field as its
   last field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;
  bool last_fld_has_zero_size = false;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;

	last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
      }

  if (last_fld_has_zero_size)
    return false;

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;
	    tree expr;
	    bool ok;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    expr = base;
	    ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
				       ft, false);
	    gcc_assert (ok);

	    access = create_access_1 (base, pos, size);
	    access->expr = expr;
	    access->type = ft;
	    access->total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos);
      }
}
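
/* For example, assuming a 32-bit int, a variable of the hypothetical type

     struct P { int x; int y; };

   conforming to type_consists_of_records_p would get two artificial
   total_scalarization accesses from the function above, <0, 32> for x and
   <32, 32> for y (offsets and sizes in bits).  */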
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (INDIRECT_REF_P (t))
	t = TREE_OPERAND (t, 0);
      t = get_ssa_base_param (t);
    }

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      if (sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
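
/* To illustrate the resulting order: accesses at offset 0 with sizes 64, 32
   and 32 would be sorted so that the biggest access comes first and, among
   equally-sized ones, a scalar (gimple register) type precedes an aggregate
   one.  This is what allows sort_and_splice_var_accesses below to pick the
   first access of each <offset, size> group as the representative.  */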
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;

    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
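
/* The names built here are only for human consumption: for a hypothetical
   access like foo.bar[3].baz the resulting pretty name would be
   "foo$bar$3$baz", and a nameless declaration contributes its UID in the
   form "D1234".  create_access_replacement below installs such a name as
   the DECL_NAME of the replacement variable.  */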
/* Helper function for build_ref_for_offset.  */

static bool
build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
			tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      if (res)
		{
		  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
				 NULL_TREE);
		  expr_ptr = &expr;
		}
	      else
		expr_ptr = NULL;
	      if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
					  offset - pos, exp_type))
		{
		  if (res)
		    *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  if (res)
	    {
	      index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	      if (!integer_zerop (minidx))
		index = int_const_binop (PLUS_EXPR, index, minidx, 0);
	      *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			     NULL_TREE, NULL_TREE);
	    }
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  return false;
	}
    }
}
/* Construct an expression that would reference a part of aggregate *EXPR of
   type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
   function only determines whether it can build such a reference without
   actually doing it, otherwise, the tree it points to is unshared first and
   then used as a base for further sub-references.

   FIXME: Eventually this should be replaced with
   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
   minor rewrite of fold_stmt.  */

bool
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
		      tree exp_type, bool allow_ptr)
{
  location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;

  if (expr)
    *expr = unshare_expr (*expr);

  if (allow_ptr && POINTER_TYPE_P (type))
    {
      type = TREE_TYPE (type);
      if (expr)
	*expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
    }

  return build_ref_for_offset_1 (expr, type, offset, exp_type);
}
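
/* For example, for a hypothetical aggregate of type

     struct Q { int a; struct { char c[4]; } in; }

   a request for bit offset 40 (assuming 8-bit chars and 32-bit ints) with a
   char EXP_TYPE would be satisfied by descending into the inner record and
   then into the array, building a reference equivalent to base.in.c[1] on
   top of whatever *EXPR was.  */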
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
	  || needs_to_live_in_memory (var)
	  || TREE_THIS_VOLATILE (var)
	  || !COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || type_internals_preclude_sra_p (type)
	  /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  || (sra_mode == SRA_MODE_EARLY_INTRA
	      && is_va_list_type (type)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
	 compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    grp_write = true;
	  else
	    {
	      if (grp_read)
		multiple_reads = true;
	      else
		grp_read = true;
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;
      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
	  || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
	    break;
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
	    break;
	  default:
	    break;
	  }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  */

static void
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      build_access_subtree (access);
    }
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Decide about scalar
   replacements on the way, return true iff any are to be created.  */

static void
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      build_access_subtree (&access);
      root->next_grp = access;
    }
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ };
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.  */

static bool
analyze_access_subtree (struct access *root, bool allow_replacements,
			enum mark_read_status mark_read, bool mark_write)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;
  bool direct_read = root->grp_read;

  if (mark_read == SRA_MR_ASSIGN_READ)
    {
      root->grp_read = 1;
      root->grp_assignment_read = 1;
    }
  if (mark_read == SRA_MR_READ)
    root->grp_read = 1;
  else if (root->grp_assignment_read)
    mark_read = SRA_MR_ASSIGN_READ;
  else if (root->grp_read)
    mark_read = SRA_MR_READ;

  if (mark_write)
    root->grp_write = true;
  else if (root->grp_write)
    mark_write = true;

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      if (!hole && child->offset < covered_to)
	hole = true;
      else
	covered_to += child->size;

      sth_created |= analyze_access_subtree (child, allow_replacements,
					     mark_read, mark_write);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      hole |= !child->grp_covered;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || (root->grp_write && (direct_read || root->grp_assignment_read)))
      /* We must not ICE later on when trying to build an access to the
	 original data within the aggregate even when it is impossible to do in
	 a defined way like in the PR 42703 testcase.  Therefore we check
	 pre-emptively here that we will be able to do that.  */
      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
			       root->type, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced.\n");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else if (covered_to < limit)
    hole = true;

  if (sth_created && !hole)
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this access
   is created long after all splicing and sorting, it's not located in any
   access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
			     model->type, false))
    return NULL;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (!lacc->first_child && !racc->first_child
      && is_gimple_reg_type (racc->type))
    {
      tree t = lacc->base;

      if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
				false))
	{
	  lacc->expr = t;
	  lacc->type = racc->type;
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      /* If a (part of) a union field is on the RHS of an assignment, it can
	 have sub-accesses which do not make sense on the LHS (PR 40351).
	 Check that this is not the case.  */
      if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
				 rchild->type, false))
	continue;

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
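
/* To illustrate the propagation: for a hypothetical assignment dst = src
   between two candidate aggregates where src has known subaccesses <0, 32>
   and <32, 32> but dst so far has none, the loop above creates matching
   artificial children of dst's access, so that the assignment can later be
   rewritten as a pair of scalar copies instead of an aggregate copy.  */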
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage. */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = referenced_var (i);

	if (TREE_CODE (var) == VAR_DECL
	    && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
		<= max_total_scalarization_size)
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    completely_scalarize_record (var, var, 0);
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Will attempt to totally scalarize ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (access)
	build_access_trees (access);
      else
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Return true iff a reference statement into aggregate AGG can be built for
   every single to-be-replaced accesses that is a child of ACCESS, its sibling
   or a child of its sibling.  TOP_OFFSET is the offset from the processed
   access subtree that has to be subtracted from offset of each access.  */

static bool
ref_expr_for_all_replacements_p (struct access *access, tree agg,
				 HOST_WIDE_INT top_offset)
{
  do
    {
      if (access->grp_to_be_replaced
	  && !build_ref_for_offset (NULL, TREE_TYPE (agg),
				    access->offset - top_offset,
				    access->type, false))
	return false;

      if (access->first_child
	  && !ref_expr_for_all_replacements_p (access->first_child, agg,
					       top_offset))
	return false;

      access = access->next_sibling;
    }
  while (access);

  return true;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS is the first child of the root of the subtree to
   be processed.  AGG is an aggregate type expression (can be a declaration but
   does not have to be, it can for example also be an indirect_ref).
   TOP_OFFSET is the offset of the processed subtree which has to be subtracted
   from offsets of individual accesses to get corresponding offsets for AGG.
   If CHUNK_SIZE is non-null, copy only replacements in the interval
   <start_offset, start_offset + chunk_size>, otherwise copy all.  GSI is a
   statement iterator used to place the new statements.  WRITE should be true
   when the statements should write from AGG to the replacement and false if
   vice versa.  If INSERT_AFTER is true, new statements will be added after the
   current statement in GSI, they will be added before the statement
   otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after)
{
  do
    {
      tree expr = agg;

      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree repl = get_access_replacement (access);
	  bool ref_found;
	  gimple stmt;

	  ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
					    access->offset - top_offset,
					    access->type, false);
	  gcc_assert (ref_found);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after);

      access = access->next_sibling;
    }
  while (access);
}
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is
   the root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  fold_convert (access->type,
						integer_zero_node));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after);
}
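
/* E.g., for a hypothetical access tree over struct { short s; float f; }
   with both fields to be replaced, the function would emit roughly

     SR$s = (short) 0;
     SR$f = (float) 0.0;

   which is what the fold_convert of integer_zero_node above produces for
   each replacement's type.  */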
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref = access->base;
	  bool ok;

	  ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
				     access->offset, access->type, false);
	  gcc_assert (ok);

	  if (write)
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
	{
	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
	  start_offset = access->offset
	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
			       start_offset, chunk_size, gsi, write, write);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS
   otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
				     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
			       gsi, false, false);
      return SRA_UDH_RIGHT;
    }
  else
    {
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
			       0, 0, gsi, false, false);
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access
   (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
   (sub)tree.  If that is not possible, refresh the TOP_RACC base aggregate and
   load the accesses from it.  LEFT_OFFSET is the offset of the left whole
   subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
   GSI is stmt iterator used for statement insertions.  *REFRESHED is true iff
   the rhs top aggregate has already been refreshed by contents of its scalar
   reductions and is set to true if this function has to do it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
				 HOST_WIDE_INT left_offset,
				 HOST_WIDE_INT right_offset,
				 gimple_stmt_iterator *old_gsi,
				 gimple_stmt_iterator *new_gsi,
				 enum unscalarized_data_handling *refreshed,
				 tree lhs)
{
  location_t loc = EXPR_LOCATION (lacc->expr);
  do
    {
      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
	  gimple stmt;
	  tree rhs;

	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type,
				       rhs);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first...  */
	      if (*refreshed == SRA_UDH_NONE)
		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
								  lhs,
								  old_gsi);

	      if (*refreshed == SRA_UDH_LEFT)
		{
		  bool repl_found;

		  rhs = lacc->base;
		  repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
						     lacc->offset, lacc->type,
						     false);
		  gcc_assert (repl_found);
		}
	      else
		{
		  bool repl_found;

		  rhs = top_racc->base;
		  repl_found = build_ref_for_offset (&rhs,
						     TREE_TYPE (top_racc->base),
						     offset, lacc->type, false);
		  gcc_assert (repl_found);
		}
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else if (*refreshed == SRA_UDH_NONE
	       && lacc->grp_read && !lacc->grp_covered)
	*refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
							  old_gsi);

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc->first_child, top_racc,
					 left_offset, right_offset,
					 old_gsi, new_gsi, refreshed, lhs);
      lacc = lacc->next_sibling;
    }
  while (lacc);
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED }; /* stmt eliminated */

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  if (VEC_length (constructor_elt,
		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
				 true, true);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true);
      return SRA_AM_MODIFIED;
    }
}
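
/* For instance, a hypothetical zeroing assignment

     s = (struct S){};

   where all components of s are scalarized and grp_covered is set is
   replaced outright by stores of zeros to the replacements,

     SR$i = 0;
     SR$f = 0.0;

   and the original statement is removed; if the replacements do not cover
   the whole of s, the statement is kept and the zero stores are added
   after it.  */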
/* Create a new suitable default definition SSA_NAME and replace all uses of
   SSA with it, RACC is access describing the uninitialized part of an
   aggregate that is being loaded.  */

static void
replace_uses_with_default_def_ssa_name (tree ssa, struct access *racc)
{
  tree repl, decl;

  decl = get_unrenamed_access_replacement (racc);

  repl = gimple_default_def (cfun, decl);
  if (!repl)
    {
      repl = make_ssa_name (decl, gimple_build_nop ());
      set_default_def (decl, repl);
    }

  replace_uses_by (ssa, repl);
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc = gimple_location (*stmt);
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !access_has_children_p (lacc))
	    {
	      tree expr = lhs;
	      if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
					TREE_TYPE (rhs), false))
		{
		  lhs = expr;
		  gimple_assign_set_lhs (*stmt, expr);
		}
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !access_has_children_p (racc))
	    {
	      tree expr = rhs;
	      if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
					TREE_TYPE (lhs), false))
		rhs = expr;
	    }
	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements
     of the LHS.  This is what the first branch does.  */
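
  /* As a concrete (hypothetical) illustration of that fourth case: given

       union U { struct A a; struct B b; } x, y;
       ...
       x.a.f = ...;    <- f scalarized into SR$f
       y = x;          <- types match but y's subaccesses hang off U.b

     the replacements of x are first stored back into x, the original
     aggregate copy is left in place, and the replacements of y are then
     re-loaded from y after the statement.  */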
  if (gimple_has_volatile_ops (*stmt)
      || contains_view_convert_expr_p (rhs)
      || contains_view_convert_expr_p (lhs)
      || (access_has_children_p (racc)
	  && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
      || (access_has_children_p (lacc)
	  && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				 gsi, false, false);
      if (access_has_children_p (lacc))
	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
				 gsi, true, true);
      sra_stats.separate_lhs_rhs_handling++;
    }
  else
    {
      if (access_has_children_p (lacc) && access_has_children_p (racc))
	{
	  gimple_stmt_iterator orig_gsi = *gsi;
	  enum unscalarized_data_handling refreshed;

	  if (lacc->grp_read && !lacc->grp_covered)
	    refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
	  else
	    refreshed = SRA_UDH_NONE;

	  load_assign_lhs_subreplacements (lacc->first_child, racc,
					   lacc->offset, racc->offset,
					   &orig_gsi, gsi, &refreshed, lhs);
	  if (refreshed != SRA_UDH_RIGHT)
	    {
	      if (*stmt == gsi_stmt (*gsi))
		gsi_next (gsi);

	      unlink_stmt_vdef (*stmt);
	      gsi_remove (&orig_gsi, true);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc))
	    {
	      if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
		{
		  if (racc->first_child)
		    generate_subtree_copies (racc->first_child, lhs,
					     racc->offset, 0, 0, gsi,
					     false, false);
		  gcc_assert (*stmt == gsi_stmt (*gsi));
		  if (TREE_CODE (lhs) == SSA_NAME)
		    replace_uses_with_default_def_ssa_name (lhs, racc);

		  unlink_stmt_vdef (*stmt);
		  gsi_remove (gsi, true);
		  sra_stats.deleted++;
		  return SRA_AM_REMOVED;
		}
	      else if (racc->first_child)
		generate_subtree_copies (racc->first_child, lhs,
					 racc->offset, 0, 0, gsi, false, true);
	    }
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true);
	}
    }

  /* This gimplification must be done after generate_subtree_copies, lest we
     insert the subtree copies in the middle of the gimplified sequence.  */
  if (force_gimple_rhs)
    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  if (gimple_assign_rhs1 (*stmt) != rhs)
    {
      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
      gcc_assert (*stmt == gsi_stmt (orig_gsi));
    }

  return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
}
/* Traverse the function body and all modifications as decided in
   analyze_all_variable_accesses.  */

static void
sra_modify_function_body (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (&stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      maybe_clean_eh_stmt (stmt);
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      VEC (access_p, heap) *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      if (!seq)
	{
	  seq = gimple_seq_alloc ();
	  gsi = gsi_start (seq);
	}

      for (access = VEC_index (access_p, access_vec, 0);
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
    }

  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
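
/* So for a hypothetical function

     int f (struct S p)
     {
       return p.i + 1;
     }

   where p.i got a scalar replacement SR$p$i, the sequence inserted on the
   single successor edge of the entry block is roughly

     SR$p$i = p.i;

   which seeds the replacement with the incoming parameter value.  */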
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */

static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  sra_modify_function_body ();
  initialize_parameter_reductions ();
  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

  ret = TODO_update_ssa;

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */

static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */

static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0;
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_intra_sra,			/* gate */
  early_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_intra_sra,			/* gate */
  late_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = gimple_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = gimple_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (INDIRECT_REF_P (lhs)
	      && TREE_OPERAND (lhs, 0) == name)
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (INDIRECT_REF_P (rhs)
	      && TREE_OPERAND (rhs, 0) == name)
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (INDIRECT_REF_P (arg)
		  && TREE_OPERAND (arg, 0) == name)
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
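
/* To illustrate the distinction, with a hypothetical pointer parameter p:

     x = p->i;       <- dereference, fine for IPA-SRA
     *p = y;         <- dereference on the lhs, also fine
     q = p;          <- direct use of the pointer value itself, makes
			ptr_parm_has_direct_uses return true
     g (p);          <- passing the pointer on is a direct use as well

   Only parameters whose default definition is used exclusively through
   dereferences can safely be turned into components passed by value.  */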
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (VEC (access_p, heap) *representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = VEC_index (access_p, representatives, i);
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  VEC (basic_block, heap) *queue;
  basic_block bb;

  queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
  VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, queue, bb);
      bb->aux = bb;
    }

  while (!VEC_empty (basic_block, queue))
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = VEC_pop (basic_block, queue);
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR)
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    VEC_quick_push (basic_block, queue, e->src);
	  }
    }

  VEC_free (basic_block, heap, queue);
}
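
/* A small hypothetical example of this propagation: for one pointer
   parameter and a diamond CFG

	     BB1  (*p dereferenced, distance 32)
	    /   \
	  BB2    BB3  (no dereference)
	    \   /
	     BB4  (*p dereferenced, distance 32)

   BB4's distance of 32 propagates to both BB2 and BB3 (each has the single
   successor BB4), and BB1 then takes the maximum of its own 32 and
   min (32, 32) over its successors, staying at 32.  Had BB3 been a final
   BB (say, containing a call that might not return), it would have kept
   distance 0 and the minimum over BB1's successors would not have been
   raised.  */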
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the accessed
   part, is calculated rather than a simple boolean for each pointer parameter
   in order to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereference.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = VEC_index (access_p, access_vec, 0);
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = VEC_length (access_p, access_vec);
  for (i = 1; i < access_count; i++)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this access precludes IPA-SRA of the parameter it is
   associated with.  */

static bool
access_precludes_ipa_sra_p (struct access *access)
{
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = VEC_length (access_p, access_vec);

  qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
	 compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      access = VEC_index (access_p, access_vec, i);
      modification = access->write;
      if (access_precludes_ipa_sra_p (access))
	return NULL;

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
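
/* The "all or nothing law" above means that, e.g., for a hypothetical
   parameter with accesses at (offset, size) pairs

     (0, 32), (0, 32), (32, 32)    <- spliced into two groups, OK
     (0, 64), (32, 32)             <- second access starts inside the
				      first one, whole parameter rejected

   Accesses at the same offset must also agree in size to be merged into
   one group.  */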
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);
      new_param_count++;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
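
/* To sketch the size check above with hypothetical numbers: for a by-ref
   parameter pointing to a 256-bit aggregate of which two 32-bit components
   are used (both safely dereferenceable and unmodified), total_size is 64.
   With PARAM_IPA_SRA_PTR_GROWTH_FACTOR == 2 and a 64-bit pointer,
   parm_size_limit is 2 * 64 == 128, so 64 < 256 && 64 <= 128 holds and the
   parameter is split into two components.  When optimizing for size the
   limit shrinks to the pointer size itself, i.e. 64 bits.  */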
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  *representatives = VEC_alloc (access_p, heap, func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = TREE_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  VEC_quick_push (access_p, *representatives,
			  &no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  VEC_quick_push (access_p, *representatives, repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  VEC_quick_push (access_p, *representatives, repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	VEC_quick_push (access_p, *representatives, NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      VEC_free (access_p, heap, *representatives);
      *representatives = NULL;
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static int
get_param_index (tree base, VEC(tree, heap) *parms)
{
  int i, len;

  len = VEC_length (tree, parms);
  for (i = 0; i < len; i++)
    if (VEC_index (tree, parms, i) == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
				       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment *adj;

	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	  memset (adj, 0, sizeof (*adj));
	  adj->base_index = get_param_index (parm, parms);
	  adj->base = parm;
	  if (!repr)
	    adj->copy_param = 1;
	  else
	    adj->remove_param = 1;
	}
      else
	{
	  struct ipa_parm_adjustment *adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
	      memset (adj, 0, sizeof (*adj));
	      gcc_assert (repr->base == parm);
	      adj->base_index = index;
	      adj->base = repr->base;
	      adj->type = repr->type;
	      adj->offset = repr->offset;
	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			     && (repr->grp_maybe_modified
				 || repr->grp_not_necessarilly_dereferenced));
	    }
	}
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}
/* Analyze the collected accesses and produce a plan what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		VEC_replace (access_p, representatives, i, NULL);
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  VEC_replace (access_p, representatives, i, NULL);
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      get_var_ann (repl);
      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
	return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL together with all of its uses and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  return true;
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
		     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (INDIRECT_REF_P (base))
    base = TREE_OPERAND (base, 0);

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base &&
	  (adj->offset == offset || adj->remove_param))
	{
	  cand = adj;
	  break;
	}
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    {
      tree folded;
      src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
		    cand->reduction);
      folded = gimple_fold_indirect_ref (src);
      if (folded)
	src = folded;
    }
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
/* Traverse the function body and all modifications as described in
   ADJUSTMENTS.  */

static void
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      bool bb_changed = false;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      bb_changed = true;
	      update_stmt (stmt);
	      maybe_clean_eh_stmt (stmt);
	    }
	  gsi_next (&gsi);
	}
      if (bb_changed)
	gimple_purge_dead_eh_edges (bb);
    }
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
	continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
	continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	{
	  /* All other users must have been removed by
	     ipa_sra_modify_function_body.  */
	  gcc_assert (is_gimple_debug (stmt));
	  gimple_debug_bind_reset_value (stmt);
	  update_stmt (stmt);
	}
    }
}
/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
		 cs->caller->uid, cs->callee->uid,
		 cgraph_node_name (cs->caller),
		 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
      {
	compute_inline_parameters (cs->caller);
	bitmap_set_bit (recomputed_callers, cs->caller->uid);
      }
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl && cgraph_get_node (call_fndecl) == node)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }

  return;
}
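
/* The effect at a call site is, schematically (hypothetical signature):

     before:  foo (&s);       with foo (struct S *p) reading only p->i
     after:   foo (s.i);      with foo (int ISRA_i)

   ipa_modify_call_arguments rewrites every such call according to the
   adjustments, and recursive calls inside NODE itself are patched by the
   loop above.  */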
/* Create an abstract origin declaration for OLD_DECL and make it an abstract
   origin of the provided decl so that there are preserved parameters for debug
   information.  */

static void
create_abstract_origin (tree old_decl)
{
  if (!DECL_ABSTRACT_ORIGIN (old_decl))
    {
      tree new_decl = copy_node (old_decl);

      DECL_ABSTRACT (new_decl) = 1;
      SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
      SET_DECL_RTL (new_decl, NULL);
      DECL_STRUCT_FUNCTION (new_decl) = NULL;
      DECL_ARTIFICIAL (old_decl) = 1;
      DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
    }
}
/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  */

static void
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *alias;
  for (alias = node->same_body; alias; alias = alias->next)
    ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
  /* current_function_decl must be handled last, after same_body aliases,
     as following functions will use what it computed.  */
  create_abstract_origin (current_function_decl);
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (node, adjustments);
  cgraph_make_node_local (node);
  return;
}
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  modify_function (node, adjustments);
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);
  ret = TODO_update_ssa;

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
/* Return if early ipa sra shall be performed.  */

static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra;
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",				/* name */
  ipa_early_sra_gate,			/* gate */
  ipa_early_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_SRA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph	/* todo_flags_finish */
 }
};