/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.

   A small illustrative example of the overall transformation follows in the
   comment below.  */
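/* For illustration only: a made-up function FOO that uses a small structure
   exclusively through its fields,

     struct point { int x; int y; };

     int
     foo (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   is conceptually rewritten by SRA so that each accessed field lives in its
   own scalar replacement,

     int p$x = 1;
     int p$y = 2;
     return p$x + p$y;

   after which the aggregate P is no longer needed and the new scalars are
   exposed to constant propagation and the other scalar optimizers.  */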
#include "coretypes.h"
#include "alloc-pool.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
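/* For illustration only: assuming group representatives exist for the
   (offset, size) pairs (0, 64), (0, 32) and (32, 32) of a single aggregate,
   the tree built from them would be

       (0, 64)
	 first_child   -> (0, 32)
			    next_sibling -> (32, 32)

   i.e. both smaller accesses lie entirely within the (0, 64) access and the
   children are ordered by increasing offset.  */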
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses", 16);
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links", 16);
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;
/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;
/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
461 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
464 dump_access_tree_1 (FILE *f
, struct access
*access
, int level
)
470 for (i
= 0; i
< level
; i
++)
471 fputs ("* ", dump_file
);
473 dump_access (f
, access
, true);
475 if (access
->first_child
)
476 dump_access_tree_1 (f
, access
->first_child
, level
+ 1);
478 access
= access
->next_sibling
;
483 /* Dump all access trees for a variable, given the pointer to the first root in
487 dump_access_tree (FILE *f
, struct access
*access
)
489 for (; access
; access
= access
->next_grp
)
490 dump_access_tree_1 (f
, access
, 0);
493 /* Return true iff ACC is non-NULL and has subaccesses. */
496 access_has_children_p (struct access
*acc
)
498 return acc
&& acc
->first_child
;
501 /* Return true iff ACC is (partly) covered by at least one replacement. */
504 access_has_replacements_p (struct access
*acc
)
506 struct access
*child
;
507 if (acc
->grp_to_be_replaced
)
509 for (child
= acc
->first_child
; child
; child
= child
->next_sibling
)
510 if (access_has_replacements_p (child
))
515 /* Return a vector of pointers to accesses for the variable given in BASE or
516 NULL if there is none. */
518 static vec
<access_p
> *
519 get_base_access_vector (tree base
)
521 return base_access_vec
->get (base
);
524 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
525 in ACCESS. Return NULL if it cannot be found. */
527 static struct access
*
528 find_access_in_subtree (struct access
*access
, HOST_WIDE_INT offset
,
531 while (access
&& (access
->offset
!= offset
|| access
->size
!= size
))
533 struct access
*child
= access
->first_child
;
535 while (child
&& (child
->offset
+ child
->size
<= offset
))
536 child
= child
->next_sibling
;
543 /* Return the first group representative for DECL or NULL if none exists. */
545 static struct access
*
546 get_first_repr_for_decl (tree base
)
548 vec
<access_p
> *access_vec
;
550 access_vec
= get_base_access_vector (base
);
554 return (*access_vec
)[0];
557 /* Find an access representative for the variable BASE and given OFFSET and
558 SIZE. Requires that access trees have already been built. Return NULL if
559 it cannot be found. */
561 static struct access
*
562 get_var_base_offset_size_access (tree base
, HOST_WIDE_INT offset
,
565 struct access
*access
;
567 access
= get_first_repr_for_decl (base
);
568 while (access
&& (access
->offset
+ access
->size
<= offset
))
569 access
= access
->next_grp
;
573 return find_access_in_subtree (access
, offset
, size
);
576 /* Add LINK to the linked list of assign links of RACC. */
578 add_link_to_rhs (struct access
*racc
, struct assign_link
*link
)
580 gcc_assert (link
->racc
== racc
);
582 if (!racc
->first_link
)
584 gcc_assert (!racc
->last_link
);
585 racc
->first_link
= link
;
588 racc
->last_link
->next
= link
;
590 racc
->last_link
= link
;
594 /* Move all link structures in their linked list in OLD_RACC to the linked list
597 relink_to_new_repr (struct access
*new_racc
, struct access
*old_racc
)
599 if (!old_racc
->first_link
)
601 gcc_assert (!old_racc
->last_link
);
605 if (new_racc
->first_link
)
607 gcc_assert (!new_racc
->last_link
->next
);
608 gcc_assert (!old_racc
->last_link
|| !old_racc
->last_link
->next
);
610 new_racc
->last_link
->next
= old_racc
->first_link
;
611 new_racc
->last_link
= old_racc
->last_link
;
615 gcc_assert (!new_racc
->last_link
);
617 new_racc
->first_link
= old_racc
->first_link
;
618 new_racc
->last_link
= old_racc
->last_link
;
620 old_racc
->first_link
= old_racc
->last_link
= NULL
;
623 /* Add ACCESS to the work queue (which is actually a stack). */
626 add_access_to_work_queue (struct access
*access
)
628 if (!access
->grp_queued
)
630 gcc_assert (!access
->next_queued
);
631 access
->next_queued
= work_queue_head
;
632 access
->grp_queued
= 1;
633 work_queue_head
= access
;
637 /* Pop an access from the work queue, and return it, assuming there is one. */
639 static struct access
*
640 pop_access_from_work_queue (void)
642 struct access
*access
= work_queue_head
;
644 work_queue_head
= access
->next_queued
;
645 access
->next_queued
= NULL
;
646 access
->grp_queued
= 0;
651 /* Allocate necessary structures. */
654 sra_initialize (void)
656 candidate_bitmap
= BITMAP_ALLOC (NULL
);
657 candidates
= new hash_table
<uid_decl_hasher
>
658 (vec_safe_length (cfun
->local_decls
) / 2);
659 should_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
660 cannot_scalarize_away_bitmap
= BITMAP_ALLOC (NULL
);
661 gcc_obstack_init (&name_obstack
);
662 base_access_vec
= new hash_map
<tree
, auto_vec
<access_p
> >;
663 memset (&sra_stats
, 0, sizeof (sra_stats
));
664 encountered_apply_args
= false;
665 encountered_recursive_call
= false;
666 encountered_unchangable_recursive_call
= false;
669 /* Deallocate all general structures. */
672 sra_deinitialize (void)
674 BITMAP_FREE (candidate_bitmap
);
677 BITMAP_FREE (should_scalarize_away_bitmap
);
678 BITMAP_FREE (cannot_scalarize_away_bitmap
);
679 access_pool
.release ();
680 assign_link_pool
.release ();
681 obstack_free (&name_obstack
, NULL
);
683 delete base_access_vec
;
686 /* Remove DECL from candidates for SRA and write REASON to the dump file if
689 disqualify_candidate (tree decl
, const char *reason
)
691 if (bitmap_clear_bit (candidate_bitmap
, DECL_UID (decl
)))
692 candidates
->remove_elt_with_hash (decl
, DECL_UID (decl
));
694 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
696 fprintf (dump_file
, "! Disqualifying ");
697 print_generic_expr (dump_file
, decl
, 0);
698 fprintf (dump_file
, " - %s\n", reason
);
702 /* Return true iff the type contains a field or an element which does not allow
706 type_internals_preclude_sra_p (tree type
, const char **msg
)
711 switch (TREE_CODE (type
))
715 case QUAL_UNION_TYPE
:
716 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
717 if (TREE_CODE (fld
) == FIELD_DECL
)
719 tree ft
= TREE_TYPE (fld
);
721 if (TREE_THIS_VOLATILE (fld
))
723 *msg
= "volatile structure field";
726 if (!DECL_FIELD_OFFSET (fld
))
728 *msg
= "no structure field offset";
731 if (!DECL_SIZE (fld
))
733 *msg
= "zero structure field size";
736 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
738 *msg
= "structure field offset not fixed";
741 if (!tree_fits_uhwi_p (DECL_SIZE (fld
)))
743 *msg
= "structure field size not fixed";
746 if (!tree_fits_shwi_p (bit_position (fld
)))
748 *msg
= "structure field size too big";
751 if (AGGREGATE_TYPE_P (ft
)
752 && int_bit_position (fld
) % BITS_PER_UNIT
!= 0)
754 *msg
= "structure field is bit field";
758 if (AGGREGATE_TYPE_P (ft
) && type_internals_preclude_sra_p (ft
, msg
))
765 et
= TREE_TYPE (type
);
767 if (TYPE_VOLATILE (et
))
769 *msg
= "element type is volatile";
773 if (AGGREGATE_TYPE_P (et
) && type_internals_preclude_sra_p (et
, msg
))
783 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
784 base variable if it is. Return T if it is not an SSA_NAME. */
787 get_ssa_base_param (tree t
)
789 if (TREE_CODE (t
) == SSA_NAME
)
791 if (SSA_NAME_IS_DEFAULT_DEF (t
))
792 return SSA_NAME_VAR (t
);
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
804 mark_parm_dereference (tree base
, HOST_WIDE_INT dist
, gimple stmt
)
806 basic_block bb
= gimple_bb (stmt
);
807 int idx
, parm_index
= 0;
810 if (bitmap_bit_p (final_bbs
, bb
->index
))
813 for (parm
= DECL_ARGUMENTS (current_function_decl
);
814 parm
&& parm
!= base
;
815 parm
= DECL_CHAIN (parm
))
818 gcc_assert (parm_index
< func_param_count
);
820 idx
= bb
->index
* func_param_count
+ parm_index
;
821 if (bb_dereferences
[idx
] < dist
)
822 bb_dereferences
[idx
] = dist
;
825 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
826 the three fields. Also add it to the vector of accesses corresponding to
827 the base. Finally, return the new access. */
829 static struct access
*
830 create_access_1 (tree base
, HOST_WIDE_INT offset
, HOST_WIDE_INT size
)
832 struct access
*access
= access_pool
.allocate ();
834 memset (access
, 0, sizeof (struct access
));
836 access
->offset
= offset
;
839 base_access_vec
->get_or_insert (base
).safe_push (access
);
844 /* Create and insert access for EXPR. Return created access, or NULL if it is
847 static struct access
*
848 create_access (tree expr
, gimple stmt
, bool write
)
850 struct access
*access
;
851 HOST_WIDE_INT offset
, size
, max_size
;
853 bool ptr
, unscalarizable_region
= false;
855 base
= get_ref_base_and_extent (expr
, &offset
, &size
, &max_size
);
857 if (sra_mode
== SRA_MODE_EARLY_IPA
858 && TREE_CODE (base
) == MEM_REF
)
860 base
= get_ssa_base_param (TREE_OPERAND (base
, 0));
868 if (!DECL_P (base
) || !bitmap_bit_p (candidate_bitmap
, DECL_UID (base
)))
871 if (sra_mode
== SRA_MODE_EARLY_IPA
)
873 if (size
< 0 || size
!= max_size
)
875 disqualify_candidate (base
, "Encountered a variable sized access.");
878 if (TREE_CODE (expr
) == COMPONENT_REF
879 && DECL_BIT_FIELD (TREE_OPERAND (expr
, 1)))
881 disqualify_candidate (base
, "Encountered a bit-field access.");
884 gcc_checking_assert ((offset
% BITS_PER_UNIT
) == 0);
887 mark_parm_dereference (base
, offset
+ size
, stmt
);
891 if (size
!= max_size
)
894 unscalarizable_region
= true;
898 disqualify_candidate (base
, "Encountered an unconstrained access.");
903 access
= create_access_1 (base
, offset
, size
);
905 access
->type
= TREE_TYPE (expr
);
906 access
->write
= write
;
907 access
->grp_unscalarizable_region
= unscalarizable_region
;
910 if (TREE_CODE (expr
) == COMPONENT_REF
911 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr
, 1)))
912 access
->non_addressable
= 1;
918 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
919 register types or (recursively) records with only these two kinds of fields.
920 It also returns false if any of these records contains a bit-field. */
923 type_consists_of_records_p (tree type
)
927 if (TREE_CODE (type
) != RECORD_TYPE
)
930 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
931 if (TREE_CODE (fld
) == FIELD_DECL
)
933 tree ft
= TREE_TYPE (fld
);
935 if (DECL_BIT_FIELD (fld
))
938 if (!is_gimple_reg_type (ft
)
939 && !type_consists_of_records_p (ft
))
946 /* Create total_scalarization accesses for all scalar type fields in DECL that
947 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
948 must be the top-most VAR_DECL representing the variable, OFFSET must be the
949 offset of DECL within BASE. REF must be the memory reference expression for
953 completely_scalarize_record (tree base
, tree decl
, HOST_WIDE_INT offset
,
956 tree fld
, decl_type
= TREE_TYPE (decl
);
958 for (fld
= TYPE_FIELDS (decl_type
); fld
; fld
= DECL_CHAIN (fld
))
959 if (TREE_CODE (fld
) == FIELD_DECL
)
961 HOST_WIDE_INT pos
= offset
+ int_bit_position (fld
);
962 tree ft
= TREE_TYPE (fld
);
963 tree nref
= build3 (COMPONENT_REF
, TREE_TYPE (fld
), ref
, fld
,
966 if (is_gimple_reg_type (ft
))
968 struct access
*access
;
971 size
= tree_to_uhwi (DECL_SIZE (fld
));
972 access
= create_access_1 (base
, pos
, size
);
975 access
->grp_total_scalarization
= 1;
976 /* Accesses for intraprocedural SRA can have their stmt NULL. */
979 completely_scalarize_record (base
, fld
, pos
, nref
);
983 /* Create total_scalarization accesses for all scalar type fields in VAR and
984 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
985 type_consists_of_records_p. */
988 completely_scalarize_var (tree var
)
990 HOST_WIDE_INT size
= tree_to_uhwi (DECL_SIZE (var
));
991 struct access
*access
;
993 access
= create_access_1 (var
, 0, size
);
995 access
->type
= TREE_TYPE (var
);
996 access
->grp_total_scalarization
= 1;
998 completely_scalarize_record (var
, var
, 0, var
);
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */
1004 contains_view_convert_expr_p (const_tree ref
)
1006 while (handled_component_p (ref
))
1008 if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
1010 ref
= TREE_OPERAND (ref
, 0);
1016 /* Search the given tree for a declaration by skipping handled components and
1017 exclude it from the candidates. */
1020 disqualify_base_of_expr (tree t
, const char *reason
)
1022 t
= get_base_address (t
);
1023 if (sra_mode
== SRA_MODE_EARLY_IPA
1024 && TREE_CODE (t
) == MEM_REF
)
1025 t
= get_ssa_base_param (TREE_OPERAND (t
, 0));
1027 if (t
&& DECL_P (t
))
1028 disqualify_candidate (t
, reason
);
1031 /* Scan expression EXPR and create access structures for all accesses to
1032 candidates for scalarization. Return the created access or NULL if none is
1035 static struct access
*
1036 build_access_from_expr_1 (tree expr
, gimple stmt
, bool write
)
1038 struct access
*ret
= NULL
;
1041 if (TREE_CODE (expr
) == BIT_FIELD_REF
1042 || TREE_CODE (expr
) == IMAGPART_EXPR
1043 || TREE_CODE (expr
) == REALPART_EXPR
)
1045 expr
= TREE_OPERAND (expr
, 0);
1049 partial_ref
= false;
1051 /* We need to dive through V_C_Es in order to get the size of its parameter
1052 and not the result type. Ada produces such statements. We are also
1053 capable of handling the topmost V_C_E but not any of those buried in other
1054 handled components. */
1055 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
1056 expr
= TREE_OPERAND (expr
, 0);
1058 if (contains_view_convert_expr_p (expr
))
1060 disqualify_base_of_expr (expr
, "V_C_E under a different handled "
1064 if (TREE_THIS_VOLATILE (expr
))
1066 disqualify_base_of_expr (expr
, "part of a volatile reference.");
1070 switch (TREE_CODE (expr
))
1073 if (TREE_CODE (TREE_OPERAND (expr
, 0)) != ADDR_EXPR
1074 && sra_mode
!= SRA_MODE_EARLY_IPA
)
1082 case ARRAY_RANGE_REF
:
1083 ret
= create_access (expr
, stmt
, write
);
1090 if (write
&& partial_ref
&& ret
)
1091 ret
->grp_partial_lhs
= 1;
1096 /* Scan expression EXPR and create access structures for all accesses to
1097 candidates for scalarization. Return true if any access has been inserted.
1098 STMT must be the statement from which the expression is taken, WRITE must be
1099 true if the expression is a store and false otherwise. */
1102 build_access_from_expr (tree expr
, gimple stmt
, bool write
)
1104 struct access
*access
;
1106 access
= build_access_from_expr_1 (expr
, stmt
, write
);
/* This means the aggregate is accessed as a whole in a way other than an
   assign statement and thus cannot be removed even if we had a scalar
   replacement for everything.  */
1112 if (cannot_scalarize_away_bitmap
)
1113 bitmap_set_bit (cannot_scalarize_away_bitmap
, DECL_UID (access
->base
));
1119 /* Return the single non-EH successor edge of BB or NULL if there is none or
1123 single_non_eh_succ (basic_block bb
)
1128 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1129 if (!(e
->flags
& EDGE_EH
))
1139 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1140 there is no alternative spot where to put statements SRA might need to
1141 generate after it. The spot we are looking for is an edge leading to a
1142 single non-EH successor, if it exists and is indeed single. RHS may be
1143 NULL, in that case ignore it. */
1146 disqualify_if_bad_bb_terminating_stmt (gimple stmt
, tree lhs
, tree rhs
)
1148 if ((sra_mode
== SRA_MODE_EARLY_INTRA
|| sra_mode
== SRA_MODE_INTRA
)
1149 && stmt_ends_bb_p (stmt
))
1151 if (single_non_eh_succ (gimple_bb (stmt
)))
1154 disqualify_base_of_expr (lhs
, "LHS of a throwing stmt.");
1156 disqualify_base_of_expr (rhs
, "RHS of a throwing stmt.");
1162 /* Scan expressions occurring in STMT, create access structures for all accesses
1163 to candidates for scalarization and remove those candidates which occur in
1164 statements or expressions that prevent them from being split apart. Return
1165 true if any access has been inserted. */
1168 build_accesses_from_assign (gimple stmt
)
1171 struct access
*lacc
, *racc
;
1173 if (!gimple_assign_single_p (stmt
)
1174 /* Scope clobbers don't influence scalarization. */
1175 || gimple_clobber_p (stmt
))
1178 lhs
= gimple_assign_lhs (stmt
);
1179 rhs
= gimple_assign_rhs1 (stmt
);
1181 if (disqualify_if_bad_bb_terminating_stmt (stmt
, lhs
, rhs
))
1184 racc
= build_access_from_expr_1 (rhs
, stmt
, false);
1185 lacc
= build_access_from_expr_1 (lhs
, stmt
, true);
1188 lacc
->grp_assignment_write
= 1;
1192 racc
->grp_assignment_read
= 1;
1193 if (should_scalarize_away_bitmap
&& !gimple_has_volatile_ops (stmt
)
1194 && !is_gimple_reg_type (racc
->type
))
1195 bitmap_set_bit (should_scalarize_away_bitmap
, DECL_UID (racc
->base
));
1199 && (sra_mode
== SRA_MODE_EARLY_INTRA
|| sra_mode
== SRA_MODE_INTRA
)
1200 && !lacc
->grp_unscalarizable_region
1201 && !racc
->grp_unscalarizable_region
1202 && AGGREGATE_TYPE_P (TREE_TYPE (lhs
))
1203 && lacc
->size
== racc
->size
1204 && useless_type_conversion_p (lacc
->type
, racc
->type
))
1206 struct assign_link
*link
;
1208 link
= assign_link_pool
.allocate ();
1209 memset (link
, 0, sizeof (struct assign_link
));
1214 add_link_to_rhs (racc
, link
);
1217 return lacc
|| racc
;
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1224 asm_visit_addr (gimple
, tree op
, tree
, void *)
1226 op
= get_base_address (op
);
1229 disqualify_candidate (op
, "Non-scalarizable GIMPLE_ASM operand.");
1234 /* Return true iff callsite CALL has at least as many actual arguments as there
1235 are formal parameters of the function currently processed by IPA-SRA and
1236 that their types match. */
1239 callsite_arguments_match_p (gimple call
)
1241 if (gimple_call_num_args (call
) < (unsigned) func_param_count
)
1246 for (parm
= DECL_ARGUMENTS (current_function_decl
), i
= 0;
1248 parm
= DECL_CHAIN (parm
), i
++)
1250 tree arg
= gimple_call_arg (call
, i
);
1251 if (!useless_type_conversion_p (TREE_TYPE (parm
), TREE_TYPE (arg
)))
1257 /* Scan function and look for interesting expressions and create access
1258 structures for them. Return true iff any access is created. */
1261 scan_function (void)
1266 FOR_EACH_BB_FN (bb
, cfun
)
1268 gimple_stmt_iterator gsi
;
1269 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1271 gimple stmt
= gsi_stmt (gsi
);
1275 if (final_bbs
&& stmt_can_throw_external (stmt
))
1276 bitmap_set_bit (final_bbs
, bb
->index
);
1277 switch (gimple_code (stmt
))
1280 t
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1282 ret
|= build_access_from_expr (t
, stmt
, false);
1284 bitmap_set_bit (final_bbs
, bb
->index
);
1288 ret
|= build_accesses_from_assign (stmt
);
1292 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
1293 ret
|= build_access_from_expr (gimple_call_arg (stmt
, i
),
1296 if (sra_mode
== SRA_MODE_EARLY_IPA
)
1298 tree dest
= gimple_call_fndecl (stmt
);
1299 int flags
= gimple_call_flags (stmt
);
1303 if (DECL_BUILT_IN_CLASS (dest
) == BUILT_IN_NORMAL
1304 && DECL_FUNCTION_CODE (dest
) == BUILT_IN_APPLY_ARGS
)
1305 encountered_apply_args
= true;
1306 if (recursive_call_p (current_function_decl
, dest
))
1308 encountered_recursive_call
= true;
1309 if (!callsite_arguments_match_p (stmt
))
1310 encountered_unchangable_recursive_call
= true;
1315 && (flags
& (ECF_CONST
| ECF_PURE
)) == 0)
1316 bitmap_set_bit (final_bbs
, bb
->index
);
1319 t
= gimple_call_lhs (stmt
);
1320 if (t
&& !disqualify_if_bad_bb_terminating_stmt (stmt
, t
, NULL
))
1321 ret
|= build_access_from_expr (t
, stmt
, true);
1326 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1327 walk_stmt_load_store_addr_ops (asm_stmt
, NULL
, NULL
, NULL
,
1330 bitmap_set_bit (final_bbs
, bb
->index
);
1332 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); i
++)
1334 t
= TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
1335 ret
|= build_access_from_expr (t
, asm_stmt
, false);
1337 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); i
++)
1339 t
= TREE_VALUE (gimple_asm_output_op (asm_stmt
, i
));
1340 ret
|= build_access_from_expr (t
, asm_stmt
, true);
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */
1359 compare_access_positions (const void *a
, const void *b
)
1361 const access_p
*fp1
= (const access_p
*) a
;
1362 const access_p
*fp2
= (const access_p
*) b
;
1363 const access_p f1
= *fp1
;
1364 const access_p f2
= *fp2
;
1366 if (f1
->offset
!= f2
->offset
)
1367 return f1
->offset
< f2
->offset
? -1 : 1;
1369 if (f1
->size
== f2
->size
)
1371 if (f1
->type
== f2
->type
)
1373 /* Put any non-aggregate type before any aggregate type. */
1374 else if (!is_gimple_reg_type (f1
->type
)
1375 && is_gimple_reg_type (f2
->type
))
1377 else if (is_gimple_reg_type (f1
->type
)
1378 && !is_gimple_reg_type (f2
->type
))
1380 /* Put any complex or vector type before any other scalar type. */
1381 else if (TREE_CODE (f1
->type
) != COMPLEX_TYPE
1382 && TREE_CODE (f1
->type
) != VECTOR_TYPE
1383 && (TREE_CODE (f2
->type
) == COMPLEX_TYPE
1384 || TREE_CODE (f2
->type
) == VECTOR_TYPE
))
1386 else if ((TREE_CODE (f1
->type
) == COMPLEX_TYPE
1387 || TREE_CODE (f1
->type
) == VECTOR_TYPE
)
1388 && TREE_CODE (f2
->type
) != COMPLEX_TYPE
1389 && TREE_CODE (f2
->type
) != VECTOR_TYPE
)
1391 /* Put the integral type with the bigger precision first. */
1392 else if (INTEGRAL_TYPE_P (f1
->type
)
1393 && INTEGRAL_TYPE_P (f2
->type
))
1394 return TYPE_PRECISION (f2
->type
) - TYPE_PRECISION (f1
->type
);
1395 /* Put any integral type with non-full precision last. */
1396 else if (INTEGRAL_TYPE_P (f1
->type
)
1397 && (TREE_INT_CST_LOW (TYPE_SIZE (f1
->type
))
1398 != TYPE_PRECISION (f1
->type
)))
1400 else if (INTEGRAL_TYPE_P (f2
->type
)
1401 && (TREE_INT_CST_LOW (TYPE_SIZE (f2
->type
))
1402 != TYPE_PRECISION (f2
->type
)))
1404 /* Stabilize the sort. */
1405 return TYPE_UID (f1
->type
) - TYPE_UID (f2
->type
);
1408 /* We want the bigger accesses first, thus the opposite operator in the next
1410 return f1
->size
> f2
->size
? -1 : 1;
1414 /* Append a name of the declaration to the name obstack. A helper function for
1418 make_fancy_decl_name (tree decl
)
1422 tree name
= DECL_NAME (decl
);
1424 obstack_grow (&name_obstack
, IDENTIFIER_POINTER (name
),
1425 IDENTIFIER_LENGTH (name
));
1428 sprintf (buffer
, "D%u", DECL_UID (decl
));
1429 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1433 /* Helper for make_fancy_name. */
1436 make_fancy_name_1 (tree expr
)
1443 make_fancy_decl_name (expr
);
1447 switch (TREE_CODE (expr
))
1450 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1451 obstack_1grow (&name_obstack
, '$');
1452 make_fancy_decl_name (TREE_OPERAND (expr
, 1));
1456 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1457 obstack_1grow (&name_obstack
, '$');
1458 /* Arrays with only one element may not have a constant as their
1460 index
= TREE_OPERAND (expr
, 1);
1461 if (TREE_CODE (index
) != INTEGER_CST
)
1463 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
, TREE_INT_CST_LOW (index
));
1464 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1468 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1472 make_fancy_name_1 (TREE_OPERAND (expr
, 0));
1473 if (!integer_zerop (TREE_OPERAND (expr
, 1)))
1475 obstack_1grow (&name_obstack
, '$');
1476 sprintf (buffer
, HOST_WIDE_INT_PRINT_DEC
,
1477 TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)));
1478 obstack_grow (&name_obstack
, buffer
, strlen (buffer
));
1485 gcc_unreachable (); /* we treat these as scalars. */
1492 /* Create a human readable name for replacement variable of ACCESS. */
1495 make_fancy_name (tree expr
)
1497 make_fancy_name_1 (expr
);
1498 obstack_1grow (&name_obstack
, '\0');
1499 return XOBFINISH (&name_obstack
, char *);
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */
1512 build_ref_for_offset (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1513 tree exp_type
, gimple_stmt_iterator
*gsi
,
1516 tree prev_base
= base
;
1519 HOST_WIDE_INT base_offset
;
1520 unsigned HOST_WIDE_INT misalign
;
1523 gcc_checking_assert (offset
% BITS_PER_UNIT
== 0);
1524 get_object_alignment_1 (base
, &align
, &misalign
);
1525 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1527 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1528 offset such as array[var_index]. */
1534 gcc_checking_assert (gsi
);
1535 tmp
= make_ssa_name (build_pointer_type (TREE_TYPE (prev_base
)));
1536 addr
= build_fold_addr_expr (unshare_expr (prev_base
));
1537 STRIP_USELESS_TYPE_CONVERSION (addr
);
1538 stmt
= gimple_build_assign (tmp
, addr
);
1539 gimple_set_location (stmt
, loc
);
1541 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
1543 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1545 off
= build_int_cst (reference_alias_ptr_type (prev_base
),
1546 offset
/ BITS_PER_UNIT
);
1549 else if (TREE_CODE (base
) == MEM_REF
)
1551 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1552 base_offset
+ offset
/ BITS_PER_UNIT
);
1553 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1554 base
= unshare_expr (TREE_OPERAND (base
, 0));
1558 off
= build_int_cst (reference_alias_ptr_type (base
),
1559 base_offset
+ offset
/ BITS_PER_UNIT
);
1560 base
= build_fold_addr_expr (unshare_expr (base
));
1563 misalign
= (misalign
+ offset
) & (align
- 1);
1565 align
= (misalign
& -misalign
);
1566 if (align
!= TYPE_ALIGN (exp_type
))
1567 exp_type
= build_aligned_type (exp_type
, align
);
1569 mem_ref
= fold_build2_loc (loc
, MEM_REF
, exp_type
, base
, off
);
1570 if (TREE_THIS_VOLATILE (prev_base
))
1571 TREE_THIS_VOLATILE (mem_ref
) = 1;
1572 if (TREE_SIDE_EFFECTS (prev_base
))
1573 TREE_SIDE_EFFECTS (mem_ref
) = 1;
1577 /* Construct a memory reference to a part of an aggregate BASE at the given
1578 OFFSET and of the same type as MODEL. In case this is a reference to a
1579 bit-field, the function will replicate the last component_ref of model's
1580 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1581 build_ref_for_offset. */
1584 build_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1585 struct access
*model
, gimple_stmt_iterator
*gsi
,
1588 if (TREE_CODE (model
->expr
) == COMPONENT_REF
1589 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
1591 /* This access represents a bit-field. */
1592 tree t
, exp_type
, fld
= TREE_OPERAND (model
->expr
, 1);
1594 offset
-= int_bit_position (fld
);
1595 exp_type
= TREE_TYPE (TREE_OPERAND (model
->expr
, 0));
1596 t
= build_ref_for_offset (loc
, base
, offset
, exp_type
, gsi
, insert_after
);
1597 return fold_build3_loc (loc
, COMPONENT_REF
, TREE_TYPE (fld
), t
, fld
,
1601 return build_ref_for_offset (loc
, base
, offset
, model
->type
,
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */
1612 build_debug_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1613 struct access
*model
)
1615 HOST_WIDE_INT base_offset
;
1618 if (TREE_CODE (model
->expr
) == COMPONENT_REF
1619 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
1622 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1625 if (TREE_CODE (base
) == MEM_REF
)
1627 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1628 base_offset
+ offset
/ BITS_PER_UNIT
);
1629 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1630 base
= unshare_expr (TREE_OPERAND (base
, 0));
1634 off
= build_int_cst (reference_alias_ptr_type (base
),
1635 base_offset
+ offset
/ BITS_PER_UNIT
);
1636 base
= build_fold_addr_expr (unshare_expr (base
));
1639 return fold_build2_loc (loc
, MEM_REF
, model
->type
, base
, off
);
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed; it returns true when it does, and only then does *RES point to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */
1651 build_user_friendly_ref_for_offset (tree
*res
, tree type
, HOST_WIDE_INT offset
,
1657 tree tr_size
, index
, minidx
;
1658 HOST_WIDE_INT el_size
;
1660 if (offset
== 0 && exp_type
1661 && types_compatible_p (exp_type
, type
))
1664 switch (TREE_CODE (type
))
1667 case QUAL_UNION_TYPE
:
1669 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1671 HOST_WIDE_INT pos
, size
;
1672 tree tr_pos
, expr
, *expr_ptr
;
1674 if (TREE_CODE (fld
) != FIELD_DECL
)
1677 tr_pos
= bit_position (fld
);
1678 if (!tr_pos
|| !tree_fits_uhwi_p (tr_pos
))
1680 pos
= tree_to_uhwi (tr_pos
);
1681 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| pos
== 0);
1682 tr_size
= DECL_SIZE (fld
);
1683 if (!tr_size
|| !tree_fits_uhwi_p (tr_size
))
1685 size
= tree_to_uhwi (tr_size
);
1691 else if (pos
> offset
|| (pos
+ size
) <= offset
)
1694 expr
= build3 (COMPONENT_REF
, TREE_TYPE (fld
), *res
, fld
,
1697 if (build_user_friendly_ref_for_offset (expr_ptr
, TREE_TYPE (fld
),
1698 offset
- pos
, exp_type
))
1707 tr_size
= TYPE_SIZE (TREE_TYPE (type
));
1708 if (!tr_size
|| !tree_fits_uhwi_p (tr_size
))
1710 el_size
= tree_to_uhwi (tr_size
);
1712 minidx
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
1713 if (TREE_CODE (minidx
) != INTEGER_CST
|| el_size
== 0)
1715 index
= build_int_cst (TYPE_DOMAIN (type
), offset
/ el_size
);
1716 if (!integer_zerop (minidx
))
1717 index
= int_const_binop (PLUS_EXPR
, index
, minidx
);
1718 *res
= build4 (ARRAY_REF
, TREE_TYPE (type
), *res
, index
,
1719 NULL_TREE
, NULL_TREE
);
1720 offset
= offset
% el_size
;
1721 type
= TREE_TYPE (type
);
1736 /* Return true iff TYPE is stdarg va_list type. */
1739 is_va_list_type (tree type
)
1741 return TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (va_list_type_node
);
1744 /* Print message to dump file why a variable was rejected. */
1747 reject (tree var
, const char *msg
)
1749 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1751 fprintf (dump_file
, "Rejected (%d): %s: ", DECL_UID (var
), msg
);
1752 print_generic_expr (dump_file
, var
, 0);
1753 fprintf (dump_file
, "\n");
1757 /* Return true if VAR is a candidate for SRA. */
1760 maybe_add_sra_candidate (tree var
)
1762 tree type
= TREE_TYPE (var
);
1766 if (!AGGREGATE_TYPE_P (type
))
1768 reject (var
, "not aggregate");
1771 if (needs_to_live_in_memory (var
))
1773 reject (var
, "needs to live in memory");
1776 if (TREE_THIS_VOLATILE (var
))
1778 reject (var
, "is volatile");
1781 if (!COMPLETE_TYPE_P (type
))
1783 reject (var
, "has incomplete type");
1786 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
1788 reject (var
, "type size not fixed");
1791 if (tree_to_uhwi (TYPE_SIZE (type
)) == 0)
1793 reject (var
, "type size is zero");
1796 if (type_internals_preclude_sra_p (type
, &msg
))
1801 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1802 we also want to schedule it rather late. Thus we ignore it in
1804 (sra_mode
== SRA_MODE_EARLY_INTRA
1805 && is_va_list_type (type
)))
1807 reject (var
, "is va_list");
1811 bitmap_set_bit (candidate_bitmap
, DECL_UID (var
));
1812 slot
= candidates
->find_slot_with_hash (var
, DECL_UID (var
), INSERT
);
1815 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1817 fprintf (dump_file
, "Candidate (%d): ", DECL_UID (var
));
1818 print_generic_expr (dump_file
, var
, 0);
1819 fprintf (dump_file
, "\n");
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */
1829 find_var_candidates (void)
1835 for (parm
= DECL_ARGUMENTS (current_function_decl
);
1837 parm
= DECL_CHAIN (parm
))
1838 ret
|= maybe_add_sra_candidate (parm
);
1840 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1842 if (TREE_CODE (var
) != VAR_DECL
)
1845 ret
|= maybe_add_sra_candidate (var
);
1851 /* Sort all accesses for the given variable, check for partial overlaps and
1852 return NULL if there are any. If there are none, pick a representative for
1853 each combination of offset and size and create a linked list out of them.
1854 Return the pointer to the first representative and make sure it is the first
1855 one in the vector of accesses. */
1857 static struct access
*
1858 sort_and_splice_var_accesses (tree var
)
1860 int i
, j
, access_count
;
1861 struct access
*res
, **prev_acc_ptr
= &res
;
1862 vec
<access_p
> *access_vec
;
1864 HOST_WIDE_INT low
= -1, high
= 0;
1866 access_vec
= get_base_access_vector (var
);
1869 access_count
= access_vec
->length ();
1871 /* Sort by <OFFSET, SIZE>. */
1872 access_vec
->qsort (compare_access_positions
);
1875 while (i
< access_count
)
1877 struct access
*access
= (*access_vec
)[i
];
1878 bool grp_write
= access
->write
;
1879 bool grp_read
= !access
->write
;
1880 bool grp_scalar_write
= access
->write
1881 && is_gimple_reg_type (access
->type
);
1882 bool grp_scalar_read
= !access
->write
1883 && is_gimple_reg_type (access
->type
);
1884 bool grp_assignment_read
= access
->grp_assignment_read
;
1885 bool grp_assignment_write
= access
->grp_assignment_write
;
1886 bool multiple_scalar_reads
= false;
1887 bool total_scalarization
= access
->grp_total_scalarization
;
1888 bool grp_partial_lhs
= access
->grp_partial_lhs
;
1889 bool first_scalar
= is_gimple_reg_type (access
->type
);
1890 bool unscalarizable_region
= access
->grp_unscalarizable_region
;
1892 if (first
|| access
->offset
>= high
)
1895 low
= access
->offset
;
1896 high
= access
->offset
+ access
->size
;
1898 else if (access
->offset
> low
&& access
->offset
+ access
->size
> high
)
1901 gcc_assert (access
->offset
>= low
1902 && access
->offset
+ access
->size
<= high
);
1905 while (j
< access_count
)
1907 struct access
*ac2
= (*access_vec
)[j
];
1908 if (ac2
->offset
!= access
->offset
|| ac2
->size
!= access
->size
)
1913 grp_scalar_write
= (grp_scalar_write
1914 || is_gimple_reg_type (ac2
->type
));
1919 if (is_gimple_reg_type (ac2
->type
))
1921 if (grp_scalar_read
)
1922 multiple_scalar_reads
= true;
1924 grp_scalar_read
= true;
1927 grp_assignment_read
|= ac2
->grp_assignment_read
;
1928 grp_assignment_write
|= ac2
->grp_assignment_write
;
1929 grp_partial_lhs
|= ac2
->grp_partial_lhs
;
1930 unscalarizable_region
|= ac2
->grp_unscalarizable_region
;
1931 total_scalarization
|= ac2
->grp_total_scalarization
;
1932 relink_to_new_repr (access
, ac2
);
1934 /* If there are both aggregate-type and scalar-type accesses with
1935 this combination of size and offset, the comparison function
1936 should have put the scalars first. */
1937 gcc_assert (first_scalar
|| !is_gimple_reg_type (ac2
->type
));
1938 ac2
->group_representative
= access
;
1944 access
->group_representative
= access
;
1945 access
->grp_write
= grp_write
;
1946 access
->grp_read
= grp_read
;
1947 access
->grp_scalar_read
= grp_scalar_read
;
1948 access
->grp_scalar_write
= grp_scalar_write
;
1949 access
->grp_assignment_read
= grp_assignment_read
;
1950 access
->grp_assignment_write
= grp_assignment_write
;
1951 access
->grp_hint
= multiple_scalar_reads
|| total_scalarization
;
1952 access
->grp_total_scalarization
= total_scalarization
;
1953 access
->grp_partial_lhs
= grp_partial_lhs
;
1954 access
->grp_unscalarizable_region
= unscalarizable_region
;
1955 if (access
->first_link
)
1956 add_access_to_work_queue (access
);
1958 *prev_acc_ptr
= access
;
1959 prev_acc_ptr
= &access
->next_grp
;
1962 gcc_assert (res
== (*access_vec
)[0]);
1966 /* Create a variable for the given ACCESS which determines the type, name and a
1967 few other properties. Return the variable declaration and store it also to
1968 ACCESS->replacement. */
1971 create_access_replacement (struct access
*access
)
1975 if (access
->grp_to_be_debug_replaced
)
1977 repl
= create_tmp_var_raw (access
->type
);
1978 DECL_CONTEXT (repl
) = current_function_decl
;
1981 /* Drop any special alignment on the type if it's not on the main
1982 variant. This avoids issues with weirdo ABIs like AAPCS. */
1983 repl
= create_tmp_var (build_qualified_type
1984 (TYPE_MAIN_VARIANT (access
->type
),
1985 TYPE_QUALS (access
->type
)), "SR");
1986 if (TREE_CODE (access
->type
) == COMPLEX_TYPE
1987 || TREE_CODE (access
->type
) == VECTOR_TYPE
)
1989 if (!access
->grp_partial_lhs
)
1990 DECL_GIMPLE_REG_P (repl
) = 1;
1992 else if (access
->grp_partial_lhs
1993 && is_gimple_reg_type (access
->type
))
1994 TREE_ADDRESSABLE (repl
) = 1;
1996 DECL_SOURCE_LOCATION (repl
) = DECL_SOURCE_LOCATION (access
->base
);
1997 DECL_ARTIFICIAL (repl
) = 1;
1998 DECL_IGNORED_P (repl
) = DECL_IGNORED_P (access
->base
);
2000 if (DECL_NAME (access
->base
)
2001 && !DECL_IGNORED_P (access
->base
)
2002 && !DECL_ARTIFICIAL (access
->base
))
2004 char *pretty_name
= make_fancy_name (access
->expr
);
2005 tree debug_expr
= unshare_expr_without_location (access
->expr
), d
;
2008 DECL_NAME (repl
) = get_identifier (pretty_name
);
2009 obstack_free (&name_obstack
, pretty_name
);
/* Get rid of any SSA_NAMEs embedded in debug_expr,
   as DECL_DEBUG_EXPR isn't considered when looking for still
   used SSA_NAMEs and thus they could be freed.  All debug info
   generation cares about is whether something is constant or variable
   and that get_ref_base_and_extent works properly on the
   expression.  It cannot handle accesses at a non-constant offset
   though, so just give up in those cases.  */
2018 for (d
= debug_expr
;
2019 !fail
&& (handled_component_p (d
) || TREE_CODE (d
) == MEM_REF
);
2020 d
= TREE_OPERAND (d
, 0))
2021 switch (TREE_CODE (d
))
2024 case ARRAY_RANGE_REF
:
2025 if (TREE_OPERAND (d
, 1)
2026 && TREE_CODE (TREE_OPERAND (d
, 1)) != INTEGER_CST
)
2028 if (TREE_OPERAND (d
, 3)
2029 && TREE_CODE (TREE_OPERAND (d
, 3)) != INTEGER_CST
)
2033 if (TREE_OPERAND (d
, 2)
2034 && TREE_CODE (TREE_OPERAND (d
, 2)) != INTEGER_CST
)
2038 if (TREE_CODE (TREE_OPERAND (d
, 0)) != ADDR_EXPR
)
2041 d
= TREE_OPERAND (d
, 0);
2048 SET_DECL_DEBUG_EXPR (repl
, debug_expr
);
2049 DECL_HAS_DEBUG_EXPR_P (repl
) = 1;
2051 if (access
->grp_no_warning
)
2052 TREE_NO_WARNING (repl
) = 1;
2054 TREE_NO_WARNING (repl
) = TREE_NO_WARNING (access
->base
);
2057 TREE_NO_WARNING (repl
) = 1;
2061 if (access
->grp_to_be_debug_replaced
)
2063 fprintf (dump_file
, "Created a debug-only replacement for ");
2064 print_generic_expr (dump_file
, access
->base
, 0);
2065 fprintf (dump_file
, " offset: %u, size: %u\n",
2066 (unsigned) access
->offset
, (unsigned) access
->size
);
2070 fprintf (dump_file
, "Created a replacement for ");
2071 print_generic_expr (dump_file
, access
->base
, 0);
2072 fprintf (dump_file
, " offset: %u, size: %u: ",
2073 (unsigned) access
->offset
, (unsigned) access
->size
);
2074 print_generic_expr (dump_file
, repl
, 0);
2075 fprintf (dump_file
, "\n");
2078 sra_stats
.replacements
++;
2083 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
2086 get_access_replacement (struct access
*access
)
2088 gcc_checking_assert (access
->replacement_decl
);
2089 return access
->replacement_decl
;
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */
2099 build_access_subtree (struct access
**access
)
2101 struct access
*root
= *access
, *last_child
= NULL
;
2102 HOST_WIDE_INT limit
= root
->offset
+ root
->size
;
2104 *access
= (*access
)->next_grp
;
2105 while (*access
&& (*access
)->offset
+ (*access
)->size
<= limit
)
2108 root
->first_child
= *access
;
2110 last_child
->next_sibling
= *access
;
2111 last_child
= *access
;
2113 if (!build_access_subtree (access
))
2117 if (*access
&& (*access
)->offset
< limit
)
2123 /* Build a tree of access representatives, ACCESS is the pointer to the first
2124 one, others are linked in a list by the next_grp field. Return false iff
2125 some accesses partially overlap. */
2128 build_access_trees (struct access
*access
)
2132 struct access
*root
= access
;
2134 if (!build_access_subtree (&access
))
2136 root
->next_grp
= access
;
/* Return true if EXPR contains some ARRAY_REFs into variable bounded
   arrays.  */
2145 expr_with_var_bounded_array_refs_p (tree expr
)
2147 while (handled_component_p (expr
))
2149 if (TREE_CODE (expr
) == ARRAY_REF
2150 && !tree_fits_shwi_p (array_ref_low_bound (expr
)))
2152 expr
= TREE_OPERAND (expr
, 0);
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */
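/* For illustration only (this example is not part of the pass, the variable
   and replacement names are hypothetical): given a fragment like

     struct S { int i; int j; };

     void
     store_s (struct S *g)
     {
       struct S s;
       s.i = 1;
       s.j = 2;
       *g = s;
     }

   the accesses to s.i and s.j fall into the "written to through a scalar
   type" and "read in an assignment statement" rows above, so both are
   expected to receive scalar replacements (conceptually s$i and s$j) and the
   aggregate store *g = s is later rebuilt from those replacements.  */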
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
					     root->base, root->offset,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  struct access *access = access_pool.allocate ();
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */
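/* For illustration only (hypothetical example, not part of the pass): given

     struct S { int i; int j; };
     struct S a, b;
     ...
     b.i = 5;
     a = b;

   the access tree of B has a child for b.i while A, used only as a whole,
   has none.  The propagation below is expected to create an artificial child
   access for A at the offset of field i, so that later stages can turn the
   aggregate copy into copies of the individual scalar replacements.  */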
static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */
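/* For illustration only (hypothetical numbers): on a target where
   UNITS_PER_WORD is 8 and the move ratio is 4, the default total-scalarization
   limit computed below would be 4 * 8 bytes, i.e. 256 bits, so a local

     struct pair { long a; long b; } p;

   (128 bits) would be eligible for total scalarization while a structure
   several hundred bytes large would not be, unless the user raises the
   corresponding --param value.  */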
static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
  bool optimize_speed_p = !optimize_function_for_size_p (cfun);

  enum compiler_param param = optimize_speed_p
    ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
    : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;

  /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
     fall back to a target default.  */
  unsigned HOST_WIDE_INT max_scalarization_size
    = global_options_set.x_param_values[param]
      ? PARAM_VALUE (param)
      : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;

  max_scalarization_size *= BITS_PER_UNIT;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
		<= max_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */
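/* For illustration only (hypothetical GIMPLE, names invented): when AGG is a
   variable of type

     struct S { int i; float f; };

   whose two fields have scalar replacements SR.1 and SR.2, a call with WRITE
   set to true is expected to emit statements along the lines of

     SR.1 = AGG.i;
     SR.2 = AGG.f;

   while a call with WRITE set to false emits the opposite copies, storing
   SR.1 and SR.2 back into the corresponding parts of AGG.  */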
2611 generate_subtree_copies (struct access
*access
, tree agg
,
2612 HOST_WIDE_INT top_offset
,
2613 HOST_WIDE_INT start_offset
, HOST_WIDE_INT chunk_size
,
2614 gimple_stmt_iterator
*gsi
, bool write
,
2615 bool insert_after
, location_t loc
)
2619 if (chunk_size
&& access
->offset
>= start_offset
+ chunk_size
)
2622 if (access
->grp_to_be_replaced
2624 || access
->offset
+ access
->size
> start_offset
))
2626 tree expr
, repl
= get_access_replacement (access
);
2629 expr
= build_ref_for_model (loc
, agg
, access
->offset
- top_offset
,
2630 access
, gsi
, insert_after
);
2634 if (access
->grp_partial_lhs
)
2635 expr
= force_gimple_operand_gsi (gsi
, expr
, true, NULL_TREE
,
2637 insert_after
? GSI_NEW_STMT
2639 stmt
= gimple_build_assign (repl
, expr
);
2643 TREE_NO_WARNING (repl
) = 1;
2644 if (access
->grp_partial_lhs
)
2645 repl
= force_gimple_operand_gsi (gsi
, repl
, true, NULL_TREE
,
2647 insert_after
? GSI_NEW_STMT
2649 stmt
= gimple_build_assign (expr
, repl
);
2651 gimple_set_location (stmt
, loc
);
2654 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
2656 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2658 sra_stats
.subtree_copies
++;
2661 && access
->grp_to_be_debug_replaced
2663 || access
->offset
+ access
->size
> start_offset
))
2666 tree drhs
= build_debug_ref_for_model (loc
, agg
,
2667 access
->offset
- top_offset
,
2669 ds
= gimple_build_debug_bind (get_access_replacement (access
),
2670 drhs
, gsi_stmt (*gsi
));
2672 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
2674 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
2677 if (access
->first_child
)
2678 generate_subtree_copies (access
->first_child
, agg
, top_offset
,
2679 start_offset
, chunk_size
, gsi
,
2680 write
, insert_after
, loc
);
2682 access
= access
->next_sibling
;
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */
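/* For illustration only (hypothetical names): if a variable of type

     struct S { int i; float f; };

   has scalar replacements SR.1 and SR.2, the recursion below is expected to
   emit

     SR.1 = 0;
     SR.2 = 0.0;

   which is how an assignment of an empty CONSTRUCTOR to the aggregate is
   reflected in the replacements.  */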
2693 init_subtree_with_zero (struct access
*access
, gimple_stmt_iterator
*gsi
,
2694 bool insert_after
, location_t loc
)
2697 struct access
*child
;
2699 if (access
->grp_to_be_replaced
)
2703 stmt
= gimple_build_assign (get_access_replacement (access
),
2704 build_zero_cst (access
->type
));
2706 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
2708 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2710 gimple_set_location (stmt
, loc
);
2712 else if (access
->grp_to_be_debug_replaced
)
2715 = gimple_build_debug_bind (get_access_replacement (access
),
2716 build_zero_cst (access
->type
),
2719 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
2721 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
2724 for (child
= access
->first_child
; child
; child
= child
->next_sibling
)
2725 init_subtree_with_zero (child
, gsi
, insert_after
, loc
);
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */
2734 clobber_subtree (struct access
*access
, gimple_stmt_iterator
*gsi
,
2735 bool insert_after
, location_t loc
)
2738 struct access
*child
;
2740 if (access
->grp_to_be_replaced
)
2742 tree rep
= get_access_replacement (access
);
2743 tree clobber
= build_constructor (access
->type
, NULL
);
2744 TREE_THIS_VOLATILE (clobber
) = 1;
2745 gimple stmt
= gimple_build_assign (rep
, clobber
);
2748 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
2750 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2752 gimple_set_location (stmt
, loc
);
2755 for (child
= access
->first_child
; child
; child
= child
->next_sibling
)
2756 clobber_subtree (child
, gsi
, insert_after
, loc
);
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */
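/* For illustration only (hypothetical names): if x.i has been given a scalar
   replacement SR.3, a use such as

     tmp_1 = x.i;

   is rewritten so that the right hand side becomes SR.3; when the expression
   type does not match the replacement type, an extra conversion or a load
   from the original aggregate is emitted through the statement iterator
   GSI.  */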
2791 sra_modify_expr (tree
*expr
, gimple_stmt_iterator
*gsi
, bool write
)
2794 struct access
*access
;
2795 tree type
, bfr
, orig_expr
;
2797 if (TREE_CODE (*expr
) == BIT_FIELD_REF
)
2800 expr
= &TREE_OPERAND (*expr
, 0);
2805 if (TREE_CODE (*expr
) == REALPART_EXPR
|| TREE_CODE (*expr
) == IMAGPART_EXPR
)
2806 expr
= &TREE_OPERAND (*expr
, 0);
2807 access
= get_access_for_expr (*expr
);
2810 type
= TREE_TYPE (*expr
);
2813 loc
= gimple_location (gsi_stmt (*gsi
));
2814 gimple_stmt_iterator alt_gsi
= gsi_none ();
2815 if (write
&& stmt_ends_bb_p (gsi_stmt (*gsi
)))
2817 alt_gsi
= gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi
)));
2821 if (access
->grp_to_be_replaced
)
2823 tree repl
= get_access_replacement (access
);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
2834 if (!useless_type_conversion_p (type
, access
->type
))
2838 ref
= build_ref_for_model (loc
, orig_expr
, 0, access
, gsi
, false);
2844 if (access
->grp_partial_lhs
)
2845 ref
= force_gimple_operand_gsi (gsi
, ref
, true, NULL_TREE
,
2846 false, GSI_NEW_STMT
);
2847 stmt
= gimple_build_assign (repl
, ref
);
2848 gimple_set_location (stmt
, loc
);
2849 gsi_insert_after (gsi
, stmt
, GSI_NEW_STMT
);
2855 if (access
->grp_partial_lhs
)
2856 repl
= force_gimple_operand_gsi (gsi
, repl
, true, NULL_TREE
,
2857 true, GSI_SAME_STMT
);
2858 stmt
= gimple_build_assign (ref
, repl
);
2859 gimple_set_location (stmt
, loc
);
2860 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2867 else if (write
&& access
->grp_to_be_debug_replaced
)
2869 gdebug
*ds
= gimple_build_debug_bind (get_access_replacement (access
),
2872 gsi_insert_after (gsi
, ds
, GSI_NEW_STMT
);
2875 if (access
->first_child
)
2877 HOST_WIDE_INT start_offset
, chunk_size
;
2879 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 1))
2880 && tree_fits_uhwi_p (TREE_OPERAND (bfr
, 2)))
2882 chunk_size
= tree_to_uhwi (TREE_OPERAND (bfr
, 1));
2883 start_offset
= access
->offset
2884 + tree_to_uhwi (TREE_OPERAND (bfr
, 2));
2887 start_offset
= chunk_size
= 0;
2889 generate_subtree_copies (access
->first_child
, orig_expr
, access
->offset
,
2890 start_offset
, chunk_size
, gsi
, write
, write
,
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS.  */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS.  */

struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.  */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static void
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
  tree src;

  if (sad->top_racc->grp_unscalarized_data)
    {
      src = sad->assignment_rhs;
      sad->refreshed = SRA_UDH_RIGHT;
    }
  else
    {
      src = sad->assignment_lhs;
      sad->refreshed = SRA_UDH_LEFT;
    }
  generate_subtree_copies (sad->top_racc->first_child, src,
			   sad->top_racc->offset, 0, 0,
			   &sad->old_gsi, false, false, sad->loc);
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */
2960 load_assign_lhs_subreplacements (struct access
*lacc
,
2961 struct subreplacement_assignment_data
*sad
)
2963 for (lacc
= lacc
->first_child
; lacc
; lacc
= lacc
->next_sibling
)
2965 HOST_WIDE_INT offset
;
2966 offset
= lacc
->offset
- sad
->left_offset
+ sad
->top_racc
->offset
;
2968 if (lacc
->grp_to_be_replaced
)
2970 struct access
*racc
;
2974 racc
= find_access_in_subtree (sad
->top_racc
, offset
, lacc
->size
);
2975 if (racc
&& racc
->grp_to_be_replaced
)
2977 rhs
= get_access_replacement (racc
);
2978 if (!useless_type_conversion_p (lacc
->type
, racc
->type
))
2979 rhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
2982 if (racc
->grp_partial_lhs
&& lacc
->grp_partial_lhs
)
2983 rhs
= force_gimple_operand_gsi (&sad
->old_gsi
, rhs
, true,
2984 NULL_TREE
, true, GSI_SAME_STMT
);
2988 /* No suitable access on the right hand side, need to load from
2989 the aggregate. See if we have to update it first... */
2990 if (sad
->refreshed
== SRA_UDH_NONE
)
2991 handle_unscalarized_data_in_subtree (sad
);
2993 if (sad
->refreshed
== SRA_UDH_LEFT
)
2994 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_lhs
,
2995 lacc
->offset
- sad
->left_offset
,
2996 lacc
, sad
->new_gsi
, true);
2998 rhs
= build_ref_for_model (sad
->loc
, sad
->assignment_rhs
,
2999 lacc
->offset
- sad
->left_offset
,
3000 lacc
, sad
->new_gsi
, true);
3001 if (lacc
->grp_partial_lhs
)
3002 rhs
= force_gimple_operand_gsi (sad
->new_gsi
,
3003 rhs
, true, NULL_TREE
,
3004 false, GSI_NEW_STMT
);
3007 stmt
= gimple_build_assign (get_access_replacement (lacc
), rhs
);
3008 gsi_insert_after (sad
->new_gsi
, stmt
, GSI_NEW_STMT
);
3009 gimple_set_location (stmt
, sad
->loc
);
3011 sra_stats
.subreplacements
++;
3015 if (sad
->refreshed
== SRA_UDH_NONE
3016 && lacc
->grp_read
&& !lacc
->grp_covered
)
3017 handle_unscalarized_data_in_subtree (sad
);
3019 if (lacc
&& lacc
->grp_to_be_debug_replaced
)
3023 struct access
*racc
= find_access_in_subtree (sad
->top_racc
,
3027 if (racc
&& racc
->grp_to_be_replaced
)
3029 if (racc
->grp_write
)
3030 drhs
= get_access_replacement (racc
);
3034 else if (sad
->refreshed
== SRA_UDH_LEFT
)
3035 drhs
= build_debug_ref_for_model (sad
->loc
, lacc
->base
,
3036 lacc
->offset
, lacc
);
3037 else if (sad
->refreshed
== SRA_UDH_RIGHT
)
3038 drhs
= build_debug_ref_for_model (sad
->loc
, sad
->top_racc
->base
,
3043 && !useless_type_conversion_p (lacc
->type
, TREE_TYPE (drhs
)))
3044 drhs
= fold_build1_loc (sad
->loc
, VIEW_CONVERT_EXPR
,
3046 ds
= gimple_build_debug_bind (get_access_replacement (lacc
),
3047 drhs
, gsi_stmt (sad
->old_gsi
));
3048 gsi_insert_after (sad
->new_gsi
, ds
, GSI_NEW_STMT
);
3052 if (lacc
->first_child
)
3053 load_assign_lhs_subreplacements (lacc
, sad
);
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED };  /* stmt eliminated */

/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
3070 tree lhs
= gimple_assign_lhs (stmt
);
3071 struct access
*acc
= get_access_for_expr (lhs
);
3074 location_t loc
= gimple_location (stmt
);
3076 if (gimple_clobber_p (stmt
))
3078 /* Clobber the replacement variable. */
3079 clobber_subtree (acc
, gsi
, !acc
->grp_covered
, loc
);
3080 /* Remove clobbers of fully scalarized variables, they are dead. */
3081 if (acc
->grp_covered
)
3083 unlink_stmt_vdef (stmt
);
3084 gsi_remove (gsi
, true);
3085 release_defs (stmt
);
3086 return SRA_AM_REMOVED
;
3089 return SRA_AM_MODIFIED
;
3092 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt
))) > 0)
3094 /* I have never seen this code path trigger but if it can happen the
3095 following should handle it gracefully. */
3096 if (access_has_children_p (acc
))
3097 generate_subtree_copies (acc
->first_child
, lhs
, acc
->offset
, 0, 0, gsi
,
3099 return SRA_AM_MODIFIED
;
3102 if (acc
->grp_covered
)
3104 init_subtree_with_zero (acc
, gsi
, false, loc
);
3105 unlink_stmt_vdef (stmt
);
3106 gsi_remove (gsi
, true);
3107 release_defs (stmt
);
3108 return SRA_AM_REMOVED
;
3112 init_subtree_with_zero (acc
, gsi
, true, loc
);
3113 return SRA_AM_MODIFIED
;
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
		       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.
   GSI is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
3158 struct access
*lacc
, *racc
;
3160 bool modify_this_stmt
= false;
3161 bool force_gimple_rhs
= false;
3163 gimple_stmt_iterator orig_gsi
= *gsi
;
3165 if (!gimple_assign_single_p (stmt
))
3167 lhs
= gimple_assign_lhs (stmt
);
3168 rhs
= gimple_assign_rhs1 (stmt
);
3170 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3171 return sra_modify_constructor_assign (stmt
, gsi
);
3173 if (TREE_CODE (rhs
) == REALPART_EXPR
|| TREE_CODE (lhs
) == REALPART_EXPR
3174 || TREE_CODE (rhs
) == IMAGPART_EXPR
|| TREE_CODE (lhs
) == IMAGPART_EXPR
3175 || TREE_CODE (rhs
) == BIT_FIELD_REF
|| TREE_CODE (lhs
) == BIT_FIELD_REF
)
3177 modify_this_stmt
= sra_modify_expr (gimple_assign_rhs1_ptr (stmt
),
3179 modify_this_stmt
|= sra_modify_expr (gimple_assign_lhs_ptr (stmt
),
3181 return modify_this_stmt
? SRA_AM_MODIFIED
: SRA_AM_NONE
;
3184 lacc
= get_access_for_expr (lhs
);
3185 racc
= get_access_for_expr (rhs
);
3189 loc
= gimple_location (stmt
);
3190 if (lacc
&& lacc
->grp_to_be_replaced
)
3192 lhs
= get_access_replacement (lacc
);
3193 gimple_assign_set_lhs (stmt
, lhs
);
3194 modify_this_stmt
= true;
3195 if (lacc
->grp_partial_lhs
)
3196 force_gimple_rhs
= true;
3200 if (racc
&& racc
->grp_to_be_replaced
)
3202 rhs
= get_access_replacement (racc
);
3203 modify_this_stmt
= true;
3204 if (racc
->grp_partial_lhs
)
3205 force_gimple_rhs
= true;
3209 && !racc
->grp_unscalarized_data
3210 && TREE_CODE (lhs
) == SSA_NAME
3211 && !access_has_replacements_p (racc
))
3213 rhs
= get_repl_default_def_ssa_name (racc
);
3214 modify_this_stmt
= true;
3218 if (modify_this_stmt
)
3220 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
3222 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3223 ??? This should move to fold_stmt which we simply should
3224 call after building a VIEW_CONVERT_EXPR here. */
3225 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
))
3226 && !contains_bitfld_component_ref_p (lhs
))
3228 lhs
= build_ref_for_model (loc
, lhs
, 0, racc
, gsi
, false);
3229 gimple_assign_set_lhs (stmt
, lhs
);
3231 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs
))
3232 && !contains_vce_or_bfcref_p (rhs
))
3233 rhs
= build_ref_for_model (loc
, rhs
, 0, lacc
, gsi
, false);
3235 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
3237 rhs
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
),
3239 if (is_gimple_reg_type (TREE_TYPE (lhs
))
3240 && TREE_CODE (lhs
) != SSA_NAME
)
3241 force_gimple_rhs
= true;
3246 if (lacc
&& lacc
->grp_to_be_debug_replaced
)
3248 tree dlhs
= get_access_replacement (lacc
);
3249 tree drhs
= unshare_expr (rhs
);
3250 if (!useless_type_conversion_p (TREE_TYPE (dlhs
), TREE_TYPE (drhs
)))
3252 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs
))
3253 && !contains_vce_or_bfcref_p (drhs
))
3254 drhs
= build_debug_ref_for_model (loc
, drhs
, 0, lacc
);
3256 && !useless_type_conversion_p (TREE_TYPE (dlhs
),
3258 drhs
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
3259 TREE_TYPE (dlhs
), drhs
);
3261 gdebug
*ds
= gimple_build_debug_bind (dlhs
, drhs
, stmt
);
3262 gsi_insert_before (gsi
, ds
, GSI_SAME_STMT
);
  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */
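  /* For illustration only (hypothetical example, not part of the pass): the
     second scenario described above corresponds to something like

       struct S { int i; int j; };

       void
       copy_out (struct S *p)
       {
	 struct S s;
	 s.i = 1;
	 s.j = 2;
	 *p = s;
       }

     where only the RHS aggregate S has scalarized components; they are
     expected to be stored directly into *p and, because they cover the whole
     aggregate, the original aggregate assignment can then be removed.  */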
3298 if (modify_this_stmt
3299 || gimple_has_volatile_ops (stmt
)
3300 || contains_vce_or_bfcref_p (rhs
)
3301 || contains_vce_or_bfcref_p (lhs
)
3302 || stmt_ends_bb_p (stmt
))
3304 if (access_has_children_p (racc
))
3305 generate_subtree_copies (racc
->first_child
, rhs
, racc
->offset
, 0, 0,
3306 gsi
, false, false, loc
);
3307 if (access_has_children_p (lacc
))
3309 gimple_stmt_iterator alt_gsi
= gsi_none ();
3310 if (stmt_ends_bb_p (stmt
))
3312 alt_gsi
= gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi
)));
3315 generate_subtree_copies (lacc
->first_child
, lhs
, lacc
->offset
, 0, 0,
3316 gsi
, true, true, loc
);
3318 sra_stats
.separate_lhs_rhs_handling
++;
3320 /* This gimplification must be done after generate_subtree_copies,
3321 lest we insert the subtree copies in the middle of the gimplified
3323 if (force_gimple_rhs
)
3324 rhs
= force_gimple_operand_gsi (&orig_gsi
, rhs
, true, NULL_TREE
,
3325 true, GSI_SAME_STMT
);
3326 if (gimple_assign_rhs1 (stmt
) != rhs
)
3328 modify_this_stmt
= true;
3329 gimple_assign_set_rhs_from_tree (&orig_gsi
, rhs
);
3330 gcc_assert (stmt
== gsi_stmt (orig_gsi
));
3333 return modify_this_stmt
? SRA_AM_MODIFIED
: SRA_AM_NONE
;
3337 if (access_has_children_p (lacc
)
3338 && access_has_children_p (racc
)
3339 /* When an access represents an unscalarizable region, it usually
3340 represents accesses with variable offset and thus must not be used
3341 to generate new memory accesses. */
3342 && !lacc
->grp_unscalarizable_region
3343 && !racc
->grp_unscalarizable_region
)
3345 struct subreplacement_assignment_data sad
;
3347 sad
.left_offset
= lacc
->offset
;
3348 sad
.assignment_lhs
= lhs
;
3349 sad
.assignment_rhs
= rhs
;
3350 sad
.top_racc
= racc
;
3353 sad
.loc
= gimple_location (stmt
);
3354 sad
.refreshed
= SRA_UDH_NONE
;
3356 if (lacc
->grp_read
&& !lacc
->grp_covered
)
3357 handle_unscalarized_data_in_subtree (&sad
);
3359 load_assign_lhs_subreplacements (lacc
, &sad
);
3360 if (sad
.refreshed
!= SRA_UDH_RIGHT
)
3363 unlink_stmt_vdef (stmt
);
3364 gsi_remove (&sad
.old_gsi
, true);
3365 release_defs (stmt
);
3366 sra_stats
.deleted
++;
3367 return SRA_AM_REMOVED
;
3372 if (access_has_children_p (racc
)
3373 && !racc
->grp_unscalarized_data
)
3377 fprintf (dump_file
, "Removing load: ");
3378 print_gimple_stmt (dump_file
, stmt
, 0, 0);
3380 generate_subtree_copies (racc
->first_child
, lhs
,
3381 racc
->offset
, 0, 0, gsi
,
3383 gcc_assert (stmt
== gsi_stmt (*gsi
));
3384 unlink_stmt_vdef (stmt
);
3385 gsi_remove (gsi
, true);
3386 release_defs (stmt
);
3387 sra_stats
.deleted
++;
3388 return SRA_AM_REMOVED
;
3390 /* Restore the aggregate RHS from its components so the
3391 prevailing aggregate copy does the right thing. */
3392 if (access_has_children_p (racc
))
3393 generate_subtree_copies (racc
->first_child
, rhs
, racc
->offset
, 0, 0,
3394 gsi
, false, false, loc
);
3395 /* Re-load the components of the aggregate copy destination.
3396 But use the RHS aggregate to load from to expose more
3397 optimization opportunities. */
3398 if (access_has_children_p (lacc
))
3399 generate_subtree_copies (lacc
->first_child
, rhs
, lacc
->offset
,
3400 0, 0, gsi
, true, true, loc
);
3407 /* Traverse the function body and all modifications as decided in
3408 analyze_all_variable_accesses. Return true iff the CFG has been
3412 sra_modify_function_body (void)
3414 bool cfg_changed
= false;
3417 FOR_EACH_BB_FN (bb
, cfun
)
3419 gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
3420 while (!gsi_end_p (gsi
))
3422 gimple stmt
= gsi_stmt (gsi
);
3423 enum assignment_mod_result assign_result
;
3424 bool modified
= false, deleted
= false;
3428 switch (gimple_code (stmt
))
3431 t
= gimple_return_retval_ptr (as_a
<greturn
*> (stmt
));
3432 if (*t
!= NULL_TREE
)
3433 modified
|= sra_modify_expr (t
, &gsi
, false);
3437 assign_result
= sra_modify_assign (stmt
, &gsi
);
3438 modified
|= assign_result
== SRA_AM_MODIFIED
;
3439 deleted
= assign_result
== SRA_AM_REMOVED
;
3443 /* Operands must be processed before the lhs. */
3444 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
3446 t
= gimple_call_arg_ptr (stmt
, i
);
3447 modified
|= sra_modify_expr (t
, &gsi
, false);
3450 if (gimple_call_lhs (stmt
))
3452 t
= gimple_call_lhs_ptr (stmt
);
3453 modified
|= sra_modify_expr (t
, &gsi
, true);
3459 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3460 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); i
++)
3462 t
= &TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
3463 modified
|= sra_modify_expr (t
, &gsi
, false);
3465 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); i
++)
3467 t
= &TREE_VALUE (gimple_asm_output_op (asm_stmt
, i
));
3468 modified
|= sra_modify_expr (t
, &gsi
, true);
3480 if (maybe_clean_eh_stmt (stmt
)
3481 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
3489 gsi_commit_edge_inserts ();
3493 /* Generate statements initializing scalar replacements of parts of function
3497 initialize_parameter_reductions (void)
3499 gimple_stmt_iterator gsi
;
3500 gimple_seq seq
= NULL
;
3503 gsi
= gsi_start (seq
);
3504 for (parm
= DECL_ARGUMENTS (current_function_decl
);
3506 parm
= DECL_CHAIN (parm
))
3508 vec
<access_p
> *access_vec
;
3509 struct access
*access
;
3511 if (!bitmap_bit_p (candidate_bitmap
, DECL_UID (parm
)))
3513 access_vec
= get_base_access_vector (parm
);
3517 for (access
= (*access_vec
)[0];
3519 access
= access
->next_grp
)
3520 generate_subtree_copies (access
, parm
, 0, 0, 0, &gsi
, true, true,
3521 EXPR_LOCATION (parm
));
3524 seq
= gsi_seq (gsi
);
3526 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
)), seq
);
3529 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3530 it reveals there are components of some aggregates to be scalarized, it runs
3531 the required transformations. */
3533 perform_intra_sra (void)
3538 if (!find_var_candidates ())
3541 if (!scan_function ())
3544 if (!analyze_all_variable_accesses ())
3547 if (sra_modify_function_body ())
3548 ret
= TODO_update_ssa
| TODO_cleanup_cfg
;
3550 ret
= TODO_update_ssa
;
3551 initialize_parameter_reductions ();
3553 statistics_counter_event (cfun
, "Scalar replacements created",
3554 sra_stats
.replacements
);
3555 statistics_counter_event (cfun
, "Modified expressions", sra_stats
.exprs
);
3556 statistics_counter_event (cfun
, "Subtree copy stmts",
3557 sra_stats
.subtree_copies
);
3558 statistics_counter_event (cfun
, "Subreplacement stmts",
3559 sra_stats
.subreplacements
);
3560 statistics_counter_event (cfun
, "Deleted stmts", sra_stats
.deleted
);
3561 statistics_counter_event (cfun
, "Separate LHS and RHS handling",
3562 sra_stats
.separate_lhs_rhs_handling
);
3565 sra_deinitialize ();
3569 /* Perform early intraprocedural SRA. */
3571 early_intra_sra (void)
3573 sra_mode
= SRA_MODE_EARLY_INTRA
;
3574 return perform_intra_sra ();
3577 /* Perform "late" intraprocedural SRA. */
3579 late_intra_sra (void)
3581 sra_mode
= SRA_MODE_INTRA
;
3582 return perform_intra_sra ();
3587 gate_intra_sra (void)
3589 return flag_tree_sra
!= 0 && dbg_cnt (tree_sra
);
3595 const pass_data pass_data_sra_early
=
3597 GIMPLE_PASS
, /* type */
3599 OPTGROUP_NONE
, /* optinfo_flags */
3600 TV_TREE_SRA
, /* tv_id */
3601 ( PROP_cfg
| PROP_ssa
), /* properties_required */
3602 0, /* properties_provided */
3603 0, /* properties_destroyed */
3604 0, /* todo_flags_start */
3605 TODO_update_ssa
, /* todo_flags_finish */
3608 class pass_sra_early
: public gimple_opt_pass
3611 pass_sra_early (gcc::context
*ctxt
)
3612 : gimple_opt_pass (pass_data_sra_early
, ctxt
)
3615 /* opt_pass methods: */
3616 virtual bool gate (function
*) { return gate_intra_sra (); }
3617 virtual unsigned int execute (function
*) { return early_intra_sra (); }
3619 }; // class pass_sra_early
3624 make_pass_sra_early (gcc::context
*ctxt
)
3626 return new pass_sra_early (ctxt
);
3631 const pass_data pass_data_sra
=
3633 GIMPLE_PASS
, /* type */
3635 OPTGROUP_NONE
, /* optinfo_flags */
3636 TV_TREE_SRA
, /* tv_id */
3637 ( PROP_cfg
| PROP_ssa
), /* properties_required */
3638 0, /* properties_provided */
3639 0, /* properties_destroyed */
3640 TODO_update_address_taken
, /* todo_flags_start */
3641 TODO_update_ssa
, /* todo_flags_finish */
3644 class pass_sra
: public gimple_opt_pass
3647 pass_sra (gcc::context
*ctxt
)
3648 : gimple_opt_pass (pass_data_sra
, ctxt
)
3651 /* opt_pass methods: */
3652 virtual bool gate (function
*) { return gate_intra_sra (); }
3653 virtual unsigned int execute (function
*) { return late_intra_sra (); }
3655 }; // class pass_sra
3660 make_pass_sra (gcc::context
*ctxt
)
3662 return new pass_sra (ctxt
);
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */
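/* For illustration only (hypothetical): in

     int
     foo (int *p)
     {
       bar (p);
       return *p;
     }

   passing P itself to bar is a direct use, so the function below is expected
   to return true and the parameter is not considered for IPA-SRA, whereas a
   parameter that is only ever dereferenced as *p has no direct uses.  */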
3684 ptr_parm_has_direct_uses (tree parm
)
3686 imm_use_iterator ui
;
3688 tree name
= ssa_default_def (cfun
, parm
);
3691 FOR_EACH_IMM_USE_STMT (stmt
, ui
, name
)
3694 use_operand_p use_p
;
3696 if (is_gimple_debug (stmt
))
3699 /* Valid uses include dereferences on the lhs and the rhs. */
3700 if (gimple_has_lhs (stmt
))
3702 tree lhs
= gimple_get_lhs (stmt
);
3703 while (handled_component_p (lhs
))
3704 lhs
= TREE_OPERAND (lhs
, 0);
3705 if (TREE_CODE (lhs
) == MEM_REF
3706 && TREE_OPERAND (lhs
, 0) == name
3707 && integer_zerop (TREE_OPERAND (lhs
, 1))
3708 && types_compatible_p (TREE_TYPE (lhs
),
3709 TREE_TYPE (TREE_TYPE (name
)))
3710 && !TREE_THIS_VOLATILE (lhs
))
3713 if (gimple_assign_single_p (stmt
))
3715 tree rhs
= gimple_assign_rhs1 (stmt
);
3716 while (handled_component_p (rhs
))
3717 rhs
= TREE_OPERAND (rhs
, 0);
3718 if (TREE_CODE (rhs
) == MEM_REF
3719 && TREE_OPERAND (rhs
, 0) == name
3720 && integer_zerop (TREE_OPERAND (rhs
, 1))
3721 && types_compatible_p (TREE_TYPE (rhs
),
3722 TREE_TYPE (TREE_TYPE (name
)))
3723 && !TREE_THIS_VOLATILE (rhs
))
3726 else if (is_gimple_call (stmt
))
3729 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3731 tree arg
= gimple_call_arg (stmt
, i
);
3732 while (handled_component_p (arg
))
3733 arg
= TREE_OPERAND (arg
, 0);
3734 if (TREE_CODE (arg
) == MEM_REF
3735 && TREE_OPERAND (arg
, 0) == name
3736 && integer_zerop (TREE_OPERAND (arg
, 1))
3737 && types_compatible_p (TREE_TYPE (arg
),
3738 TREE_TYPE (TREE_TYPE (name
)))
3739 && !TREE_THIS_VOLATILE (arg
))
3744 /* If the number of valid uses does not match the number of
3745 uses in this stmt there is an unhandled use. */
3746 FOR_EACH_IMM_USE_ON_STMT (use_p
, ui
)
3753 BREAK_FROM_IMM_USE_STMT (ui
);
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */
3765 find_param_candidates (void)
3772 for (parm
= DECL_ARGUMENTS (current_function_decl
);
3774 parm
= DECL_CHAIN (parm
))
3776 tree type
= TREE_TYPE (parm
);
3781 if (TREE_THIS_VOLATILE (parm
)
3782 || TREE_ADDRESSABLE (parm
)
3783 || (!is_gimple_reg_type (type
) && is_va_list_type (type
)))
3786 if (is_unused_scalar_param (parm
))
3792 if (POINTER_TYPE_P (type
))
3794 type
= TREE_TYPE (type
);
3796 if (TREE_CODE (type
) == FUNCTION_TYPE
3797 || TYPE_VOLATILE (type
)
3798 || (TREE_CODE (type
) == ARRAY_TYPE
3799 && TYPE_NONALIASED_COMPONENT (type
))
3800 || !is_gimple_reg (parm
)
3801 || is_va_list_type (type
)
3802 || ptr_parm_has_direct_uses (parm
))
3805 else if (!AGGREGATE_TYPE_P (type
))
3808 if (!COMPLETE_TYPE_P (type
)
3809 || !tree_fits_uhwi_p (TYPE_SIZE (type
))
3810 || tree_to_uhwi (TYPE_SIZE (type
)) == 0
3811 || (AGGREGATE_TYPE_P (type
)
3812 && type_internals_preclude_sra_p (type
, &msg
)))
3815 bitmap_set_bit (candidate_bitmap
, DECL_UID (parm
));
3816 slot
= candidates
->find_slot_with_hash (parm
, DECL_UID (parm
), INSERT
);
3820 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3822 fprintf (dump_file
, "Candidate (%d): ", DECL_UID (parm
));
3823 print_generic_expr (dump_file
, parm
, 0);
3824 fprintf (dump_file
, "\n");
3828 func_param_count
= count
;
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */
3850 analyze_modified_params (vec
<access_p
> representatives
)
3854 for (i
= 0; i
< func_param_count
; i
++)
3856 struct access
*repr
;
3858 for (repr
= representatives
[i
];
3860 repr
= repr
->next_grp
)
3862 struct access
*access
;
3866 if (no_accesses_p (repr
))
3868 if (!POINTER_TYPE_P (TREE_TYPE (repr
->base
))
3869 || repr
->grp_maybe_modified
)
3872 ao_ref_init (&ar
, repr
->expr
);
3873 visited
= BITMAP_ALLOC (NULL
);
3874 for (access
= repr
; access
; access
= access
->next_sibling
)
3876 /* All accesses are read ones, otherwise grp_maybe_modified would
3877 be trivially set. */
3878 walk_aliased_vdefs (&ar
, gimple_vuse (access
->stmt
),
3879 mark_maybe_modified
, repr
, &visited
);
3880 if (repr
->grp_maybe_modified
)
3883 BITMAP_FREE (visited
);
/* Propagate distances in bb_dereferences in the direction opposite to the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */
3895 propagate_dereference_distances (void)
3899 auto_vec
<basic_block
> queue (last_basic_block_for_fn (cfun
));
3900 queue
.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3901 FOR_EACH_BB_FN (bb
, cfun
)
3903 queue
.quick_push (bb
);
3907 while (!queue
.is_empty ())
3911 bool change
= false;
3917 if (bitmap_bit_p (final_bbs
, bb
->index
))
3920 for (i
= 0; i
< func_param_count
; i
++)
3922 int idx
= bb
->index
* func_param_count
+ i
;
3924 HOST_WIDE_INT inh
= 0;
3926 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3928 int succ_idx
= e
->dest
->index
* func_param_count
+ i
;
3930 if (e
->src
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3936 inh
= bb_dereferences
[succ_idx
];
3938 else if (bb_dereferences
[succ_idx
] < inh
)
3939 inh
= bb_dereferences
[succ_idx
];
3942 if (!first
&& bb_dereferences
[idx
] < inh
)
3944 bb_dereferences
[idx
] = inh
;
3949 if (change
&& !bitmap_bit_p (final_bbs
, bb
->index
))
3950 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3955 e
->src
->aux
= e
->src
;
3956 queue
.quick_push (e
->src
);
3961 /* Dump a dereferences TABLE with heading STR to file F. */
3964 dump_dereferences_table (FILE *f
, const char *str
, HOST_WIDE_INT
*table
)
3968 fprintf (dump_file
, "%s", str
);
3969 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
),
3970 EXIT_BLOCK_PTR_FOR_FN (cfun
), next_bb
)
3972 fprintf (f
, "%4i %i ", bb
->index
, bitmap_bit_p (final_bbs
, bb
->index
));
3973 if (bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3976 for (i
= 0; i
< func_param_count
; i
++)
3978 int idx
= bb
->index
* func_param_count
+ i
;
3979 fprintf (f
, " %4" HOST_WIDE_INT_PRINT
"d", table
[idx
]);
3984 fprintf (dump_file
, "\n");
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the accessed
   part, is calculated for each pointer parameter rather than a simple boolean,
   in order to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */
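/* For illustration only (hypothetical): in

     int
     foo (int *p, int flag)
     {
       if (flag)
	 return *p;
       return 0;
     }

   *p is not dereferenced on every path from the entry block, so the check
   below is expected to mark the corresponding representative as
   grp_not_necessarilly_dereferenced and the dereference will not be moved to
   the callers.  */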
4006 analyze_caller_dereference_legality (vec
<access_p
> representatives
)
4010 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4011 dump_dereferences_table (dump_file
,
4012 "Dereference table before propagation:\n",
4015 propagate_dereference_distances ();
4017 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4018 dump_dereferences_table (dump_file
,
4019 "Dereference table after propagation:\n",
4022 for (i
= 0; i
< func_param_count
; i
++)
4024 struct access
*repr
= representatives
[i
];
4025 int idx
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->index
* func_param_count
+ i
;
4027 if (!repr
|| no_accesses_p (repr
))
4032 if ((repr
->offset
+ repr
->size
) > bb_dereferences
[idx
])
4033 repr
->grp_not_necessarilly_dereferenced
= 1;
4034 repr
= repr
->next_grp
;
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such a parameter should
   be turned into one passed by value.  Return NULL otherwise.  */
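/* For illustration only (hypothetical): a parameter like

     int
     foo (const int *p)
     {
       return *p + 1;
     }

   where *p is only read and the pointer itself is never used directly is the
   kind of candidate handled below; IPA-SRA can then adjust the signature so
   that callers pass the int by value instead of by reference.  */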
4046 static struct access
*
4047 unmodified_by_ref_scalar_representative (tree parm
)
4049 int i
, access_count
;
4050 struct access
*repr
;
4051 vec
<access_p
> *access_vec
;
4053 access_vec
= get_base_access_vector (parm
);
4054 gcc_assert (access_vec
);
4055 repr
= (*access_vec
)[0];
4058 repr
->group_representative
= repr
;
4060 access_count
= access_vec
->length ();
4061 for (i
= 1; i
< access_count
; i
++)
4063 struct access
*access
= (*access_vec
)[i
];
4066 access
->group_representative
= repr
;
4067 access
->next_sibling
= repr
->next_sibling
;
4068 repr
->next_sibling
= access
;
4072 repr
->grp_scalar_ptr
= 1;
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */
4080 access_precludes_ipa_sra_p (struct access
*access
, unsigned int req_align
)
4082 unsigned int exp_align
;
4083 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4084 is incompatible assign in a call statement (and possibly even in asm
4085 statements). This can be relaxed by using a new temporary but only for
4086 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4087 intraprocedural SRA we deal with this by keeping the old aggregate around,
4088 something we cannot do in IPA-SRA.) */
4090 && (is_gimple_call (access
->stmt
)
4091 || gimple_code (access
->stmt
) == GIMPLE_ASM
))
4094 exp_align
= get_object_alignment (access
->expr
);
4095 if (exp_align
< req_align
)
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */
4109 static struct access
*
4110 splice_param_accesses (tree parm
, bool *ro_grp
)
4112 int i
, j
, access_count
, group_count
;
4113 int agg_size
, total_size
= 0;
4114 struct access
*access
, *res
, **prev_acc_ptr
= &res
;
4115 vec
<access_p
> *access_vec
;
4117 access_vec
= get_base_access_vector (parm
);
4119 return &no_accesses_representant
;
4120 access_count
= access_vec
->length ();
4122 access_vec
->qsort (compare_access_positions
);
4127 while (i
< access_count
)
4131 access
= (*access_vec
)[i
];
4132 modification
= access
->write
;
4133 if (access_precludes_ipa_sra_p (access
, TYPE_ALIGN (access
->type
)))
4135 a1_alias_type
= reference_alias_ptr_type (access
->expr
);
4137 /* Access is about to become group representative unless we find some
4138 nasty overlap which would preclude us from breaking this parameter
4142 while (j
< access_count
)
4144 struct access
*ac2
= (*access_vec
)[j
];
4145 if (ac2
->offset
!= access
->offset
)
4147 /* All or nothing law for parameters. */
4148 if (access
->offset
+ access
->size
> ac2
->offset
)
4153 else if (ac2
->size
!= access
->size
)
4156 if (access_precludes_ipa_sra_p (ac2
, TYPE_ALIGN (access
->type
))
4157 || (ac2
->type
!= access
->type
4158 && (TREE_ADDRESSABLE (ac2
->type
)
4159 || TREE_ADDRESSABLE (access
->type
)))
4160 || (reference_alias_ptr_type (ac2
->expr
) != a1_alias_type
))
4163 modification
|= ac2
->write
;
4164 ac2
->group_representative
= access
;
4165 ac2
->next_sibling
= access
->next_sibling
;
4166 access
->next_sibling
= ac2
;
4171 access
->grp_maybe_modified
= modification
;
4174 *prev_acc_ptr
= access
;
4175 prev_acc_ptr
= &access
->next_grp
;
4176 total_size
+= access
->size
;
4180 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
4181 agg_size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm
))));
4183 agg_size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm
)));
4184 if (total_size
>= agg_size
)
4187 gcc_assert (group_count
> 0);
4191 /* Decide whether parameters with representative accesses given by REPR should
4192 be reduced into components. */
4195 decide_one_param_reduction (struct access *repr)
4197   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4202   cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4203   gcc_assert (cur_parm_size > 0);
4205   if (POINTER_TYPE_P (TREE_TYPE (parm)))
4208       agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4213       agg_size = cur_parm_size;
4219       fprintf (dump_file, "Evaluating PARAM group sizes for ");
4220       print_generic_expr (dump_file, parm, 0);
4221       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4222       for (acc = repr; acc; acc = acc->next_grp)
4223         dump_access (dump_file, acc, true);
4227   new_param_count = 0;
4229   for (; repr; repr = repr->next_grp)
4231       gcc_assert (parm == repr->base);
4233       /* Taking the address of a non-addressable field is verboten.  */
4234       if (by_ref && repr->non_addressable)
4237       /* Do not decompose a non-BLKmode param in a way that would
4238          create BLKmode params.  Especially for by-reference passing
4239          (thus, pointer-type param) this is hardly worthwhile.  */
4240       if (DECL_MODE (parm) != BLKmode
4241           && TYPE_MODE (repr->type) == BLKmode)
4244       if (!by_ref || (!repr->grp_maybe_modified
4245                       && !repr->grp_not_necessarilly_dereferenced))
4246         total_size += repr->size;
4248         total_size += cur_parm_size;
4253   gcc_assert (new_param_count > 0);
4255   if (optimize_function_for_size_p (cfun))
4256     parm_size_limit = cur_parm_size;
4258     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4261   if (total_size < agg_size
4262       && total_size <= parm_size_limit)
4265       fprintf (dump_file, " ....will be split into %i components\n",
4267       return new_param_count;
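/* Worked example (illustrative only, default parameter values assumed):
   for a by-value parameter whose type is a 64-byte structure of which the
   function only reads two 8-byte fields, cur_parm_size == agg_size == 512
   bits and total_size == 128 bits.  With the default
   --param ipa-sra-ptr-growth-factor=2 the limit computed above is 1024
   bits, so 128 < 512 and 128 <= 1024 both hold and the parameter is split
   into two components.  If the groups had covered the whole structure,
   total_size would equal agg_size and the reduction would be rejected as
   pointless.  */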
4273 /* The order of the following enum values is important; we need to do extra
4274    work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4275 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4276                            MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4278 /* Identify representatives of all accesses to all candidate parameters for
4279 IPA-SRA. Return result based on what representatives have been found. */
4281 static enum ipa_splicing_result
4282 splice_all_param_accesses (vec<access_p> &representatives)
4284   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4286   struct access *repr;
4288   representatives.create (func_param_count);
4290   for (parm = DECL_ARGUMENTS (current_function_decl);
4292        parm = DECL_CHAIN (parm))
4294       if (is_unused_scalar_param (parm))
4296           representatives.quick_push (&no_accesses_representant);
4297           if (result == NO_GOOD_ACCESS)
4298             result = UNUSED_PARAMS;
4300       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4301                && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4302                && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4304           repr = unmodified_by_ref_scalar_representative (parm);
4305           representatives.quick_push (repr);
4307             result = UNMODIF_BY_REF_ACCESSES;
4309       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4311           bool ro_grp = false;
4312           repr = splice_param_accesses (parm, &ro_grp);
4313           representatives.quick_push (repr);
4315           if (repr && !no_accesses_p (repr))
4317               if (POINTER_TYPE_P (TREE_TYPE (parm)))
4320                     result = UNMODIF_BY_REF_ACCESSES;
4321                   else if (result < MODIF_BY_REF_ACCESSES)
4322                     result = MODIF_BY_REF_ACCESSES;
4324               else if (result < BY_VAL_ACCESSES)
4325                 result = BY_VAL_ACCESSES;
4327           else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4328             result = UNUSED_PARAMS;
4331         representatives.quick_push (NULL);
4334   if (result == NO_GOOD_ACCESS)
4336       representatives.release ();
4337       return NO_GOOD_ACCESS;
4343 /* Return the index of BASE in PARMS. Abort if it is not found. */
4346 get_param_index (tree base, vec<tree> parms)
4350   len = parms.length ();
4351   for (i = 0; i < len; i++)
4352     if (parms[i] == base)
4357 /* Convert the decisions made at the representative level into compact
4358 parameter adjustments. REPRESENTATIVES are pointers to first
4359 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4360 final number of adjustments. */
4362 static ipa_parm_adjustment_vec
4363 turn_representatives_into_adjustments (vec<access_p> representatives,
4364                                        int adjustments_count)
4367   ipa_parm_adjustment_vec adjustments;
4371   gcc_assert (adjustments_count > 0);
4372   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4373   adjustments.create (adjustments_count);
4374   parm = DECL_ARGUMENTS (current_function_decl);
4375   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4377       struct access *repr = representatives[i];
4379       if (!repr || no_accesses_p (repr))
4381           struct ipa_parm_adjustment adj;
4383           memset (&adj, 0, sizeof (adj));
4384           adj.base_index = get_param_index (parm, parms);
4387             adj.op = IPA_PARM_OP_COPY;
4389             adj.op = IPA_PARM_OP_REMOVE;
4390           adj.arg_prefix = "ISRA";
4391           adjustments.quick_push (adj);
4395           struct ipa_parm_adjustment adj;
4396           int index = get_param_index (parm, parms);
4398           for (; repr; repr = repr->next_grp)
4400               memset (&adj, 0, sizeof (adj));
4401               gcc_assert (repr->base == parm);
4402               adj.base_index = index;
4403               adj.base = repr->base;
4404               adj.type = repr->type;
4405               adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4406               adj.offset = repr->offset;
4407               adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4408                             && (repr->grp_maybe_modified
4409                                 || repr->grp_not_necessarilly_dereferenced));
4410               adj.arg_prefix = "ISRA";
4411               adjustments.quick_push (adj);
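/* Illustrative sketch (invented names, not from this file): for

     static int f (struct S *p) { return p->a; }

   where p is never written through and is always dereferenced, the loop
   above emits a single adjustment whose base_index refers to p, whose type
   is that of the field, whose offset is 0 and whose by_ref flag is false,
   so the clone takes the field by value, roughly as

     static int f.isra (int p_a) { return p_a; }

   A parameter that is entirely unused instead gets one IPA_PARM_OP_REMOVE
   adjustment, and untouched parameters get IPA_PARM_OP_COPY entries.  */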
4419 /* Analyze the collected accesses and produce a plan of what to do with the
4420    parameters in the form of adjustments, NULL meaning nothing.  */
4422 static ipa_parm_adjustment_vec
4423 analyze_all_param_acesses (void)
4425   enum ipa_splicing_result repr_state;
4426   bool proceed = false;
4427   int i, adjustments_count = 0;
4428   vec<access_p> representatives;
4429   ipa_parm_adjustment_vec adjustments;
4431   repr_state = splice_all_param_accesses (representatives);
4432   if (repr_state == NO_GOOD_ACCESS)
4433     return ipa_parm_adjustment_vec ();
4435   /* If there are any parameters passed by reference which are not modified
4436      directly, we need to check whether they can be modified indirectly.  */
4437   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4439       analyze_caller_dereference_legality (representatives);
4440       analyze_modified_params (representatives);
4443   for (i = 0; i < func_param_count; i++)
4445       struct access *repr = representatives[i];
4447       if (repr && !no_accesses_p (repr))
4449           if (repr->grp_scalar_ptr)
4451               adjustments_count++;
4452               if (repr->grp_not_necessarilly_dereferenced
4453                   || repr->grp_maybe_modified)
4454                 representatives[i] = NULL;
4458                   sra_stats.scalar_by_ref_to_by_val++;
4463               int new_components = decide_one_param_reduction (repr);
4465               if (new_components == 0)
4467                   representatives[i] = NULL;
4468                   adjustments_count++;
4472                   adjustments_count += new_components;
4473                   sra_stats.aggregate_params_reduced++;
4474                   sra_stats.param_reductions_created += new_components;
4481           if (no_accesses_p (repr))
4484               sra_stats.deleted_unused_parameters++;
4486           adjustments_count++;
4490   if (!proceed && dump_file)
4491     fprintf (dump_file, "NOT proceeding to change params.\n");
4494     adjustments = turn_representatives_into_adjustments (representatives,
4497     adjustments = ipa_parm_adjustment_vec ();
4499   representatives.release ();
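/* Hypothetical summary of the decisions made above for a three-parameter
   function (all names and sizes invented):

     parm  representative state                    outcome               count
     p0    no_accesses_representant                parameter removed     +1
     p1    grp_scalar_ptr, never modified          *p1 passed by value   +1
     p2    two groups, total 64 < aggregate 256    split in two          +2

   proceed is set because at least one parameter changes, so
   turn_representatives_into_adjustments is called with an
   adjustments_count of 4.  */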
4503 /* If a parameter replacement identified by ADJ does not yet exist in the form
4504    of a declaration, create it and record it, otherwise return the previously
4508 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4511   if (!adj->new_ssa_base)
4513       char *pretty_name = make_fancy_name (adj->base);
4515       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4516       DECL_NAME (repl) = get_identifier (pretty_name);
4517       obstack_free (&name_obstack, pretty_name);
4519       adj->new_ssa_base = repl;
4522     repl = adj->new_ssa_base;
4526 /* Find the first adjustment for a particular parameter BASE in a vector of
4527 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4530 static struct ipa_parm_adjustment *
4531 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4535   len = adjustments.length ();
4536   for (i = 0; i < len; i++)
4538       struct ipa_parm_adjustment *adj;
4540       adj = &adjustments[i];
4541       if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4548 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4549    removed because its value is not used, replace the SSA_NAME with one
4550    based on a newly created VAR_DECL, update all of its uses and return true.
4551    ADJUSTMENTS is a pointer to an adjustments vector.  */
4554 replace_removed_params_ssa_names (gimple stmt,
4555                                   ipa_parm_adjustment_vec adjustments)
4557   struct ipa_parm_adjustment *adj;
4558   tree lhs, decl, repl, name;
4560   if (gimple_code (stmt) == GIMPLE_PHI)
4561     lhs = gimple_phi_result (stmt);
4562   else if (is_gimple_assign (stmt))
4563     lhs = gimple_assign_lhs (stmt);
4564   else if (is_gimple_call (stmt))
4565     lhs = gimple_call_lhs (stmt);
4569   if (TREE_CODE (lhs) != SSA_NAME)
4572   decl = SSA_NAME_VAR (lhs);
4573   if (decl == NULL_TREE
4574       || TREE_CODE (decl) != PARM_DECL)
4577   adj = get_adjustment_for_base (adjustments, decl);
4581   repl = get_replaced_param_substitute (adj);
4582   name = make_ssa_name (repl, stmt);
4586       fprintf (dump_file, "replacing an SSA name of a removed param ");
4587       print_generic_expr (dump_file, lhs, 0);
4588       fprintf (dump_file, " with ");
4589       print_generic_expr (dump_file, name, 0);
4590       fprintf (dump_file, "\n");
4593   if (is_gimple_assign (stmt))
4594     gimple_assign_set_lhs (stmt, name);
4595   else if (is_gimple_call (stmt))
4596     gimple_call_set_lhs (stmt, name);
4598     gimple_phi_set_result (as_a <gphi *> (stmt), name);
4600   replace_uses_by (lhs, name);
4601   release_ssa_name (lhs);
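/* Informal sketch (not an exact GIMPLE dump): if pointer parameter p has
   been turned into a by-value component, a leftover definition such as

     p_5 = &some_local;

   gets its LHS rebuilt as an SSA name of the fresh "ISR" temporary
   returned by get_replaced_param_substitute, every use of p_5 is
   redirected to the new name by replace_uses_by, and p_5 is released.  */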
4605 /* If the statement STMT contains any expressions that need to be replaced
4606    with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
4607    type incompatibilities (GSI is used to accommodate conversion statements and
4608    must point to the statement).  Return true iff the statement was modified. */
4611 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4612                        ipa_parm_adjustment_vec adjustments)
4614   tree *lhs_p, *rhs_p;
4617   if (!gimple_assign_single_p (stmt))
4620   rhs_p = gimple_assign_rhs1_ptr (stmt);
4621   lhs_p = gimple_assign_lhs_ptr (stmt);
4623   any = ipa_modify_expr (rhs_p, false, adjustments);
4624   any |= ipa_modify_expr (lhs_p, false, adjustments);
4627       tree new_rhs = NULL_TREE;
4629       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4631           if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4633               /* V_C_Es of constructors can cause trouble (PR 42714).  */
4634               if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4635                 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4637                 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4641             new_rhs = fold_build1_loc (gimple_location (stmt),
4642                                        VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4645       else if (REFERENCE_CLASS_P (*rhs_p)
4646                && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4647                && !is_gimple_reg (*lhs_p))
4648         /* This can happen when an assignment in between two single field
4649            structures is turned into an assignment in between two pointers to
4650            scalars (PR 42237).  */
4655           tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4656                                                true, GSI_SAME_STMT);
4658           gimple_assign_set_rhs_from_tree (gsi, tmp);
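/* Illustrative example of the PR 42237 situation mentioned in the comment
   above (names invented): after the operands of

     struct wrap { float f; };
     ... *d = *s; ...

   have been rewritten in terms of their scalar replacements, the statement
   can become an assignment between a float register and a memory reference
   of equal size but different type; the VIEW_CONVERT_EXPR built above, or
   the forced separate load, keeps the rewritten statement type-correct.  */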
4667 /* Traverse the function body and perform all modifications as described in
4668    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4671 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4673   bool cfg_changed = false;
4676   FOR_EACH_BB_FN (bb, cfun)
4678       gimple_stmt_iterator gsi;
4680       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4681         replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4683       gsi = gsi_start_bb (bb);
4684       while (!gsi_end_p (gsi))
4686           gimple stmt = gsi_stmt (gsi);
4687           bool modified = false;
4691           switch (gimple_code (stmt))
4694               t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4695               if (*t != NULL_TREE)
4696                 modified |= ipa_modify_expr (t, true, adjustments);
4700               modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4701               modified |= replace_removed_params_ssa_names (stmt, adjustments);
4705               /* Operands must be processed before the lhs.  */
4706               for (i = 0; i < gimple_call_num_args (stmt); i++)
4708                   t = gimple_call_arg_ptr (stmt, i);
4709                   modified |= ipa_modify_expr (t, true, adjustments);
4712               if (gimple_call_lhs (stmt))
4714                   t = gimple_call_lhs_ptr (stmt);
4715                   modified |= ipa_modify_expr (t, false, adjustments);
4716                   modified |= replace_removed_params_ssa_names (stmt,
4723                 gasm *asm_stmt = as_a <gasm *> (stmt);
4724                 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4726                     t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4727                     modified |= ipa_modify_expr (t, true, adjustments);
4729                 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4731                     t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4732                     modified |= ipa_modify_expr (t, false, adjustments);
4744           if (maybe_clean_eh_stmt (stmt)
4745               && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4755 /* Call gimple_debug_bind_reset_value on all debug statements describing
4756 gimple register parameters that are being removed or replaced. */
4759 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4762   gimple_stmt_iterator *gsip = NULL, gsi;
4764   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4766       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4769   len = adjustments.length ();
4770   for (i = 0; i < len; i++)
4772       struct ipa_parm_adjustment *adj;
4773       imm_use_iterator ui;
4776       tree name, vexpr, copy = NULL_TREE;
4777       use_operand_p use_p;
4779       adj = &adjustments[i];
4780       if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4782       name = ssa_default_def (cfun, adj->base);
4785           FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4787               if (gimple_clobber_p (stmt))
4789                   gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4790                   unlink_stmt_vdef (stmt);
4791                   gsi_remove (&cgsi, true);
4792                   release_defs (stmt);
4795               /* All other users must have been removed by
4796                  ipa_sra_modify_function_body.  */
4797               gcc_assert (is_gimple_debug (stmt));
4798               if (vexpr == NULL && gsip != NULL)
4800                   gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4801                   vexpr = make_node (DEBUG_EXPR_DECL);
4802                   def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4804                   DECL_ARTIFICIAL (vexpr) = 1;
4805                   TREE_TYPE (vexpr) = TREE_TYPE (name);
4806                   DECL_MODE (vexpr) = DECL_MODE (adj->base);
4807                   gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4811                   FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4812                     SET_USE (use_p, vexpr);
4815                 gimple_debug_bind_reset_value (stmt);
4818       /* Create a VAR_DECL for debug info purposes.  */
4819       if (!DECL_IGNORED_P (adj->base))
4821           copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4822                              VAR_DECL, DECL_NAME (adj->base),
4823                              TREE_TYPE (adj->base));
4824           if (DECL_PT_UID_SET_P (adj->base))
4825             SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4826           TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4827           TREE_READONLY (copy) = TREE_READONLY (adj->base);
4828           TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4829           DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4830           DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4831           DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4832           DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4833           DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4834           SET_DECL_RTL (copy, 0);
4835           TREE_USED (copy) = 1;
4836           DECL_CONTEXT (copy) = current_function_decl;
4837           add_local_decl (cfun, copy);
4839             BLOCK_VARS (DECL_INITIAL (current_function_decl));
4840           BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4842       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4844           gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4846             def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4848             def_temp = gimple_build_debug_source_bind (copy, adj->base,
4850           gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
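/* Informal example of the debug handling above (dump syntax only
   approximate): when gimple-register parameter n is removed or replaced,
   its remaining debug uses end up looking roughly like

     # DEBUG D#1 s=> n      <- source bind created for the old PARM_DECL
     # DEBUG n => D#1       <- bind of the VAR_DECL copy, when one is made

   so that n stays inspectable in the debugger even though the clone no
   longer has such a parameter.  */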
4855 /* Return false if all callers have at least as many actual arguments as there
4856 are formal parameters in the current function and that their types
4860 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4861                                           void *data ATTRIBUTE_UNUSED)
4863   struct cgraph_edge *cs;
4864   for (cs = node->callers; cs; cs = cs->next_caller)
4865     if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4871 /* Return false if all callers have vuse attached to a call statement. */
4874 some_callers_have_no_vuse_p (struct cgraph_node *node,
4875                              void *data ATTRIBUTE_UNUSED)
4877   struct cgraph_edge *cs;
4878   for (cs = node->callers; cs; cs = cs->next_caller)
4879     if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4885 /* Convert all callers of NODE. */
4888 convert_callers_for_node (struct cgraph_node *node,
4891   ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4892   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4893   struct cgraph_edge *cs;
4895   for (cs = node->callers; cs; cs = cs->next_caller)
4897       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4900         fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4901                  xstrdup (cs->caller->name ()),
4903                  xstrdup (cs->callee->name ()),
4906       ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4911   for (cs = node->callers; cs; cs = cs->next_caller)
4912     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4913         && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4914       compute_inline_parameters (cs->caller, true);
4915   BITMAP_FREE (recomputed_callers);
4920 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4923 convert_callers (struct cgraph_node *node, tree old_decl,
4924                  ipa_parm_adjustment_vec adjustments)
4926   basic_block this_block;
4928   node->call_for_symbol_and_aliases (convert_callers_for_node,
4929                                      &adjustments, false);
4931   if (!encountered_recursive_call)
4934   FOR_EACH_BB_FN (this_block, cfun)
4936       gimple_stmt_iterator gsi;
4938       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4942           stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4945           call_fndecl = gimple_call_fndecl (stmt);
4946           if (call_fndecl == old_decl)
4949                 fprintf (dump_file, "Adjusting recursive call");
4950               gimple_call_set_fndecl (stmt, node->decl);
4951               ipa_modify_call_arguments (NULL, stmt, adjustments);
4959 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4960    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4963 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4965   struct cgraph_node *new_node;
4968   cgraph_edge::rebuild_edges ();
4969   free_dominance_info (CDI_DOMINATORS);
4972   /* This must be done after rebuilding cgraph edges for node above.
4973      Otherwise any recursive calls to node that are recorded in
4974      redirect_callers will be corrupted.  */
4975   vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4976   new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4977                                                    NULL, false, NULL, NULL,
4979   redirect_callers.release ();
4981   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4982   ipa_modify_formal_parameters (current_function_decl, adjustments);
4983   cfg_changed = ipa_sra_modify_function_body (adjustments);
4984   sra_ipa_reset_debug_stmts (adjustments);
4985   convert_callers (new_node, node->decl, adjustments);
4986   new_node->make_local ();
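/* Illustrative sketch of the overall effect (the ".isra" clone suffix is
   what GCC typically produces for this pass, shown only as an example):

     static int get_x (struct point *p) { return p->x; }
     ... get_x (&pt) ...

   becomes, after cloning and caller redirection, roughly

     static int get_x.isra.0 (int x) { return x; }
     ... get_x.isra.0 (pt.x) ...

   The clone is created first, its body is rewritten by
   ipa_sra_modify_function_body, debug statements are fixed up, and only
   then are all (possibly recursive) callers adjusted by convert_callers.  */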
4990 /* Means of communication between ipa_sra_check_caller and
4991 ipa_sra_preliminary_function_checks. */
4993 struct ipa_sra_check_caller_data
4996   bool bad_arg_alignment;
5000 /* If NODE has a caller, mark that fact in DATA which is pointer to
5001    ipa_sra_check_caller_data.  Also check all aggregate arguments in all known
5002    calls if they are unit aligned and if not, set the appropriate flag in DATA
5006 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5011   struct ipa_sra_check_caller_data *iscc;
5012   iscc = (struct ipa_sra_check_caller_data *) data;
5013   iscc->has_callers = true;
5015   for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5017       if (cs->caller->thunk.thunk_p)
5019           iscc->has_thunk = true;
5022       gimple call_stmt = cs->call_stmt;
5023       unsigned count = gimple_call_num_args (call_stmt);
5024       for (unsigned i = 0; i < count; i++)
5026           tree arg = gimple_call_arg (call_stmt, i);
5027           if (is_gimple_reg (arg))
5031           HOST_WIDE_INT bitsize, bitpos;
5033           int unsignedp, volatilep = 0;
5034           get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5035                                &unsignedp, &volatilep, false);
5036           if (bitpos % BITS_PER_UNIT)
5038               iscc->bad_arg_alignment = true;
5047 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5048    its attributes, return true otherwise.  NODE is the cgraph node of the current
5052 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5054   if (!node->can_be_local_p ())
5057       fprintf (dump_file, "Function not local to this compilation unit.\n");
5061   if (!node->local.can_change_signature)
5064       fprintf (dump_file, "Function can not change signature.\n");
5068   if (!tree_versionable_function_p (node->decl))
5071       fprintf (dump_file, "Function is not versionable.\n");
5075   if (!opt_for_fn (node->decl, optimize)
5076       || !opt_for_fn (node->decl, flag_ipa_sra))
5079       fprintf (dump_file, "Function not optimized.\n");
5083   if (DECL_VIRTUAL_P (current_function_decl))
5086       fprintf (dump_file, "Function is a virtual method.\n");
5090   if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5091       && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5094       fprintf (dump_file, "Function too big to be made truly local.\n");
5101       fprintf (dump_file, "Function uses stdarg. \n");
5105   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5108   if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5111       fprintf (dump_file, "Always inline function will be inlined "
5116   struct ipa_sra_check_caller_data iscc;
5117   memset (&iscc, 0, sizeof(iscc));
5118   node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5119   if (!iscc.has_callers)
5123                "Function has no callers in this compilation unit.\n");
5127   if (iscc.bad_arg_alignment)
5131                "A function call has an argument with non-unit alignment.\n");
5146 /* Perform early interprocedural SRA. */
5149 ipa_early_sra (void)
5151   struct cgraph_node *node = cgraph_node::get (current_function_decl);
5152   ipa_parm_adjustment_vec adjustments;
5155   if (!ipa_sra_preliminary_function_checks (node))
5159   sra_mode = SRA_MODE_EARLY_IPA;
5161   if (!find_param_candidates ())
5164         fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5168   if (node->call_for_symbol_and_aliases
5169       (some_callers_have_mismatched_arguments_p, NULL, true))
5172         fprintf (dump_file, "There are callers with insufficient number of "
5173                  "arguments or arguments with type mismatches.\n");
5177   if (node->call_for_symbol_and_aliases
5178       (some_callers_have_no_vuse_p, NULL, true))
5181         fprintf (dump_file, "There are callers with no VUSE attached "
5182                  "to a call stmt.\n");
5186   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5188                               * last_basic_block_for_fn (cfun));
5189   final_bbs = BITMAP_ALLOC (NULL);
5192   if (encountered_apply_args)
5195         fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5199   if (encountered_unchangable_recursive_call)
5202         fprintf (dump_file, "Function calls itself with insufficient "
5203                  "number of arguments.\n");
5207   adjustments = analyze_all_param_acesses ();
5208   if (!adjustments.exists ())
5211     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5213   if (modify_function (node, adjustments))
5214     ret = TODO_update_ssa | TODO_cleanup_cfg;
5216     ret = TODO_update_ssa;
5217   adjustments.release ();
5219   statistics_counter_event (cfun, "Unused parameters deleted",
5220                             sra_stats.deleted_unused_parameters);
5221   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5222                             sra_stats.scalar_by_ref_to_by_val);
5223   statistics_counter_event (cfun, "Aggregate parameters broken up",
5224                             sra_stats.aggregate_params_reduced);
5225   statistics_counter_event (cfun, "Aggregate parameter components created",
5226                             sra_stats.param_reductions_created);
5229   BITMAP_FREE (final_bbs);
5230   free (bb_dereferences);
5232   sra_deinitialize ();
5238 const pass_data pass_data_early_ipa_sra =
5240   GIMPLE_PASS, /* type */
5241   "eipa_sra", /* name */
5242   OPTGROUP_NONE, /* optinfo_flags */
5243   TV_IPA_SRA, /* tv_id */
5244   0, /* properties_required */
5245   0, /* properties_provided */
5246   0, /* properties_destroyed */
5247   0, /* todo_flags_start */
5248   TODO_dump_symtab, /* todo_flags_finish */
5251 class pass_early_ipa_sra : public gimple_opt_pass
5254   pass_early_ipa_sra (gcc::context *ctxt)
5255     : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5258   /* opt_pass methods: */
5259   virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5260   virtual unsigned int execute (function *) { return ipa_early_sra (); }
5262 }; // class pass_early_ipa_sra
5267 make_pass_early_ipa_sra (gcc::context *ctxt)
5269   return new pass_early_ipa_sra (ctxt);
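/* Usage note (illustrative; flag spellings follow the pass name given
   above): the transformation can be observed on a small test case with

     gcc -O2 -fipa-sra -fdump-tree-eipa_sra-details test.c

   where test.c defines a static function that uses only part of an
   aggregate parameter.  The resulting dump file (test.c.*.eipa_sra) then
   contains the "Adjusting call" and "will be split into ... components"
   messages emitted by the code above.  */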