/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
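
/* For illustration only (this example is not part of the original sources):
   given source such as

     struct S { int i; float f; };
     struct S s;
     s.i = 123;
     foo (s.i);            // foo standing for an arbitrary consumer

   the intraprocedural passes may replace the references to s.i with a new
   scalar replacement variable (such replacements are created with the "SR"
   prefix, see create_access_replacement), roughly:

     SR.1 = 123;
     foo (SR.1);

   after which the aggregate s itself can become dead and be removed by
   later passes.  */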
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
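
/* A sketch of such an access tree (an illustration only, assuming a 32-bit
   int and that each of the parts below is actually accessed):

     struct { struct { int a; int b; } in; int c; } x;

     x         offset 0,  size 96    root representative
       x.in      offset 0,  size 64    first_child of x
         x.in.a    offset 0,  size 32
         x.in.b    offset 32, size 32    next_sibling of x.in.a
       x.c       offset 64, size 32    next_sibling of x.in

   Offsets and sizes are expressed in bits.  */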
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((access *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<access> pool;
};

typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
pool_allocator<struct access> access::pool ("SRA accesses", 16);
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((assign_link *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<assign_link> pool;
};

/* Alloc pool for allocating assign link structures.  */
pool_allocator<assign_link> assign_link::pool ("SRA links", 16);
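
/* As an example (illustration only), for an aggregate assignment "a = b;"
   between two candidates, one assign_link is created with lacc pointing to
   the access for "a" and racc to the access for "b", and it is attached to
   the access of "b" by add_link_to_rhs.  If a subaccess such as "b.f" later
   turns out to be used as a scalar, the link lets the analysis propagate a
   matching "a.f" subaccess to the left hand side.  */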
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access::pool.release ();
  assign_link::pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = new struct access ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_to_uhwi (DECL_SIZE (fld));
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
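
/* To illustrate (example only, assuming 32-bit int and float): for
   "struct S { int i; float f; } s;" the two functions above create an
   artificial access covering all of s (offset 0, size 64) plus one access
   for s.i (offset 0, size 32) and one for s.f (offset 32, size 32), each
   with grp_total_scalarization set.  */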
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = new assign_link;
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
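
/* For example (illustration only), accesses with (offset, size) pairs
   (0, 64), (0, 32) and (32, 32) are already in the order this comparator
   establishes: ascending offset first, and for equal offsets the bigger
   access first, so that a future group representative always precedes the
   accesses it covers.  */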
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */

    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
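
/* E.g. (illustration only) for the expression "s.in.a" the obstack receives
   "s$in$a", and for "arr[3].f" it receives "arr$3$f"; the result is later
   used as the DECL_NAME of a created replacement variable.  */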
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
1649 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1650 create statements and return s NULL instead. This function also ignores
1651 alignment issues and so its results should never end up in non-debug
1655 build_debug_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1656 struct access
*model
)
1658 HOST_WIDE_INT base_offset
;
1661 if (TREE_CODE (model
->expr
) == COMPONENT_REF
1662 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
1665 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1668 if (TREE_CODE (base
) == MEM_REF
)
1670 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1671 base_offset
+ offset
/ BITS_PER_UNIT
);
1672 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1673 base
= unshare_expr (TREE_OPERAND (base
, 0));
1677 off
= build_int_cst (reference_alias_ptr_type (base
),
1678 base_offset
+ offset
/ BITS_PER_UNIT
);
1679 base
= build_fold_addr_expr (unshare_expr (base
));
1682 return fold_build2_loc (loc
, MEM_REF
, model
->type
, base
, off
);
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed; it returns true when it does, and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		return false;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		return false;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
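
/* As an example (illustration only), with TYPE being "struct { int i;
   struct { float f; } in; }" and OFFSET being 32 (bits, assuming 32-bit
   int), the loop above wraps *RES into the COMPONENT_REFs for ".in.f",
   provided EXP_TYPE is compatible with float.  */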
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    /* Drop any special alignment on the type if it's not on the main
       variant.  This avoids issues with weirdo ABIs like AAPCS.  */
    repl = create_tmp_var (build_qualified_type
			   (TYPE_MAIN_VARIANT (access->type),
			    TYPE_QUALS (access->type)), "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, which must exist and be set.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses
   partially overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}
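
/* A hypothetical illustration of the result: for accesses into

     struct { struct { int a; int b; } in; int c; } s;

   with representatives at (offset, size) pairs (0, 64), (0, 32), (32, 32)
   and (64, 32), sorted by offset and size, the routines above produce

     (0, 64)  s.in
	(0, 32)   s.in.a
	(32, 32)  s.in.b
     (64, 32) s.c

   with children linked through first_child/next_sibling and roots through
   next_grp.  A representative at, say, (16, 32) would overlap s.in.a only
   partially and make build_access_subtree return false.  */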
/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
					     root->base, root->offset,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and
   with size SIZE would conflict with an already existing one.  If exactly
   such a child already exists in LACC, store a pointer to it in
   EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
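
/* E.g. (illustrative): with an existing child occupying (32, 32), a proposed
   child at (32, 32) is an exact match, one at (48, 32) conflicts because the
   regions overlap without matching, and one at (64, 32) is acceptable.  */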
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write true and grp_read false.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  struct access *access = new struct access ();
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
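
/* A hypothetical example of what the propagation achieves: in

     struct S { int i; int j; } a, b;
     ...
     b.i = x;
     a = b;

   only "b.i" is accessed directly, yet replicating B's (0, 32) subaccess
   into A's tree across the assign_link of "a = b" makes it possible to
   later rewrite the aggregate copy component-wise, conceptually as

     SR.a_i = SR.b_i;

   instead of keeping both aggregates in memory.  */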
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
  unsigned max_scalarization_size
    = (optimize_function_for_size_p (cfun)
       ? PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE)
       : PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED))
      * BITS_PER_UNIT;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
		<= max_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  do
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gassign *stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  gdebug *ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
  while (access);
}
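
/* For instance (hypothetical GIMPLE, for illustration only): with scalar
   replacements SR.1 and SR.2 for the two fields of an aggregate *P and
   WRITE being true, the routine above emits something along the lines of

     SR.1 = MEM[(struct S *)p].i;
     SR.2 = MEM[(struct S *)p].f;

   while with WRITE false the assignments run in the opposite direction,
   storing the replacements back into the aggregate.  */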
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gassign *stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gdebug *ds
	= gimple_build_debug_bind (get_access_replacement (access),
				   build_zero_cst (access->type),
				   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
		 bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      tree rep = get_access_replacement (access);
      tree clobber = build_constructor (access->type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple stmt = gimple_build_assign (rep, clobber);

      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    clobber_subtree (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr, orig_expr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);
  orig_expr = *expr;

  loc = gimple_location (gsi_stmt (*gsi));
  gimple_stmt_iterator alt_gsi = gsi_none ();
  if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
    {
      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
      gsi = &alt_gsi;
    }

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);

	  if (write)
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
					    NULL_TREE,
					    gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
	{
	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
	  start_offset = access->offset
	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, orig_expr, access->offset,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of an LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.  */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static void
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
  tree src;
  if (sad->top_racc->grp_unscalarized_data)
    {
      src = sad->assignment_rhs;
      sad->refreshed = SRA_UDH_RIGHT;
    }
  else
    {
      src = sad->assignment_lhs;
      sad->refreshed = SRA_UDH_LEFT;
    }
  generate_subtree_copies (sad->top_racc->first_child, src,
			   sad->top_racc->offset, 0, 0,
			   &sad->old_gsi, false, false, sad->loc);
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc,
				 struct subreplacement_assignment_data *sad)
{
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset;
      offset = lacc->offset - sad->left_offset + sad->top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gassign *stmt;
	  tree rhs;

	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
				       lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (sad->refreshed == SRA_UDH_NONE)
		handle_unscalarized_data_in_subtree (sad);

	      if (sad->refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      else
		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (sad->new_gsi,
						rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, sad->loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (sad->refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (sad);

	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      gdebug *ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (sad->top_racc,
							    offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		{
		  if (racc->grp_write)
		    drhs = get_access_replacement (racc);
		  else
		    drhs = NULL;
		}
	      else if (sad->refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
						  lacc->offset, lacc);
	      else if (sad->refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (sad->loc,
						  sad->top_racc->base,
						  offset, lacc);
	      else
		drhs = NULL_TREE;
	      if (drhs
		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
					lacc->type, drhs);
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (sad->old_gsi));
	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, sad);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (stmt);
  struct access *acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;
  location_t loc = gimple_location (stmt);

  if (gimple_clobber_p (stmt))
    {
      /* Clobber the replacement variable.  */
      clobber_subtree (acc, gsi, !acc->grp_covered, loc);
      /* Remove clobbers of fully scalarized variables, they are dead.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (gsi, true);
	  release_defs (stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_MODIFIED;
    }

  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (stmt);
      gsi_remove (gsi, true);
      release_defs (stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
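
/* For example (an illustrative sketch): when the LHS is fully covered by
   replacements,

     s = {};

   is removed altogether and replaced by zero-initializations

     SR.1 = 0;
     SR.2 = 0.0;

   whereas for a partially covered LHS the original statement is kept and
   the zero-initializations of the existing replacements are inserted after
   it.  */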
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
		       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
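
/* E.g. (illustrative): true for VIEW_CONVERT_EXPR<int>(s.f) and for "s.bits"
   when the field is declared "int bits : 3;", but false for a plain
   "s.f".  */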
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_component_ref_p (lhs))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
	      && !contains_vce_or_bfcref_p (drhs))
	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
	  if (drhs
	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
					     TREE_TYPE (drhs)))
	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				    TREE_TYPE (dlhs), drhs);
	}
      gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and the
     RHS have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).
     Unions are fun.

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */

  if (modify_this_stmt
      || gimple_has_volatile_ops (stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs)
      || stmt_ends_bb_p (stmt))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	{
	  gimple_stmt_iterator alt_gsi = gsi_none ();
	  if (stmt_ends_bb_p (stmt))
	    {
	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
	      gsi = &alt_gsi;
	    }
	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
				   gsi, true, true, loc);
	}
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  struct subreplacement_assignment_data sad;

	  sad.left_offset = lacc->offset;
	  sad.assignment_lhs = lhs;
	  sad.assignment_rhs = rhs;
	  sad.top_racc = racc;
	  sad.old_gsi = *gsi;
	  sad.new_gsi = gsi;
	  sad.loc = gimple_location (stmt);
	  sad.refreshed = SRA_UDH_NONE;

	  if (lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (&sad);

	  load_assign_lhs_subreplacements (lacc, &sad);
	  if (sad.refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (&sad.old_gsi);
	      unlink_stmt_vdef (stmt);
	      gsi_remove (&sad.old_gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (stmt);
	      gsi_remove (gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, rhs, racc->offset,
				     0, 0, gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
/* Traverse the function body and all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  gsi_commit_edge_inserts ();
  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = (*access_vec)[0];
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}

static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
namespace {

const pass_data pass_data_sra_early =
{
  GIMPLE_PASS, /* type */
  "esra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra_early : public gimple_opt_pass
{
public:
  pass_sra_early (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra_early, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return early_intra_sra (); }

}; // class pass_sra_early

} // anon namespace

gimple_opt_pass *
make_pass_sra_early (gcc::context *ctxt)
{
  return new pass_sra_early (ctxt);
}

namespace {

const pass_data pass_data_sra =
{
  GIMPLE_PASS, /* type */
  "sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_address_taken, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra : public gimple_opt_pass
{
public:
  pass_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return late_intra_sra (); }

}; // class pass_sra

} // anon namespace

gimple_opt_pass *
make_pass_sra (gcc::context *ctxt)
{
  return new pass_sra (ctxt);
}
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
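
/* For illustration (hypothetical examples): given "void f (int *p)", uses
   such as "*p" keep P a candidate, whereas passing the pointer itself
   along, as in "g (p)", or storing it, as in "q = p", is a direct use that
   would make removing the indirection in callers unsafe.  */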
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree_node **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
	  || tree_to_uhwi (TYPE_SIZE (type)) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
      *slot = parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}

/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  basic_block bb;

  auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }
}
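
/* A small illustration (hypothetical CFG): if parameter I is dereferenced
   up to distance 32 in both arms of a diamond

	   BB1
	  /   \
	BB2   BB3	(both with distance 32)
	  \   /
	   BB4

   then BB1 receives max (its own value, min (32, 32)) = 32, i.e. the
   dereference is known to happen on every path from BB1.  If one arm did
   not dereference I at all, the minimum over the successors would be 0 and
   nothing would propagate.  */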
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, "%s", str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The dereferenced maximum "distance," i.e. the offset + size of the accessed
   part is calculated rather than simple booleans are calculated for each
   pointer parameter to handle cases when only a fraction of the whole
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
   an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereference.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
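
/* For example (an illustrative sketch):

     int f (int *p, int flag)
     {
       if (flag)
	 return *p;
       return 0;
     }

   does not dereference *P on every path, so moving the load to callers
   could introduce a fault for calls that pass an invalid pointer together
   with a zero FLAG; the representative of *P is therefore marked as
   grp_not_necessarilly_dereferenced.  */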
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
        return 0;

      /* Do not decompose a non-BLKmode param in a way that would
         create BLKmode params.  Especially for by-reference passing
         (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
          && TYPE_MODE (repr->type) == BLKmode)
        return 0;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}

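/* A worked example with made-up numbers: for a by-reference parameter
   pointing to a 128-bit aggregate on a 64-bit target, cur_parm_size is 64
   (the pointer itself), agg_size is 128 and, with the default
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2, parm_size_limit is 2 * 64 = 128.
   A single unmodified 32-bit component group gives total_size == 32, which
   is below both bounds, so the function returns 1.  */
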
/* The order of the following enums is important; we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                           MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          representatives.quick_push (&no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          representatives.quick_push (repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          representatives.quick_push (repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}

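/* For instance (hypothetical): in

     int f (int unused, int *p) { return *p; }

   the first parameter pushes &no_accesses_representant and makes result
   UNUSED_PARAMS, while the second is an unmodified scalar dereferenced
   through a pointer, so result ends up as UNMODIF_BY_REF_ACCESSES, the
   highest value in the enum ordering above.  */
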
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}

/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to the first
   representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
   expected final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
                                       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment adj;

          memset (&adj, 0, sizeof (adj));
          adj.base_index = get_param_index (parm, parms);
          adj.base = parm;
          if (!repr)
            adj.op = IPA_PARM_OP_COPY;
          else
            adj.op = IPA_PARM_OP_REMOVE;
          adj.arg_prefix = "ISRA";
          adjustments.quick_push (adj);
        }
      else
        {
          struct ipa_parm_adjustment adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              memset (&adj, 0, sizeof (adj));
              gcc_assert (repr->base == parm);
              adj.base_index = index;
              adj.base = repr->base;
              adj.type = repr->type;
              adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
              adj.offset = repr->offset;
              adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                            && (repr->grp_maybe_modified
                                || repr->grp_not_necessarilly_dereferenced));
              adj.arg_prefix = "ISRA";
              adjustments.quick_push (adj);
            }
        }
    }
  parms.release ();
  return adjustments;
}

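/* A hypothetical example of the result: for

     void f (int unused, struct S *p)

   where only fields A and B of *P are accessed, the vector holds one
   IPA_PARM_OP_REMOVE entry for UNUSED followed by two component entries
   sharing base_index 1 but carrying the offsets and types of A and B, from
   which new formal parameters (prefixed "ISRA") are later synthesized.  */
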
/* Analyze the collected accesses and produce a plan of what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                representatives[i] = NULL;
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  representatives[i] = NULL;
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
        return adj;
    }

  return NULL;
}

/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one based
   on a created VAR_DECL, update all of its uses and return true.
   ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (as_a <gphi *> (stmt), name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}

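/* Illustration with made-up SSA names: if parameter I is being removed, an
   assignment

     i_7 = i_4(D) + 1;

   defines i_7, an SSA name of the PARM_DECL i, so its LHS is replaced by a
   fresh SSA name of a new "ISR" register and all uses of i_7 are redirected
   to it.  */
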
/* If the statement STMT contains any expressions that need to be replaced
   with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
   type incompatibilities (GSI is used to accommodate conversion statements
   and must point to the statement).  Return true iff the statement was
   modified.  */

static bool
sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
                                            NULL);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}

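/* A sketch of the PR 42237 situation handled above (hypothetical): an
   assignment between two single-field structures, say

     *dst = *src;   where both point to struct { float f; }

   can, once the surrounding parameters are scalarized, turn into an
   assignment of a memory reference to a scalar register, which is why the
   RHS is forced into a temporary before the statement is updated.  */
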
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
              if (*t != NULL_TREE)
                modified |= ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, true, adjustments);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, false, adjustments);
                  }
              }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
        continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            if (gimple_clobber_p (stmt))
              {
                gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
                unlink_stmt_vdef (stmt);
                gsi_remove (&cgsi, true);
                release_defs (stmt);
                continue;
              }
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy)
            = BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}

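/* Sketch of the intended debug-info effect (hypothetical dump excerpts):
   for a removed register parameter P, a statement like

     # DEBUG p => p_1(D)

   no longer has a value to refer to, so either a debug expression decl is
   introduced with a source bind such as

     # DEBUG D#1 s=> p

   and uses of p_1(D) in debug statements are redirected to D#1, or the
   debug bind simply has its value reset.  */
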
/* Return false if all callers pass at least as many actual arguments as there
   are formal parameters in the current function and their types match;
   return true otherwise.  */

static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
                                          void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}

/* Return false if all callers have a vuse attached to their call statements;
   return true otherwise.  */

static bool
some_callers_have_no_vuse_p (struct cgraph_node *node,
                             void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
      return true;

  return false;
}

/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
                 xstrdup (cs->caller->name ()),
                 cs->caller->order,
                 xstrdup (cs->callee->name ()),
                 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
                                     &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          tree call_fndecl;
          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (!stmt)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
                                                   NULL, false, NULL, NULL,
                                                   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}

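/* The "isra" suffix passed to create_version_clone_with_body above is what
   yields the familiar clone names seen in dumps and symbol tables, e.g. a
   function foo typically reappears as foo.isra.0 (the trailing number being
   just a clone counter).  */
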
/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  bool has_callers;
  bool bad_arg_alignment;
  bool has_thunk;
};

/* If NODE has a caller, mark that fact in DATA, which is a pointer to
   ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
   all known calls are unit-aligned and, if not, set the appropriate flag in
   DATA too.  */

static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
        {
          iscc->has_thunk = true;
          return true;
        }
      gimple call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
        {
          tree arg = gimple_call_arg (call_stmt, i);
          if (is_gimple_reg (arg))
            continue;

          tree offset;
          HOST_WIDE_INT bitsize, bitpos;
          machine_mode mode;
          int unsignedp, volatilep = 0;
          get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
                               &unsignedp, &volatilep, false);
          if (bitpos % BITS_PER_UNIT)
            {
              iscc->bad_arg_alignment = true;
              return true;
            }
        }
    }

  return false;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!node->can_be_local_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
        fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Always inline function will be inlined "
                 "anyway.\n");
      return false;
    }

  struct ipa_sra_check_caller_data iscc;
  memset (&iscc, 0, sizeof(iscc));
  node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
  if (!iscc.has_callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (iscc.bad_arg_alignment)
    {
      if (dump_file)
        fprintf (dump_file,
                 "A function call has an argument with non-unit alignment.\n");
      return false;
    }

  if (iscc.has_thunk)
    {
      if (dump_file)
        fprintf (dump_file,
                 "A caller of the function is a thunk.\n");
      return false;
    }

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with no VUSE attached "
                 "to a call stmt.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

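/* End-to-end illustration of the pass (entirely hypothetical): under
   -O2 -fipa-sra, a local function

     static int get_x (struct point *p) { return p->x; }

   whose callers never modify *P can be replaced by a clone

     static int get_x.isra.0 (int x) { return x; }

   with every call site rewritten from get_x (&pt) to get_x.isra.0 (pt.x).  */
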
namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}