/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2013 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
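/* As a quick illustration of the net effect (a sketch only; the pass works
   on GIMPLE, not C, and the names below are made up for illustration), a
   function such as

       struct pair { int x; int y; };

       int
       f (void)
       {
         struct pair p;
         p.x = 1;
         p.y = 2;
         return p.x + p.y;
       }

   is effectively rewritten so that the aggregate disappears:

       int
       f (void)
       {
         int p$x, p$y;
         p$x = 1;
         p$y = 2;
         return p$x + p$y;
       }

   Stages 1-3 above identify P as a candidate and build representative
   accesses for p.x and p.y; stage 4 then rewrites the statements to use
   the new scalar replacements.  */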
#include "coretypes.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */
/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
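/* For illustration (an assumed layout with 32-bit int), a variable

       struct s { struct { int a; int b; } in; int c; } s;

   accessed as s.in, s.in.a, s.in.b and s.c would yield group
   representatives linked through next_grp and arranged by first_child
   and next_sibling like this, with <offset, size> in bits:

       <0, 64> s.in  --next_grp-->  <64, 32> s.c
          |
          first_child
          v
       <0, 32> s.in.a  --next_sibling-->  <32, 32> s.in.b             */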
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static struct pointer_map_t *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}
/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table <uid_decl_hasher> candidates;
/* For a candidate UID return the candidates decl.  */

static tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates.find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;
/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;
/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;
/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;
/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(vec<access_p> **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates.create (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  vec<access_p> *access_vec = (vec<access_p> *) *value;
  vec_free (access_vec);
  return true;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  candidates.dispose ();
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates.clear_slot (candidates.find_slot_with_hash (decl,
							   DECL_UID (decl),
							   NO_INSERT));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!host_integerp (bit_position (fld), 0))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
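/* For example (purely illustrative numbers): with func_param_count == 3,
   a dereference of parameter number 1 reaching offset + size == 64 bits
   in basic block number 4 would be recorded as

       bb_dereferences[4 * 3 + 1] = 64;

   provided no larger distance was stored there before.  */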
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  vec<access_p> *v;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    v = (vec<access_p> *) *slot;
  else
    vec_alloc (v, 32);
  v->safe_push (access);

  *((vec<access_p> **)
    pointer_map_insert (base_access_vec, base)) = v;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
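/* E.g. (illustrative types only):

       struct ok  { struct { int a; float f; } in; long l; };   qualifies
       struct bad { int a : 3; int b; };                        does not

   The first consists solely of records and gimple register types; the
   second is rejected by the DECL_BIT_FIELD check above.  */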
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters; return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
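/* For example (illustrative <offset, size> pairs in bits), the accesses

       <0, 64>  <0, 32>  <32, 32>

   are already in the required order: the enclosing <0, 64> access sorts
   before the smaller <0, 32> access at the same offset, which is what the
   tree-building code in build_access_subtree relies on.  */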
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;

    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
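/* E.g. (illustrative): for an access like p.in.i the functions above
   produce the replacement name "p$in$i", and for a[3].x they produce
   "a$3$x"; a non-constant array index contributes only the '$'
   separator, per the early break in make_fancy_name_1.  */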
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
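/* E.g. (an illustrative sketch): for a candidate declared as struct S s,
   a bit OFFSET of 32 and EXP_TYPE of int, the result is a reference along
   the lines of

       MEM[(int *)&s + 4B]

   with EXP_TYPE's alignment capped according to the misalignment computed
   above.  */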
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed; it returns true when it does, and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !host_integerp (tr_pos, 1))
		continue;
	      pos = TREE_INT_CST_LOW (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = TREE_INT_CST_LOW (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
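/* A worked example of the ARRAY_TYPE arithmetic above (illustrative,
   assuming 32-bit int): for int a[4][4] and a bit OFFSET of 160, the row
   size el_size is 128 bits, so the first iteration emits a[1] and loops
   with offset 160 % 128 == 32, which in the inner array yields a[1][1].  */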
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!host_integerp (TYPE_SIZE (type), 1))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates.find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;
      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type, NULL);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    repl = create_tmp_var (access->type, "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;

      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
2120 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2121 sorts of access flags appropriately along the way, notably always set
2122 grp_read and grp_assign_read according to MARK_READ and grp_write when
2125 Creating a replacement for a scalar access is considered beneficial if its
2126 grp_hint is set (this means we are either attempting total scalarization or
2127 there is more than one direct read access) or according to the following
2130 Access written to through a scalar type (once or more times)
2132 | Written to in an assignment statement
2134 | | Access read as scalar _once_
2136 | | | Read in an assignment statement
2138 | | | | Scalarize Comment
2139 -----------------------------------------------------------------------------
2140 0 0 0 0 No access for the scalar
2141 0 0 0 1 No access for the scalar
2142 0 0 1 0 No Single read - won't help
2143 0 0 1 1 No The same case
2144 0 1 0 0 No access for the scalar
2145 0 1 0 1 No access for the scalar
2146 0 1 1 0 Yes s = *g; return s.i;
2147 0 1 1 1 Yes The same case as above
2148 1 0 0 0 No Won't help
2149 1 0 0 1 Yes s.i = 1; *g = s;
2150 1 0 1 0 Yes s.i = 5; g = s.i;
2151 1 0 1 1 Yes The same case as above
2152 1 1 0 0 No Won't help.
2153 1 1 0 1 Yes s.i = 1; *g = s;
2154 1 1 1 0 Yes s = *g; return s.i;
2155 1 1 1 1 Yes Any of the above yeses */
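
/* Editorial illustration, not part of the original sources: the row
   "0 1 1 0  Yes" above corresponds to code like

     struct S { int i; };
     int f (struct S *g)
     {
       struct S s;
       s = *g;        <- written to in an assignment statement
       return s.i;    <- read as a scalar exactly once
     }

   where scheduling a replacement for s.i lets the aggregate copy be reduced
   to a load of g->i alone.  */
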
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
					     root->base, root->offset,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by means of
   analyze_access_subtree.  */
static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */
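
/* Editorial sketch, not from the original sources: given two candidate
   aggregates and the recorded assign link for

     b = a;    (a is RACC's base, b is LACC's base)

   a representative access to a.f is propagated here so that an artificial
   child access describing b.f is created as well; reads of b.f can then be
   satisfied from a scalar replacement instead of forcing b back into
   memory.  */
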
static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */
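
/* Editorial sketch, not from the original sources: assuming 8-byte words and
   a MOVE_RATIO of 4, max_total_scalarization_size below is 256 bits, so a
   local such as

     struct { int x; int y; } p;    (64 bits, consists only of records)

   that is marked in should_scalarize_away_bitmap is completely scalarized,
   while a larger aggregate only has its individually accessed parts
   considered.  */
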
static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
		<= max_total_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */
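
/* Editorial illustration with assumed replacement names, not from the
   original sources: if s.i and s.f have replacements s$i and s$f, a
   statement that needs the whole of s (WRITE false, INSERT_AFTER false) is
   preceded by

     s.i = s$i;
     s.f = s$f;

   whereas refreshing the replacements from AGG (WRITE true) emits the
   assignments in the opposite direction:

     s$i = s.i;
     s$f = s.f;  */
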
static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  while (access)
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gimple stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  gimple ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
}
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */
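
/* Editorial illustration with assumed replacement names, not from the
   original sources: for

     s = (struct S) {};

   where s has replacements s$i and s$f, this emits

     s$i = 0;
     s$f = 0.0;

   so the zeroing constructor assignment can often be removed afterwards.  */
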
static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
					   build_zero_cst (access->type),
					   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, access->base, access->offset, access,
				     NULL, false);

	  if (write)
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gimple stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
					   NULL_TREE,
					   gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
	{
	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
	  start_offset = access->offset
	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */
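
/* Editorial sketch, not from the original sources: for an assignment
   "d = s;" where s is TOP_RACC's base, replacements of s are flushed back
   into s itself when s still has unscalarized parts (SRA_UDH_RIGHT, the
   original statement then copies everything), and are stored directly into
   d otherwise (SRA_UDH_LEFT).  */
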
static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
				     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
			       gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
			       0, 0, gsi, false, false,
			       gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignment this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
				 HOST_WIDE_INT left_offset,
				 gimple_stmt_iterator *old_gsi,
				 gimple_stmt_iterator *new_gsi,
				 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gimple stmt;
	  tree rhs;

	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				       lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
						true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first...  */
	      if (*refreshed == SRA_UDH_NONE)
		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
								  old_gsi);

	      if (*refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
					   new_gsi, true);
	      else
		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
					   new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (*refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    *refreshed = handle_unscalarized_data_in_subtree (top_racc,
							      old_gsi);
	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      gimple ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (top_racc, offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		{
		  if (racc->grp_write)
		    drhs = get_access_replacement (racc);
		  else
		    drhs = NULL;
		}
	      else if (*refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (loc, lacc->base,
						  lacc->offset, lacc);
	      else if (*refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
						  lacc);
	      else
		drhs = NULL_TREE;
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (*old_gsi));
	      gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
					 old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  if (gimple_clobber_p (*stmt))
    {
      /* Remove clobbers of fully scalarized variables, otherwise
	 do nothing.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (*stmt);
	  gsi_remove (gsi, true);
	  release_defs (*stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_NONE;
    }

  loc = gimple_location (*stmt);
  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      release_defs (*stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
		       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static inline bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_component_ref_p (lhs))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (*stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
	      && !contains_vce_or_bfcref_p (drhs))
	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
	  if (drhs
	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
					     TREE_TYPE (drhs)))
	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				    TREE_TYPE (dlhs), drhs);
	}
      gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the
     LHS has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements
     of the LHS.  This is what the first branch does.  */
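
  /* Editorial illustration of the second scenario above, with assumed
     replacement names: if in

       d = s;

     only s has replacements s$i and s$f and they cover s completely, the
     copies

       d.i = s$i;
       d.f = s$f;

     are generated instead and the aggregate assignment is removed.  */
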
  if (modify_this_stmt
      || gimple_has_volatile_ops (*stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
				 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (*stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  gimple_stmt_iterator orig_gsi = *gsi;
	  enum unscalarized_data_handling refreshed;

	  if (lacc->grp_read && !lacc->grp_covered)
	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
	  else
	    refreshed = SRA_UDH_NONE;

	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
					   &orig_gsi, gsi, &refreshed);
	  if (refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (gsi);
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (&orig_gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, *stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (*stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (*stmt);
	      gsi_remove (gsi, true);
	      release_defs (*stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
				     gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
/* Traverse the function body and perform all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (&stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = (*access_vec)[0];
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
namespace {

const pass_data pass_data_sra_early =
{
  GIMPLE_PASS, /* type */
  "esra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_sra_early : public gimple_opt_pass
{
public:
  pass_sra_early (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra_early, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_intra_sra (); }
  unsigned int execute () { return early_intra_sra (); }

}; // class pass_sra_early

} // anon namespace

gimple_opt_pass *
make_pass_sra_early (gcc::context *ctxt)
{
  return new pass_sra_early (ctxt);
}

namespace {

const pass_data pass_data_sra =
{
  GIMPLE_PASS, /* type */
  "sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_address_taken, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_sra : public gimple_opt_pass
{
public:
  pass_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_intra_sra (); }
  unsigned int execute () { return late_intra_sra (); }

}; // class pass_sra

} // anon namespace

gimple_opt_pass *
make_pass_sra (gcc::context *ctxt)
{
  return new pass_sra (ctxt);
}
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */
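
/* Editorial illustration, not from the original sources:

     int f (int *p) { return *p; }             <- only a dereference
     int g (int *p) { h (p); return *p; }      <- p itself escapes to h

   so the pointer value has no direct uses in f, but passing p itself to the
   (assumed) function h in g is a direct use and disqualifies the
   parameter.  */
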
static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree_node **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || upc_shared_type_p (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !host_integerp (TYPE_SIZE (type), 1)
	  || tree_low_cst (TYPE_SIZE (type), 1) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = candidates.find_slot_with_hash (parm, DECL_UID (parm), INSERT);
      *slot = parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */
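
/* Editorial sketch, not from the original sources: if one successor of a
   block guarantees a dereference distance of 64 bits for a parameter and
   another only 32, the block inherits the minimum, 32; combining that with
   the block's own distance via a maximum means the entry block ends up with
   the largest offset + size that is certainly dereferenced on every path.  */
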
static void
propagate_dereference_distances (void)
{
  vec<basic_block> queue;
  basic_block bb;

  queue.create (last_basic_block_for_function (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR)
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences[succ_idx];
		}
	      else if (bb_dereferences[succ_idx] < inh)
		inh = bb_dereferences[succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }

  queue.release ();
}
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine which (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereference "distance," i.e. the offset + size of the accessed
   part, is calculated for each pointer parameter rather than a simple
   boolean, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such a parameter should
   be turned into one passed by value.  Return NULL otherwise.  */
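
/* Editorial illustration, not from the original sources: in

     int f (const int *p) { return *p + *p; }

   p is only read through dereferences, so its representative access lets
   IPA-SRA pass the pointed-to int by value, effectively turning the clone
   into "int f (int p$val)" (p$val is an assumed name).  */
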
static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity.  (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special pointer value
   &no_accesses_representant if there are no accesses for this parameter at
   all, and return the first representative otherwise.  Set *RO_GRP if there
   is a group of accesses with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */
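
/* Editorial sketch, not from the original sources: for

     void f (struct S *p);    with struct S { int a; int b; int c; }

   where only p->a and p->c are safely dereferenced and unmodified,
   total_size is 64 bits against an aggregate size of 96 bits, so the
   parameter can be split into two scalar components, provided 64 bits also
   stays within PARAM_IPA_SRA_PTR_GROWTH_FACTOR times the original parameter
   size.  */
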
static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  representatives.quick_push (&no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  representatives.quick_push (repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  representatives.quick_push (repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to the first
   representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
   expected final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
                                       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment adj;

          memset (&adj, 0, sizeof (adj));
          adj.base_index = get_param_index (parm, parms);
          adj.base = parm;
          if (!repr)
            adj.copy_param = 1;
          else
            adj.remove_param = 1;
          adjustments.quick_push (adj);
        }
      else
        {
          struct ipa_parm_adjustment adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              memset (&adj, 0, sizeof (adj));
              gcc_assert (repr->base == parm);
              adj.base_index = index;
              adj.base = repr->base;
              adj.type = repr->type;
              adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
              adj.offset = repr->offset;
              adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                            && (repr->grp_maybe_modified
                                || repr->grp_not_necessarilly_dereferenced));
              adjustments.quick_push (adj);
            }
        }
    }
  parms.release ();
  return adjustments;
}
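/* For illustration (a hypothetical example, not verbatim pass output): given
   `void f (int a, struct S s)' where only s.x and s.y are accessed, the
   vector built above would hold three entries: a copy_param adjustment with
   base_index 0 for A, followed by two adjustments with base_index 1 whose
   type and offset fields describe s.x and s.y respectively.  */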
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                representatives[i] = NULL;
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  representatives[i] = NULL;
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
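/* For example (names illustrative only): the first call for an adjustment
   describing removed parameter `n' creates a temporary that would be dumped
   as something like `ISR.5', with a DECL_NAME derived from make_fancy_name
   ("n" here); any later call for the same adjustment returns the cached
   new_ssa_base instead of creating another declaration.  */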
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL, together with all of its uses, and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
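/* Illustrative before/after (hypothetical SSA names): if parameter N is being
   removed, a definition such as

     n_1 = PHI <n_2(D), n_3>

   keeps its shape, but the result is re-created as an SSA name of the "ISR"
   substitute VAR_DECL and every use of the old n_1 is redirected to it, so
   no SSA name of the removed PARM_DECL survives.  */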
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function will
   leave incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
                     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = adjustments.length ();

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = &adjustments[i];

      if (adj->base == base
          && (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
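/* An illustrative rewrite (hypothetical names): if the adjustment selected
   above has a reduction ISRA.3 standing for the component at the offset of
   p->x, a use of p_1(D)->x becomes ISRA.3 (or *ISRA.3 when the component is
   still passed by reference), wrapped in a VIEW_CONVERT_EXPR when CONVERT is
   set and the types do not match.  */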
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
                                            NULL);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}
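/* For instance (hypothetical): when `d = s' has had both sides rewritten to
   scalars of incompatible types, the RHS becomes
   VIEW_CONVERT_EXPR<lhs_type>(rhs) and is forced into its own statement
   before the assignment; a CONSTRUCTOR on the RHS is instead replaced
   outright, since V_C_Es of constructors can cause trouble (PR 42714).  */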
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}
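/* Note the asymmetry in the CONVERT arguments above: return values, call
   arguments and asm inputs are rewritten with CONVERT set to true, presumably
   because no separate conversion statement can be interposed in those
   positions, whereas assignment operands and call LHSs pass false and leave
   type mismatches to sra_ipa_modify_assign to resolve.  */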
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            if (gimple_clobber_p (stmt))
              {
                gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
                unlink_stmt_vdef (stmt);
                gsi_remove (&cgsi, true);
                release_defs (stmt);
                continue;
              }
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy)
            = BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}
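/* The intended debug-info effect, sketched on a hypothetical dump: a leftover
   debug use of removed parameter `n',

     # DEBUG n => n_1(D)

   is redirected through a debug expression bound at function entry,

     # DEBUG D#1 s=> n
     # DEBUG n => D#1

   so debuggers can still recover N's value where the caller provides it.  */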
/* Return false iff all callers have at least as many actual arguments as
   there are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
                                         void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
                 xstrdup (cgraph_node_name (cs->caller)),
                 cs->caller->symbol.order,
                 xstrdup (cgraph_node_name (cs->callee)),
                 cs->callee->symbol.order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
                               &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->symbol.decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}
/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  new_node = cgraph_function_versioning (node, redirect_callers,
                                         NULL,
                                         NULL, false, NULL, NULL, "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->symbol.decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
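/* The net effect (illustrative): `foo' is cloned into a local variant such as
   `foo.isra.0' whose formal parameters reflect ADJUSTMENTS, e.g.

     int foo (struct S *p);   ->   int foo.isra.0 (int p$x);

   the body of the clone is then rewritten and all recorded callers are
   redirected to it.  */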
/* If NODE has a caller, return true.  */

static bool
has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->callers)
    return true;
  return false;
}
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->symbol.decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
                                   not_all_callers_have_enough_arguments_p,
                                   NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
5082 ipa_early_sra_gate (void)
5084 return flag_ipa_sra
&& dbg_cnt (eipa_sra
);
namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return ipa_early_sra_gate (); }
  unsigned int execute () { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}