/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2016 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
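/* As a quick illustration of the end result (this example is not part of
   the pass itself), given a function like

     struct S { int a; float b; };

     float
     f (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2.0f;
       return s.b + s.a;
     }

   intraprocedural SRA conceptually rewrites the body into

     int SR_a;
     float SR_b;
     SR_a = 1;
     SR_b = 2.0f;
     return SR_b + SR_a;

   so that S never needs to exist in memory and the scalar optimizers can
   track both values; the "SR" names mirror the prefix used below by
   create_access_replacement.  */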
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */
/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
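/* For intuition (an illustrative sketch, not part of the data structure):
   given

     struct T { int x; int y; };
     struct S { struct T t; int z; };

   and a function that writes all of s while also reading s.t.x and s.z,
   the representatives could form a tree roughly like

     <offset 0, size 96>    s      (root)
       <offset 0, size 32>    s.t.x  (first_child of the root)
       <offset 64, size 32>   s.z    (next_sibling of s.t.x)

   assuming 32-bit ints: children lie within their parent and are sorted
   by offset.  */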
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field? */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order? */
  unsigned reverse : 1;

  /* Is this particular access write access? */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely? */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};
/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}
/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;
/* For a candidate UID, return the candidate's decl.  */

static tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used as a whole and need to
   be).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;
/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;
/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;
/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;
/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;
/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;
/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;
/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  fprintf (f, ", non_addressable = %d, reverse = %d",
           access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}
/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool
constant_decl_p (tree decl)
{
  return TREE_CODE (decl) == VAR_DECL && DECL_IN_CONSTANT_POOL (decl);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
static bool maybe_add_sra_candidate (tree);
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  Also scan for uses of constant pool as we go along and add
   to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
          && !is_gimple_reg_type (TREE_TYPE (expr))
          && dump_file && (dump_flags & TDF_DETAILS))
        {
          /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
             and elements of multidimensional arrays (which are
             multi-element arrays in their own right).  */
          fprintf (dump_file, "Allowing non-reg-type load of part"
                              " of constant-pool entry: ");
          print_generic_expr (dump_file, expr, 0);
        }
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  */

static bool
scalarizable_type_p (tree type)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (DECL_BIT_FIELD (fld))
              return false;

            if (!is_gimple_reg_type (ft)
                && !scalarizable_type_p (ft))
              return false;
          }

      return true;

    case ARRAY_TYPE:
      {
        if (TYPE_DOMAIN (type) == NULL_TREE
            || !tree_fits_shwi_p (TYPE_SIZE (type))
            || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
            || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
            || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
          return false;
        if (tree_to_shwi (TYPE_SIZE (type)) == 0
            && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
          /* Zero-element array, should not prevent scalarization.  */
          ;
        else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
                 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
          /* Variable-length array, do not allow scalarization.  */
          return false;

        tree elem = TREE_TYPE (type);
        if (!is_gimple_reg_type (elem)
            && !scalarizable_type_p (elem))
          return false;
        return true;
      }
    default:
      return false;
    }
}
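/* Under these rules (illustration only, assuming no volatile members):

     struct A { int i; float f; };        scalarizable
     struct B { int bits : 3; int j; };   not scalarizable (bit-field)
     struct C { struct A a[4]; };         scalarizable (fixed-length array
                                          of a scalarizable type)
     struct D { int zero[0]; int k; };    scalarizable (zero-element arrays
                                          do not preclude it)  */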
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree,
                            tree);
/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression
   for the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            HOST_WIDE_INT pos = offset + int_bit_position (fld);
            tree ft = TREE_TYPE (fld);
            tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

            scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
                            TYPE_REVERSE_STORAGE_ORDER (decl_type),
                            nref, ft);
          }
      break;
    case ARRAY_TYPE:
      {
        tree elemtype = TREE_TYPE (decl_type);
        tree elem_size = TYPE_SIZE (elemtype);
        gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
        HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
        gcc_assert (el_size > 0);

        tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
        gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
        tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
        /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
        if (maxidx)
          {
            gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
            tree domain = TYPE_DOMAIN (decl_type);
            /* MINIDX and MAXIDX are inclusive, and must be interpreted in
               DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
            offset_int idx = wi::to_offset (minidx);
            offset_int max = wi::to_offset (maxidx);
            if (!TYPE_UNSIGNED (domain))
              {
                idx = wi::sext (idx, TYPE_PRECISION (domain));
                max = wi::sext (max, TYPE_PRECISION (domain));
              }
            for (int el_off = offset; wi::les_p (idx, max); ++idx)
              {
                tree nref = build4 (ARRAY_REF, elemtype,
                                    ref,
                                    wide_int_to_tree (domain, idx),
                                    NULL_TREE, NULL_TREE);
                scalarize_elem (base, el_off, el_size,
                                TYPE_REVERSE_STORAGE_ORDER (decl_type),
                                nref, elemtype);
                el_off += el_size;
              }
          }
      }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member, REVERSE gives its storage order, and REF must be the reference
   expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
                tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
        lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (storage_order_barrier_p (lhs))
        racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (as_a <greturn *> (stmt));
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
                                               asm_visit_addr);
                if (final_bbs)
                  bitmap_set_bit (final_bbs, bb->index);

                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, true);
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
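/* Example of the resulting order (illustration only): for accesses into
   the same variable with <offset, size, type> of

     <0, 32, int>  <0, 64, struct>  <32, 32, float>

   qsort with the function above produces

     <0, 64, struct>  <0, 32, int>  <32, 32, float>

   i.e. equal offsets sort bigger sizes first, so an enclosing access
   always precedes the accesses that fall inside it.  */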
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, gsi must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
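/* For instance (an illustrative sketch, not output of the compiler), a
   request for a float at bit offset 32 within a base of the form *p_1
   would yield something like

     MEM[(float *)p_1 + 4B]

   with EXP_TYPE re-aligned via build_aligned_type if the base does not
   guarantee the natural alignment of float.  */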
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
                                gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return
      build_ref_for_offset (loc, base, offset, model->reverse, model->type,
                            gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
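/* A sketch of the difference (illustration only): where build_ref_for_model
   may emit a helper statement to take the address of a variable-offset
   base, the debug variant above simply gives up and returns NULL_TREE, so
   a debug bind loses its value rather than forcing new code into the
   instruction stream.  */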
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
                continue;
              pos = tree_to_uhwi (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !tree_fits_uhwi_p (tr_size))
                continue;
              size = tree_to_uhwi (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !tree_fits_uhwi_p (tr_size))
            return false;
          el_size = tree_to_uhwi (tr_size);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
/* Return true iff TYPE is stdarg va_list type.  */

static bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  /* Allow constant-pool entries (that "need to live in memory")
     unless we are doing IPA SRA.  */
  if (needs_to_live_in_memory (var)
      && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
         we also want to schedule it rather late.  Thus we ignore it in
         the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
1952 those with type which is suitable for scalarization. */
1955 find_var_candidates (void)
1961 for (parm
= DECL_ARGUMENTS (current_function_decl
);
1963 parm
= DECL_CHAIN (parm
))
1964 ret
|= maybe_add_sra_candidate (parm
);
1966 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1968 if (TREE_CODE (var
) != VAR_DECL
)
1971 ret
|= maybe_add_sra_candidate (var
);
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
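/* Overlap illustration (not from the sources): accesses <0, 64> and
   <32, 64> partially overlap, since the second starts inside the first but
   ends outside it, so the whole variable is given up on (NULL above),
   whereas <0, 64> and <32, 32> nest cleanly and both survive as
   representatives.  */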
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    /* Drop any special alignment on the type if it's not on the main
       variant.  This avoids issues with weirdo ABIs like AAPCS.  */
    repl = create_tmp_var (build_qualified_type
			     (TYPE_MAIN_VARIANT (access->type),
			      TYPE_QUALS (access->type)), "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or
	 variable and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}
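/* Illustration (added; names are only examples): the replacement is
   created as an artificial temporary with the "SR" prefix, so it shows
   up in dumps as something like SR.5.  When the base variable has a
   user-visible name, make_fancy_name renames it to a component-derived
   name, so scalarizing s.f would typically yield a variable named
   roughly "s$f" together with a DECL_DEBUG_EXPR pointing back at the
   original component.  */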
/* Return ACCESS scalar replacement, which must exist.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access it
   points to is not "within" the root.  Return false iff some accesses
   partially overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
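/* Illustration (added; offsets assume a hypothetical 32-bit int):

     struct Outer { struct Inner { int a; int b; } in; int c; } o;

   With representatives for o.in <0, 64>, o.in.a <0, 32>, o.in.b
   <32, 32> and o.c <64, 32> in sorted order, o.in.a and o.in.b become
   children of o.in because their extents lie within it, while o.c
   becomes the next group after the o.in subtree.  */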
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably propagate
   grp_read, grp_write and the assignment read/write flags from PARENT, if
   there is one.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */

static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
					     root->offset, root->reverse,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar || !allow_replacements)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
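/* Worked example for the table above (added for illustration):

     struct S { int i; } s, *g;
     s = *g;
     return s.i;

   Here s is written only in an assignment statement and s.i is read as
   a scalar once, i.e. the "0 1 1 0" row, so a replacement is scheduled
   and the return can use the scalar directly once the aggregate copy
   has been decomposed.  */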
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  struct access *access = access_pool.allocate ();
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;
  access->reverse = model->reverse;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
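/* Illustration (added): given

     struct S { int i; int j; } a, b;
     a = b;
     ... = b.i;

   scanning created an assign link from the access for b (RACC) to the
   access for a (LACC).  The subaccess b.i is then mirrored into LACC
   as an artificial child a.i at the corresponding normalized offset,
   so the aggregate copy a = b can later be rewritten component-wise
   even though a.i was never accessed directly.  */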
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
  bool optimize_speed_p = !optimize_function_for_size_p (cfun);

  enum compiler_param param = optimize_speed_p
			? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
			: PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;

  /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
     fall back to a target default.  */
  unsigned HOST_WIDE_INT max_scalarization_size
    = global_options_set.x_param_values[param]
      ? PARAM_VALUE (param)
      : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;

  max_scalarization_size *= BITS_PER_UNIT;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && scalarizable_type_p (TREE_TYPE (var)))
	  {
	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
		<= max_scalarization_size)
	      {
		create_total_scalarization_access (var);
		completely_scalarize (var, TREE_TYPE (var), 0, var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  while (access)
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gassign *stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  gdebug *ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
}
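/* Illustration (added; SR.1 and SR.2 are hypothetical replacement
   names for s.i and s.f): before a statement that needs the whole
   aggregate, such as a call foo (s), the replacements are flushed back
   with WRITE false and INSERT_AFTER false, producing roughly

     s.i = SR.1;
     s.f = SR.2;
     foo (s);

   while after a statement that stores the whole of s the inverse
   copies 'SR.1 = s.i;' and 'SR.2 = s.f;' are emitted with WRITE and
   INSERT_AFTER both true.  */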
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gassign *stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gdebug *ds
	= gimple_build_debug_bind (get_access_replacement (access),
				   build_zero_cst (access->type),
				   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
		 bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      tree rep = get_access_replacement (access);
      tree clobber = build_constructor (access->type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *stmt = gimple_build_assign (rep, clobber);

      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    clobber_subtree (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;
  bool reverse;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
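/* Illustration (added): for an expression such as s.i[2] with

     struct S { int i[4]; } s;

   get_ref_base_and_extent returns base s with a constant offset and
   size, so the lookup can find the access recorded during scanning.
   For a variable index like s.i[k], max_size instead covers the whole
   array and that extent is used for the lookup; for bases that are not
   declarations (e.g. *p) the function gives up and returns NULL.  */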
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr, orig_expr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);
  orig_expr = *expr;

  loc = gimple_location (gsi_stmt (*gsi));
  gimple_stmt_iterator alt_gsi = gsi_none ();
  if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
    {
      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
      gsi = &alt_gsi;
    }

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
         access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

         We also want to use this when accessing a complex or vector which can
         be accessed as a different type too, potentially creating a need for
         type conversion (see PR42196) and when scalarized unions are involved
         in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);

	  if (write)
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
					    NULL_TREE,
					    gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
	{
	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
	  start_offset = access->offset
	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, orig_expr, access->offset,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.   */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static void
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
  tree src;
  if (sad->top_racc->grp_unscalarized_data)
    {
      src = sad->assignment_rhs;
      sad->refreshed = SRA_UDH_RIGHT;
    }
  else
    {
      src = sad->assignment_lhs;
      sad->refreshed = SRA_UDH_LEFT;
    }
  generate_subtree_copies (sad->top_racc->first_child, src,
			   sad->top_racc->offset, 0, 0,
			   &sad->old_gsi, false, false, sad->loc);
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc,
				 struct subreplacement_assignment_data *sad)
{
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset;
      offset = lacc->offset - sad->left_offset + sad->top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gassign *stmt;
	  tree rhs;

	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
				       lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
						NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (sad->refreshed == SRA_UDH_NONE)
		handle_unscalarized_data_in_subtree (sad);

	      if (sad->refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      else
		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (sad->new_gsi,
						rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, sad->loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (sad->refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (sad);

	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      gdebug *ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (sad->top_racc,
							    offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		{
		  if (racc->grp_write || constant_decl_p (racc->base))
		    drhs = get_access_replacement (racc);
		  else
		    drhs = NULL;
		}
	      else if (sad->refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
						  lacc->offset, lacc);
	      else if (sad->refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (sad->loc,
						  sad->top_racc->base,
						  offset, lacc);
	      else
		drhs = NULL_TREE;
	      if (drhs
		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
					lacc->type, drhs);
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (sad->old_gsi));
	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, sad);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
			     SRA_AM_MODIFIED,   /* stmt changed but not
						   removed */
			     SRA_AM_REMOVED };  /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (stmt);
  struct access *acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;
  location_t loc = gimple_location (stmt);

  if (gimple_clobber_p (stmt))
    {
      /* Clobber the replacement variable.  */
      clobber_subtree (acc, gsi, !acc->grp_covered, loc);
      /* Remove clobbers of fully scalarized variables, they are dead.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (gsi, true);
	  release_defs (stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_MODIFIED;
    }

  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (stmt);
      gsi_remove (gsi, true);
      release_defs (stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
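/* Illustration (added; SR.1 and SR.2 are hypothetical replacement
   names): for

     struct S { int i; float f; } s = {};

   an empty-CONSTRUCTOR store to a fully covered s is replaced by
   'SR.1 = 0;' and 'SR.2 = 0.0;' and the original statement is removed
   (SRA_AM_REMOVED); when parts of s remain unscalarized, the
   zero-initializations are inserted after the statement instead and
   the statement is kept (SRA_AM_MODIFIED).  */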
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
		       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;
  /* Avoid modifying initializations of constant-pool replacements.  */
  if (racc && (racc->replacement_decl == lhs))
    return SRA_AM_NONE;

  loc = gimple_location (stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && !racc->grp_unscalarizable_region
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_component_ref_p (lhs))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
	      && !contains_vce_or_bfcref_p (drhs))
	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
	  if (drhs
	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
					     TREE_TYPE (drhs)))
	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				    TREE_TYPE (dlhs), drhs);
	}
      gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */

  if (modify_this_stmt
      || gimple_has_volatile_ops (stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs)
      || stmt_ends_bb_p (stmt))
    {
      /* No need to copy into a constant-pool, it comes pre-initialized.  */
      if (access_has_children_p (racc) && !constant_decl_p (racc->base))
	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	{
	  gimple_stmt_iterator alt_gsi = gsi_none ();
	  if (stmt_ends_bb_p (stmt))
	    {
	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
	      gsi = &alt_gsi;
	    }
	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
				   gsi, true, true, loc);
	}
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  struct subreplacement_assignment_data sad;

	  sad.left_offset = lacc->offset;
	  sad.assignment_lhs = lhs;
	  sad.assignment_rhs = rhs;
	  sad.top_racc = racc;
	  sad.old_gsi = *gsi;
	  sad.new_gsi = gsi;
	  sad.loc = gimple_location (stmt);
	  sad.refreshed = SRA_UDH_NONE;

	  if (lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (&sad);

	  load_assign_lhs_subreplacements (lacc, &sad);
	  if (sad.refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (sad.new_gsi);
	      unlink_stmt_vdef (stmt);
	      gsi_remove (&sad.old_gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data
	      && TREE_CODE (lhs) != SSA_NAME)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (stmt);
	      gsi_remove (gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, rhs, racc->offset,
				     0, 0, gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
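/* Illustration (added; SR.* names are hypothetical): for an aggregate
   copy a = b in which both sides have scalarized components, the
   statement is replaced entirely by loads such as 'SR.a_i = SR.b_i;'
   when every LHS component has a matching RHS replacement.  When one
   cannot be located (e.g. because of a union-induced type mismatch),
   the RHS aggregate is first refreshed from its own replacements, the
   original copy is kept to move the bits, and the LHS replacements are
   re-loaded after it, which is the "fourth case" described above.  */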
/* Set any scalar replacements of values in the constant pool to the initial
   value of the constant.  (Constant-pool decls like *.LC0 have effectively
   been initialized before the program starts, we must do the same for their
   replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
   the function's entry block.  */

static void
initialize_constant_pool_replacements (void)
{
  gimple_seq seq = NULL;
  gimple_stmt_iterator gsi = gsi_start (seq);
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);
	if (!constant_decl_p (var))
	  continue;
	vec<access_p> *access_vec = get_base_access_vector (var);
	if (!access_vec)
	  continue;
	for (unsigned i = 0; i < access_vec->length (); i++)
	  {
	    struct access *access = (*access_vec)[i];
	    if (!access->replacement_decl)
	      continue;
	    gassign *stmt = gimple_build_assign (
	      get_access_replacement (access), unshare_expr (access->expr));
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Generating constant initializer: ");
		print_gimple_stmt (dump_file, stmt, 0, 1);
		fprintf (dump_file, "\n");
	      }
	    gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	  }
      }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (
      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
/* Traverse the function body and all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  initialize_constant_pool_replacements ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  gsi_commit_edge_inserts ();
  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = (*access_vec)[0];
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
/* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
   it reveals there are components of some aggregates to be scalarized, it runs
   the required transformations.  */
static unsigned int
perform_intra_sra (void)
{
  int ret = 0;
  sra_initialize ();

  if (!find_var_candidates ())
    goto out;

  if (!scan_function ())
    goto out;

  if (!analyze_all_variable_accesses ())
    goto out;

  if (sra_modify_function_body ())
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  initialize_parameter_reductions ();

  statistics_counter_event (cfun, "Scalar replacements created",
			    sra_stats.replacements);
  statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
  statistics_counter_event (cfun, "Subtree copy stmts",
			    sra_stats.subtree_copies);
  statistics_counter_event (cfun, "Subreplacement stmts",
			    sra_stats.subreplacements);
  statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
  statistics_counter_event (cfun, "Separate LHS and RHS handling",
			    sra_stats.separate_lhs_rhs_handling);

 out:
  sra_deinitialize ();
  return ret;
}
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}
/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}
static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
namespace {

const pass_data pass_data_sra_early =
{
  GIMPLE_PASS, /* type */
  "esra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra_early : public gimple_opt_pass
{
public:
  pass_sra_early (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra_early, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return early_intra_sra (); }

}; // class pass_sra_early

} // anon namespace

gimple_opt_pass *
make_pass_sra_early (gcc::context *ctxt)
{
  return new pass_sra_early (ctxt);
}
namespace {

const pass_data pass_data_sra =
{
  GIMPLE_PASS, /* type */
  "sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_address_taken, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra : public gimple_opt_pass
{
public:
  pass_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return late_intra_sra (); }

}; // class pass_sra

} // anon namespace

gimple_opt_pass *
make_pass_sra (gcc::context *ctxt)
{
  return new pass_sra (ctxt);
}
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple *stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
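/* Illustration (added): in

     int f (int *p) { return *p; }

   every use of p's default definition is a MEM_REF with a zero offset
   and a compatible type, so the function returns false and p remains
   an IPA-SRA candidate; in

     int g (int *p) { return p == 0; }

   the comparison uses the pointer value itself, the valid-use count
   does not match the use count, and the function returns true.  */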
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree_node **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
	  || tree_to_uhwi (TYPE_SIZE (type)) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
      *slot = parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  basic_block bb;

  auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }
}
4149 dump_dereferences_table (FILE *f
, const char *str
, HOST_WIDE_INT
*table
)
4153 fprintf (dump_file
, "%s", str
);
4154 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
),
4155 EXIT_BLOCK_PTR_FOR_FN (cfun
), next_bb
)
4157 fprintf (f
, "%4i %i ", bb
->index
, bitmap_bit_p (final_bbs
, bb
->index
));
4158 if (bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
4161 for (i
= 0; i
< func_param_count
; i
++)
4163 int idx
= bb
->index
* func_param_count
+ i
;
4164 fprintf (f
, " %4" HOST_WIDE_INT_PRINT
"d", table
[idx
]);
4169 fprintf (dump_file
, "\n");
/* Determine which (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function, and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   Rather than a simple boolean, the maximum dereferenced "distance," i.e. the
   offset plus size of the accessed part, is calculated for each pointer
   parameter, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */
static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table before propagation:\n",
                             bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table after propagation:\n",
                             bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
        continue;

      do
        {
          if ((repr->offset + repr->size) > bb_dereferences[idx])
            repr->grp_not_necessarilly_dereferenced = 1;
          repr = repr->next_grp;
        }
      while (repr);
    }
}

/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such a parameter should
   be turned into one passed by value.  Return NULL otherwise.  */
static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
        return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}

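/* For illustration (function and clone names are invented): a candidate like

     static int foo (const int *p) { return *p + 1; }

   where the pointee is only read and the pointer itself does not escape can
   later be rewritten into a clone

     static int foo.isra.0 (int p_val) { return p_val + 1; }

   with calls foo (&x) redirected to foo.isra.0 (x).  */
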
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is an incompatible assignment in a call statement (and possibly even in
     asm statements).  This can be relaxed by using a new temporary but only
     for non-TREE_ADDRESSABLE types and is probably not worth the complexity.
     (In intraprocedural SRA we deal with this by keeping the old aggregate
     around, something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
          || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}

/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special pointer value
   meaning there are no accesses for this parameter if that is the case, and
   otherwise return the first representative.  Set *RO_GRP if there is a group
   of accesses with only read (i.e. no write) accesses.  */
static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
        return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
         nasty overlap which would preclude us from breaking this parameter
         apart.  */

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset)
            {
              /* All or nothing law for parameters.  */
              if (access->offset + access->size > ac2->offset)
                return NULL;
              else
                break;
            }
          else if (ac2->size != access->size)
            return NULL;

          if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
              || (ac2->type != access->type
                  && (TREE_ADDRESSABLE (ac2->type)
                      || TREE_ADDRESSABLE (access->type)))
              || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
            return NULL;

          modification |= ac2->write;
          ac2->group_representative = access;
          ac2->next_sibling = access->next_sibling;
          access->next_sibling = ac2;
          j++;
        }

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
        *ro_grp = true;
      access->group_representative = access;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}

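/* A hypothetical example of the "all or nothing law" above: accesses to a
   parameter at (offset 0, size 64) and (offset 32, size 32) partially
   overlap, so splice_param_accesses returns NULL and the parameter stays
   intact; accesses at (0, 32) and (32, 32), by contrast, are disjoint and
   form two groups that can become two separate parameters.  */
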
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */
static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);
      new_param_count++;

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
        return 0;

      /* Do not decompose a non-BLKmode param in a way that would
         create BLKmode params.  Especially for by-reference passing
         (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
          && TYPE_MODE (repr->type) == BLKmode)
        return 0;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}

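/* A made-up example of the size checks above: for

     struct S { int a, b, c, d; };              (16 bytes)
     static int bar (struct S s) { return s.a + s.c; }

   only 8 bytes of the aggregate are accessed, so total_size < agg_size, and
   unless the PARAM_IPA_SRA_PTR_GROWTH_FACTOR limit is exceeded the parameter
   is split into two int components.  */
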
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                           MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          representatives.quick_push (&no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          representatives.quick_push (repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          representatives.quick_push (repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}

/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}

/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
                                       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment adj;

          memset (&adj, 0, sizeof (adj));
          adj.base_index = get_param_index (parm, parms);
          adj.base = parm;
          if (!repr)
            adj.op = IPA_PARM_OP_COPY;
          else
            adj.op = IPA_PARM_OP_REMOVE;
          adj.arg_prefix = "ISRA";
          adjustments.quick_push (adj);
        }
      else
        {
          struct ipa_parm_adjustment adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              memset (&adj, 0, sizeof (adj));
              gcc_assert (repr->base == parm);
              adj.base_index = index;
              adj.base = repr->base;
              adj.type = repr->type;
              adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
              adj.offset = repr->offset;
              adj.reverse = repr->reverse;
              adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                            && (repr->grp_maybe_modified
                                || repr->grp_not_necessarilly_dereferenced));
              adj.arg_prefix = "ISRA";
              adjustments.quick_push (adj);
            }
        }
    }
  parms.release ();
  return adjustments;
}

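/* Sketch of the produced vector for a hypothetical function
   foo (int unused, struct S *p) where only p->a is read: the result would
   hold one IPA_PARM_OP_REMOVE entry for UNUSED (it has no accesses) and one
   new-parameter entry with base_index 1 and the offset and type of p->a.  */
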
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                representatives[i] = NULL;
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  representatives[i] = NULL;
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
        return adj;
    }

  return NULL;
}

/* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
   parameter which is to be removed because its value is not used, create a new
   SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
   original with it and return it.  If there is no need to re-map, return NULL.
   ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments.  */

static tree
replace_removed_params_ssa_names (tree old_name, gimple *stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree decl, repl, new_name;

  if (TREE_CODE (old_name) != SSA_NAME)
    return NULL;

  decl = SSA_NAME_VAR (old_name);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return NULL;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return NULL;

  repl = get_replaced_param_substitute (adj);
  new_name = make_ssa_name (repl, stmt);
  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
    = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, old_name, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, new_name, 0);
      fprintf (dump_file, "\n");
    }

  replace_uses_by (old_name, new_name);
  return new_name;
}

/* If the statement STMT contains any expressions that need to be replaced
   with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
   type incompatibilities (GSI is used to accommodate conversion statements
   and must point to the statement).  Return true iff the statement was
   modified.  */
static bool
sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
                                            NULL);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}

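/* For instance (illustrative GIMPLE), when a replacement gives the RHS a
   different but same-sized type, the statement becomes

     lhs = VIEW_CONVERT_EXPR<type-of-lhs>(new_rhs);

   whereas a CONSTRUCTOR RHS is replaced by a zero constant or by an empty
   CONSTRUCTOR of the LHS type, since V_C_Es of constructors are problematic
   (PR 42714).  */
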
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */
static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
          tree new_lhs, old_lhs = gimple_phi_result (phi);
          new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
          if (new_lhs)
            {
              gimple_phi_set_result (phi, new_lhs);
              release_ssa_name (old_lhs);
            }
        }

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
              if (*t != NULL_TREE)
                modified |= ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= ipa_modify_expr (t, false, adjustments);
                }
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, true, adjustments);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, false, adjustments);
                  }
              }
              break;

            default:
              break;
            }

          def_operand_p defp;
          ssa_op_iter iter;
          FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
            {
              tree old_def = DEF_FROM_PTR (defp);
              if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
                                                                   adjustments))
                {
                  SET_DEF (defp, new_def);
                  release_ssa_name (old_def);
                  modified = true;
                }
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */
static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
        continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            if (gimple_clobber_p (stmt))
              {
                gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
                unlink_stmt_vdef (stmt);
                gsi_remove (&cgsi, true);
                release_defs (stmt);
                continue;
              }
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy) =
            BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}

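/* Illustrative debug-info effect (invented GIMPLE): for a removed parameter i
   whose default definition only fed debug statements, a source bind

     # DEBUG D#1 s=> i
     # DEBUG i => D#1

   is emitted at the start of the function, so the debugger can still show i
   instead of reporting it as optimized out.  */
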
/* Return false if all callers have at least as many actual arguments as there
   are formal parameters in the current function and their types match;
   return true otherwise.  */

static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
                                          void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}

/* Return false if all callers have a vuse attached to their call statements;
   return true otherwise.  */

static bool
some_callers_have_no_vuse_p (struct cgraph_node *node,
                             void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
      return true;

  return false;
}

/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
                 xstrdup_for_dump (cs->caller->name ()),
                 cs->caller->order,
                 xstrdup_for_dump (cs->callee->name ()),
                 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
                                     &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          tree call_fndecl;
          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (!stmt)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
                                                   NULL, false, NULL, NULL,
                                                   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}

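/* E.g. (names illustrative): for a function foo the clone created above is
   typically named foo.isra.0 (from the "isra" suffix passed to the cloning
   machinery); after convert_callers, a call foo (&x) appears as
   foo.isra.0 (x) in the callers' bodies.  */
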
/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  bool has_callers;
  bool bad_arg_alignment;
  bool has_thunk;
};

/* If NODE has a caller, mark that fact in DATA which is a pointer to
   ipa_sra_check_caller_data.  Also check all aggregate arguments in all known
   calls if they are unit aligned and if not, set the appropriate flag in DATA
   too.  */
static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
        {
          iscc->has_thunk = true;
          return true;
        }
      gimple *call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
        {
          tree arg = gimple_call_arg (call_stmt, i);
          if (is_gimple_reg (arg))
            continue;

          tree offset;
          HOST_WIDE_INT bitsize, bitpos;
          machine_mode mode;
          int unsignedp, reversep, volatilep = 0;
          get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
                               &unsignedp, &reversep, &volatilep, false);
          if (bitpos % BITS_PER_UNIT)
            {
              iscc->bad_arg_alignment = true;
              return true;
            }
        }
    }

  return false;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!node->can_be_local_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
        fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Always inline function will be inlined "
                 "anyway.\n");
      return false;
    }

  struct ipa_sra_check_caller_data iscc;
  memset (&iscc, 0, sizeof (iscc));
  node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
  if (!iscc.has_callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (iscc.bad_arg_alignment)
    {
      if (dump_file)
        fprintf (dump_file,
                 "A function call has an argument with non-unit alignment.\n");
      return false;
    }

  if (iscc.has_thunk)
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with no VUSE attached "
                 "to a call stmt.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}