/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2013 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
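
/* For illustration only (not part of the pass): given source code like the
   following, SRA is the transformation that lets both fields live in
   registers by rewriting the member accesses into uses of two independent
   scalar variables:

     struct point { int x; int y; };

     int
     sum_point (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   is conceptually turned into

     int
     sum_point (void)
     {
       int p$x = 1;
       int p$y = 2;
       return p$x + p$y;
     }

   The names with '$' follow the fancy-name scheme of make_fancy_name below;
   the exact GIMPLE produced depends on the rest of the pipeline.  */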

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "ipa-prop.h"
#include "statistics.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
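
/* As a sketch of the invariants above (illustration only, not part of the
   pass): for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   an access tree for a fully used variable of type struct outer, with
   <offset, size> in bits and assuming a 32-bit int, could look like

     <0, 96>  the whole variable
       <0, 64>   in          (first_child of the root)
         <0, 32>   in.a      (first_child of <0, 64>)
         <32, 32>  in.b      (next_sibling of in.a)
       <64, 32>  c           (next_sibling of <0, 64>)

   Every child lies within its parent and siblings are sorted by offset.  */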

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static struct pointer_map_t *base_access_vec;

/* Set of candidates.  */
static bitmap candidate_bitmap;
static htab_t candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  struct tree_decl_minimal t;
  t.uid = uid;
  return (tree) htab_find_with_hash (candidates, &t, uid);
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Statistics gathered by the pass.  */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(vec<access_p> **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
			    uid_decl_map_hash, uid_decl_map_eq, NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  vec<access_p> *access_vec = (vec<access_p> *) *value;
  vec_free (access_vec);
  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  htab_delete (candidates);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    htab_clear_slot (candidates,
		     htab_find_slot_with_hash (candidates, decl,
					       DECL_UID (decl), NO_INSERT));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!host_integerp (bit_position (fld), 0))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially final
   one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  vec<access_p> *v;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    v = (vec<access_p> *) *slot;
  else
    vec_alloc (v, 32);

  v->safe_push (access);

  *((vec<access_p> **)
    pointer_map_insert (base_access_vec, base)) = v;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}

/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */

static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
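
/* Illustration only: for accesses with <offset, size> pairs <0, 64>, <0, 32>
   and <32, 32>, the sorted order is <0, 64>, <0, 32>, <32, 32>.  Equal
   offsets put the bigger access first, so a group representative always
   precedes the accesses it covers.  */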

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */

    default:
      break;
    }
}

/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
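
/* Illustration only: for the expression p.in.a the scheme above yields the
   name "p$in$a", and for a[4].f it yields "a$4$f".  These become the
   user-visible names of the "SR" replacement variables in dumps and debug
   information.  */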

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}

/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
1525 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1526 create statements and return s NULL instead. This function also ignores
1527 alignment issues and so its results should never end up in non-debug
1531 build_debug_ref_for_model (location_t loc
, tree base
, HOST_WIDE_INT offset
,
1532 struct access
*model
)
1534 HOST_WIDE_INT base_offset
;
1537 if (TREE_CODE (model
->expr
) == COMPONENT_REF
1538 && DECL_BIT_FIELD (TREE_OPERAND (model
->expr
, 1)))
1541 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
1544 if (TREE_CODE (base
) == MEM_REF
)
1546 off
= build_int_cst (TREE_TYPE (TREE_OPERAND (base
, 1)),
1547 base_offset
+ offset
/ BITS_PER_UNIT
);
1548 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1), off
);
1549 base
= unshare_expr (TREE_OPERAND (base
, 0));
1553 off
= build_int_cst (reference_alias_ptr_type (base
),
1554 base_offset
+ offset
/ BITS_PER_UNIT
);
1555 base
= build_fold_addr_expr (unshare_expr (base
));
1558 return fold_build2_loc (loc
, MEM_REF
, model
->type
, base
, off
);

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !host_integerp (tr_pos, 1))
		continue;
	      pos = TREE_INT_CST_LOW (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = TREE_INT_CST_LOW (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
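
/* Illustration only: for a variable declared as

     struct { int a; int b; } v[4];

   a request at bit offset 96 with an int exp_type builds v[1].b: one
   ARRAY_REF step consumes 64 bits per element (index 96 / 64 = 1), and a
   COMPONENT_REF is then built for the field at the remaining 32-bit
   offset.  */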

/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}

/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  void **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!host_integerp (TYPE_SIZE (type), 1))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
  *slot = (void *) var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations with a type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}

/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;
      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}

/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type, NULL);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    repl = create_tmp_var (access->type, "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}

/* Return the scalar replacement of ACCESS, which must already exist.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}

/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |  Written to in an assignment statement
   |  |
   |  |  Access read as scalar _once_
   |  |  |
   |  |  |  Read in an assignment statement
   |  |  |  |
   |  |  |  |  Scalarize  Comment
-----------------------------------------------------------------------------
   0  0  0  0             No access for the scalar
   0  0  0  1             No access for the scalar
   0  0  1  0  No         Single read - won't help
   0  0  1  1  No         The same case
   0  1  0  0             No access for the scalar
   0  1  0  1             No access for the scalar
   0  1  1  0  Yes        s = *g; return s.i;
   0  1  1  1  Yes        The same case as above
   1  0  0  0  No         Won't help
   1  0  0  1  Yes        s.i = 1; *g = s;
   1  0  1  0  Yes        s.i = 5; g = s.i;
   1  0  1  1  Yes        The same case as above
   1  1  0  0  No         Won't help.
   1  1  0  1  Yes        s.i = 1; *g = s;
   1  1  1  0  Yes        s = *g; return s.i;
   1  1  1  1  Yes        Any of the above yeses  */
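/* For illustration only: with

     struct S { int i; float f; } *g;
     int f (void) { struct S s = *g; return s.i; }

   the access to s.i matches row "0 1 1 0" of the table above: s is
   written to in an assignment statement and s.i is read as a scalar
   once, so creating a replacement for s.i is considered beneficial.  */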
static bool
analyze_access_subtree (struct access *root, struct access *parent,
                        bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
        root->grp_read = 1;
      if (parent->grp_assignment_read)
        root->grp_assignment_read = 1;
      if (parent->grp_write)
        root->grp_write = 1;
      if (parent->grp_assignment_write)
        root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
        root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
                                             allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
        covered_to += child->size;
      else
        hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
          || ((root->grp_scalar_read || root->grp_assignment_read)
              && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
         For integral types this means the precision has to match.
         Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
          && (TREE_CODE (root->type) != INTEGER_TYPE
              || TYPE_PRECISION (root->type) != root->size)
          /* But leave bitfield accesses alone.  */
          && (TREE_CODE (root->expr) != COMPONENT_REF
              || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
        {
          tree rt = root->type;
          gcc_assert ((root->offset % BITS_PER_UNIT) == 0
                      && (root->size % BITS_PER_UNIT) == 0);
          root->type = build_nonstandard_integer_type (root->size,
                                                       TYPE_UNSIGNED (rt));
          root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
                                             root->base, root->offset,
                                             root->type, NULL, false);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Changing the type of a replacement for ");
              print_generic_expr (dump_file, root->base, 0);
              fprintf (dump_file, " offset: %u, size: %u ",
                       (unsigned) root->offset, (unsigned) root->size);
              fprintf (dump_file, " to an integer.\n");
            }
        }

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
          && scalar && !root->first_child
          && (root->grp_scalar_write || root->grp_assignment_write)
          && !bitmap_bit_p (cannot_scalarize_away_bitmap,
                            DECL_UID (root->base)))
        {
          gcc_checking_assert (!root->grp_scalar_read
                               && !root->grp_assignment_read);
          sth_created = true;
          if (MAY_HAVE_DEBUG_STMTS)
            {
              root->grp_to_be_debug_replaced = 1;
              root->replacement_decl = create_access_replacement (root);
            }
        }

      if (covered_to < limit)
        hole = true;
      if (scalar)
        root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
        ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset OFFSET and with size
   SIZE would conflict with an already existing one.  If exactly such a child
   already exists in LACC, store a pointer to it in EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
                              HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
        {
          *exact_match = child;
          break;
        }

      if (child->offset < norm_offset + size
          && child->offset + child->size > norm_offset)
        return true;
    }

  return false;
}
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
                                HOST_WIDE_INT new_offset)
{
  struct access *access;
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
                                           model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
                                  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
        {
          tree t = lacc->base;

          lacc->type = racc->type;
          if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
                                                  lacc->offset, racc->type))
            lacc->expr = t;
          else
            {
              lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
                                                lacc->base, lacc->offset,
                                                racc, NULL, false);
              lacc->grp_no_warning = true;
            }
        }
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
        continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
                                        &new_acc))
        {
          if (new_acc)
            {
              rchild->grp_hint = 1;
              new_acc->grp_hint |= new_acc->grp_read;
              if (rchild->first_child)
                ret |= propagate_subaccesses_across_link (new_acc, rchild);
            }
          continue;
        }

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
        {
          ret = true;
          if (racc->first_child)
            propagate_subaccesses_across_link (new_acc, rchild);
        }
    }

  return ret;
}
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
        {
          struct access *lacc = link->lacc;

          if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
            continue;
          lacc = lacc->group_representative;
          if (propagate_subaccesses_across_link (lacc, racc)
              && lacc->first_link)
            add_access_to_work_queue (lacc);
        }
    }
}
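/* For illustration only: given

     struct S a, b;
     ... b.x is accessed somewhere ...
     a = b;

   the subaccess representing b.x on the right-hand side of the
   assignment is propagated across the assign link to a, creating an
   artificial child access for a.x so that the aggregate copy can later
   be performed replacement by replacement.  */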
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i, max_total_scalarization_size;

  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
    * MOVE_RATIO (optimize_function_for_speed_p (cfun));

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
        && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
        tree var = candidate (i);

        if (TREE_CODE (var) == VAR_DECL
            && type_consists_of_records_p (TREE_TYPE (var)))
          {
            if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
                <= max_total_scalarization_size)
              {
                completely_scalarize_var (var);
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fprintf (dump_file, "Will attempt to totally scalarize ");
                    print_generic_expr (dump_file, var, 0);
                    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
                  }
              }
            else if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "Too big to totally scalarize: ");
                print_generic_expr (dump_file, var, 0);
                fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
              }
          }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
        disqualify_candidate (var,
                              "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
        {
          res++;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\nAccess trees for ");
              print_generic_expr (dump_file, var, 0);
              fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
              dump_access_tree (dump_file, access);
              fprintf (dump_file, "\n");
            }
        }
      else
        disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
                         HOST_WIDE_INT top_offset,
                         HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
                         gimple_stmt_iterator *gsi, bool write,
                         bool insert_after, location_t loc)
{
  do
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
        return;

      if (access->grp_to_be_replaced
          && (chunk_size == 0
              || access->offset + access->size > start_offset))
        {
          tree expr, repl = get_access_replacement (access);
          gimple stmt;

          expr = build_ref_for_model (loc, agg, access->offset - top_offset,
                                      access, gsi, insert_after);

          if (write)
            {
              if (access->grp_partial_lhs)
                expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (repl, expr);
            }
          else
            {
              TREE_NO_WARNING (repl) = 1;
              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 !insert_after,
                                                 insert_after ? GSI_NEW_STMT
                                                 : GSI_SAME_STMT);
              stmt = gimple_build_assign (expr, repl);
            }
          gimple_set_location (stmt, loc);

          if (insert_after)
            gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
          else
            gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          update_stmt (stmt);
          sra_stats.subtree_copies++;
        }
      else if (write
               && access->grp_to_be_debug_replaced
               && (chunk_size == 0
                   || access->offset + access->size > start_offset))
        {
          gimple ds;
          tree drhs = build_debug_ref_for_model (loc, agg,
                                                 access->offset - top_offset,
                                                 access);
          ds = gimple_build_debug_bind (get_access_replacement (access),
                                        drhs, gsi_stmt (*gsi));
          if (insert_after)
            gsi_insert_after (gsi, ds, GSI_NEW_STMT);
          else
            gsi_insert_before (gsi, ds, GSI_SAME_STMT);
        }

      if (access->first_child)
        generate_subtree_copies (access->first_child, agg, top_offset,
                                 start_offset, chunk_size, gsi,
                                 write, insert_after, loc);

      access = access->next_sibling;
    }
  while (access);
}
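/* For illustration only: suppose s has replacements SR_i (s.i) and SR_f
   (s.f) and its aggregate form is needed before a call.  Refreshing the
   aggregate (WRITE and INSERT_AFTER false) emits roughly

     s.i = SR_i;
     s.f = SR_f;
     use_s (&s);

   while copying in the opposite direction (WRITE true) generates the
   mirrored assignments.  The names SR_i and SR_f are made up here.  */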
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is
   the root of the subtree to be processed.  GSI is the statement iterator
   used for inserting statements which are added after the current statement
   if INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
                        bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gimple stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
                                  build_zero_cst (access->type));
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
                                           build_zero_cst (access->type),
                                           gsi_stmt (*gsi));
      if (insert_after)
        gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);

  loc = gimple_location (gsi_stmt (*gsi));
  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
         access expression to extract the scalar component afterwards.
         This happens if scalarizing a function return value or parameter
         like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
         gcc.c-torture/compile/20011217-1.c.

         We also want to use this when accessing a complex or vector which can
         be accessed as a different type too, potentially creating a need for
         type conversion (see PR42196) and when scalarized unions are involved
         in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
        {
          tree ref;

          ref = build_ref_for_model (loc, access->base, access->offset, access,
                                     NULL, false);

          if (write)
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
                                                false, GSI_NEW_STMT);
              stmt = gimple_build_assign (repl, ref);
              gimple_set_location (stmt, loc);
              gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              gimple stmt;

              if (access->grp_partial_lhs)
                repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              stmt = gimple_build_assign (ref, repl);
              gimple_set_location (stmt, loc);
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
            }
        }
      else
        *expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gimple ds = gimple_build_debug_bind (get_access_replacement (access),
                                           NULL_TREE,
                                           gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
          && host_integerp (TREE_OPERAND (bfr, 1), 1)
          && host_integerp (TREE_OPERAND (bfr, 2), 1))
        {
          chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
          start_offset = access->offset
            + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
        }
      else
        start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, access->base, 0,
                               start_offset, chunk_size, gsi, write, write,
                               loc);
    }
  return true;
}
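/* For illustration only: when the same location is accessed both as the
   replacement's type and as a different one (e.g. an int member of a
   union also read as float), the substitution above cannot be textual;
   instead a conversion statement like

     SR_u = u.f;        (on reads)
     u.f = SR_u;        (on writes)

   is emitted next to the original statement, keeping the differently
   typed memory reference around to do the reinterpretation.  SR_u is a
   made-up replacement name.  */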
/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
                                  SRA_UDH_RIGHT, /* Data flushed to the RHS.  */
                                  SRA_UDH_LEFT }; /* Data flushed to the LHS.  */
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static enum unscalarized_data_handling
handle_unscalarized_data_in_subtree (struct access *top_racc,
                                     gimple_stmt_iterator *gsi)
{
  if (top_racc->grp_unscalarized_data)
    {
      generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
                               gsi, false, false,
                               gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_RIGHT;
    }
  else
    {
      tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
      generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
                               0, 0, gsi, false, false,
                               gimple_location (gsi_stmt (*gsi)));
      return SRA_UDH_LEFT;
    }
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the TOP_RACC subtree.
   If that is not possible, refresh the TOP_RACC base aggregate and load the
   accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
   copied.  NEW_GSI is stmt iterator used for statement insertions after the
   original assignment, OLD_GSI is used to insert statements before the
   assignment.  *REFRESHED keeps the information whether we have needed to
   refresh replacements of the LHS and from which side of the assignments this
   takes place.  */

static void
load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
                                 HOST_WIDE_INT left_offset,
                                 gimple_stmt_iterator *old_gsi,
                                 gimple_stmt_iterator *new_gsi,
                                 enum unscalarized_data_handling *refreshed)
{
  location_t loc = gimple_location (gsi_stmt (*old_gsi));
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;

      if (lacc->grp_to_be_replaced)
        {
          struct access *racc;
          gimple stmt;
          tree rhs;

          racc = find_access_in_subtree (top_racc, offset, lacc->size);
          if (racc && racc->grp_to_be_replaced)
            {
              rhs = get_access_replacement (racc);
              if (!useless_type_conversion_p (lacc->type, racc->type))
                rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);

              if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
                rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
                                                true, GSI_SAME_STMT);
            }
          else
            {
              /* No suitable access on the right hand side, need to load from
                 the aggregate.  See if we have to update it first...  */
              if (*refreshed == SRA_UDH_NONE)
                *refreshed = handle_unscalarized_data_in_subtree (top_racc,
                                                                  old_gsi);

              if (*refreshed == SRA_UDH_LEFT)
                rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
                                           new_gsi, true);
              else
                rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
                                           new_gsi, true);
              if (lacc->grp_partial_lhs)
                rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
                                                false, GSI_NEW_STMT);
            }

          stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
          gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
          gimple_set_location (stmt, loc);
          update_stmt (stmt);
          sra_stats.subreplacements++;
        }
      else
        {
          if (*refreshed == SRA_UDH_NONE
              && lacc->grp_read && !lacc->grp_covered)
            *refreshed = handle_unscalarized_data_in_subtree (top_racc,
                                                              old_gsi);
          if (lacc && lacc->grp_to_be_debug_replaced)
            {
              gimple ds;
              tree drhs;
              struct access *racc = find_access_in_subtree (top_racc, offset,
                                                            lacc->size);

              if (racc && racc->grp_to_be_replaced)
                {
                  if (racc->grp_write)
                    drhs = get_access_replacement (racc);
                  else
                    drhs = NULL;
                }
              else if (*refreshed == SRA_UDH_LEFT)
                drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
                                                  lacc);
              else if (*refreshed == SRA_UDH_RIGHT)
                drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
                                                  lacc);
              else
                drhs = NULL_TREE;
              ds = gimple_build_debug_bind (get_access_replacement (lacc),
                                            drhs, gsi_stmt (*old_gsi));
              gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
            }
        }

      if (lacc->first_child)
        load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
                                         old_gsi, new_gsi, refreshed);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
                             SRA_AM_MODIFIED,  /* stmt changed but not
                                                  removed */
                             SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
   to the assignment and GSI is the statement iterator pointing at it.  Returns
   the same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (*stmt);
  struct access *acc;
  location_t loc;

  acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;

  if (gimple_clobber_p (*stmt))
    {
      /* Remove clobbers of fully scalarized variables, otherwise
         do nothing.  */
      if (acc->grp_covered)
        {
          unlink_stmt_vdef (*stmt);
          gsi_remove (gsi, true);
          release_defs (*stmt);
          return SRA_AM_REMOVED;
        }
      else
        return SRA_AM_NONE;
    }

  loc = gimple_location (*stmt);
  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
         following should handle it gracefully.  */
      if (access_has_children_p (acc))
        generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
                                 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (*stmt);
      gsi_remove (gsi, true);
      release_defs (*stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
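/* For illustration only: a GIMPLE assignment from an empty CONSTRUCTOR,
   such as the one produced for "struct S s = { 0 };", is rewritten on a
   fully covered access tree into per-replacement stores

     SR_i = 0;
     SR_f = 0.0;

   and the aggregate store itself is deleted (SRA_AM_REMOVED).  With
   uncovered parts, the zeroing statements are added after the original
   statement instead, which is kept (SRA_AM_MODIFIED).  */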
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
                       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

static inline bool
contains_bitfld_comp_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static inline bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
          || (TREE_CODE (ref) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (*stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (*stmt);
  rhs = gimple_assign_rhs1 (*stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
                                          gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
                                           gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (*stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (*stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
        force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
           && !racc->grp_unscalarized_data
           && TREE_CODE (lhs) == SSA_NAME
           && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        {
          /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
             ???  This should move to fold_stmt which we simply should
             call after building a VIEW_CONVERT_EXPR here.  */
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
              && !contains_bitfld_comp_ref_p (lhs))
            {
              lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
              gimple_assign_set_lhs (*stmt, lhs);
            }
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
                   && !contains_vce_or_bfcref_p (rhs))
            rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
            {
              rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
                                     rhs);
              if (is_gimple_reg_type (TREE_TYPE (lhs))
                  && TREE_CODE (lhs) != SSA_NAME)
                force_gimple_rhs = true;
            }
        }
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
        {
          if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
              && !contains_vce_or_bfcref_p (drhs))
            drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
          if (drhs
              && !useless_type_conversion_p (TREE_TYPE (dlhs),
                                             TREE_TYPE (drhs)))
            drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                    TREE_TYPE (dlhs), drhs);
        }
      gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing its scalarized components into it, leave the original
     statement there to do the copying and then load the scalar replacements
     of the LHS.  This is what the first branch does.  */

  if (modify_this_stmt
      || gimple_has_volatile_ops (*stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs))
    {
      if (access_has_children_p (racc))
        generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
                                 gsi, false, false, loc);
      if (access_has_children_p (lacc))
        generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
                                 gsi, true, true, loc);
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
         lest we insert the subtree copies in the middle of the gimplified
         sequence.  */
      if (force_gimple_rhs)
        rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
                                        true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (*stmt) != rhs)
        {
          modify_this_stmt = true;
          gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
          gcc_assert (*stmt == gsi_stmt (orig_gsi));
        }

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
          && access_has_children_p (racc)
          /* When an access represents an unscalarizable region, it usually
             represents accesses with variable offset and thus must not be used
             to generate new memory accesses.  */
          && !lacc->grp_unscalarizable_region
          && !racc->grp_unscalarizable_region)
        {
          gimple_stmt_iterator orig_gsi = *gsi;
          enum unscalarized_data_handling refreshed;

          if (lacc->grp_read && !lacc->grp_covered)
            refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
          else
            refreshed = SRA_UDH_NONE;

          load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
                                           &orig_gsi, gsi, &refreshed);
          if (refreshed != SRA_UDH_RIGHT)
            {
              gsi_next (gsi);
              unlink_stmt_vdef (*stmt);
              gsi_remove (&orig_gsi, true);
              release_defs (*stmt);
              sra_stats.deleted++;
              return SRA_AM_REMOVED;
            }
        }
      else
        {
          if (access_has_children_p (racc)
              && !racc->grp_unscalarized_data)
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Removing load: ");
                  print_gimple_stmt (dump_file, *stmt, 0, 0);
                }
              generate_subtree_copies (racc->first_child, lhs,
                                       racc->offset, 0, 0, gsi,
                                       false, false, loc);
              gcc_assert (*stmt == gsi_stmt (*gsi));
              unlink_stmt_vdef (*stmt);
              gsi_remove (gsi, true);
              release_defs (*stmt);
              sra_stats.deleted++;
              return SRA_AM_REMOVED;
            }
          /* Restore the aggregate RHS from its components so the
             prevailing aggregate copy does the right thing.  */
          if (access_has_children_p (racc))
            generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
                                     gsi, false, false, loc);
          /* Re-load the components of the aggregate copy destination.
             But use the RHS aggregate to load from to expose more
             optimization opportunities.  */
          if (access_has_children_p (lacc))
            generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
                                     0, 0, gsi, true, true, loc);
        }

      return SRA_AM_NONE;
    }
}
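/* For illustration only: for an aggregate copy "a = b" where both sides
   are scalarized and nothing unscalarized remains in b, the code above
   loads the LHS replacements directly from the RHS replacements,
   roughly

     a$i = b$i;
     a$f = b$f;

   and removes the original statement (SRA_AM_REMOVED).  The "$" names
   are made up here; the pass derives real names via make_fancy_name.  */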
/* Traverse the function body and apply all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum assignment_mod_result assign_result;
          bool modified = false, deleted = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_modify_expr (t, &gsi, false);
              break;

            case GIMPLE_ASSIGN:
              assign_result = sra_modify_assign (&stmt, &gsi);
              modified |= assign_result == SRA_AM_MODIFIED;
              deleted = assign_result == SRA_AM_REMOVED;
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_modify_expr (t, &gsi, false);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_modify_expr (t, &gsi, true);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_modify_expr (t, &gsi, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_modify_expr (t, &gsi, true);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          if (!deleted)
            gsi_next (&gsi);
        }
    }

  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
        continue;

      for (access = (*access_vec)[0];
           access;
           access = access->next_grp)
        generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
                                 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
}
3372 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3373 it reveals there are components of some aggregates to be scalarized, it runs
3374 the required transformations. */
3376 perform_intra_sra (void)
3381 if (!find_var_candidates ())
3384 if (!scan_function ())
3387 if (!analyze_all_variable_accesses ())
3390 if (sra_modify_function_body ())
3391 ret
= TODO_update_ssa
| TODO_cleanup_cfg
;
3393 ret
= TODO_update_ssa
;
3394 initialize_parameter_reductions ();
3396 statistics_counter_event (cfun
, "Scalar replacements created",
3397 sra_stats
.replacements
);
3398 statistics_counter_event (cfun
, "Modified expressions", sra_stats
.exprs
);
3399 statistics_counter_event (cfun
, "Subtree copy stmts",
3400 sra_stats
.subtree_copies
);
3401 statistics_counter_event (cfun
, "Subreplacement stmts",
3402 sra_stats
.subreplacements
);
3403 statistics_counter_event (cfun
, "Deleted stmts", sra_stats
.deleted
);
3404 statistics_counter_event (cfun
, "Separate LHS and RHS handling",
3405 sra_stats
.separate_lhs_rhs_handling
);
3408 sra_deinitialize ();
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}


struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_intra_sra,			/* gate */
  early_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_intra_sra,			/* gate */
  late_intra_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
          && (!(name = ssa_default_def (cfun, parm))
              || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
        continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          while (handled_component_p (lhs))
            lhs = TREE_OPERAND (lhs, 0);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_OPERAND (lhs, 0) == name
              && integer_zerop (TREE_OPERAND (lhs, 1))
              && types_compatible_p (TREE_TYPE (lhs),
                                     TREE_TYPE (TREE_TYPE (name)))
              && !TREE_THIS_VOLATILE (lhs))
            uses_ok++;
        }
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          while (handled_component_p (rhs))
            rhs = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (rhs) == MEM_REF
              && TREE_OPERAND (rhs, 0) == name
              && integer_zerop (TREE_OPERAND (rhs, 1))
              && types_compatible_p (TREE_TYPE (rhs),
                                     TREE_TYPE (TREE_TYPE (name)))
              && !TREE_THIS_VOLATILE (rhs))
            uses_ok++;
        }
      else if (is_gimple_call (stmt))
        {
          unsigned i;
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              while (handled_component_p (arg))
                arg = TREE_OPERAND (arg, 0);
              if (TREE_CODE (arg) == MEM_REF
                  && TREE_OPERAND (arg, 0) == name
                  && integer_zerop (TREE_OPERAND (arg, 1))
                  && types_compatible_p (TREE_TYPE (arg),
                                         TREE_TYPE (TREE_TYPE (name)))
                  && !TREE_THIS_VOLATILE (arg))
                uses_ok++;
            }
        }

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
        --uses_ok;

      if (uses_ok != 0)
        ret = true;

      if (ret)
        BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
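/* For illustration only:

     int f (int *p) { return *p; }        (only MEM_REF uses: false)
     int g (int *p) { return h (p); }     (p itself escapes: true)

   In the first case every use of p's default definition is a
   dereference of the expected type, so IPA-SRA may still split the
   parameter; in the second the pointer value itself is used directly,
   which precludes it.  */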
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      void **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
          || TREE_ADDRESSABLE (parm)
          || (!is_gimple_reg_type (type) && is_va_list_type (type)))
        continue;

      if (is_unused_scalar_param (parm))
        {
          ret = true;
          continue;
        }

      if (POINTER_TYPE_P (type))
        {
          type = TREE_TYPE (type);

          if (TREE_CODE (type) == FUNCTION_TYPE
              || TYPE_VOLATILE (type)
              || (TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
              || !is_gimple_reg (parm)
              || is_va_list_type (type)
              || ptr_parm_has_direct_uses (parm))
            continue;
        }
      else if (!AGGREGATE_TYPE_P (type))
        continue;

      if (!COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || (AGGREGATE_TYPE_P (type)
              && type_internals_preclude_sra_p (type, &msg)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = htab_find_slot_with_hash (candidates, parm,
                                       DECL_UID (parm), INSERT);
      *slot = (void *) parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
          print_generic_expr (dump_file, parm, 0);
          fprintf (dump_file, "\n");
        }
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
                     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
           repr;
           repr = repr->next_grp)
        {
          struct access *access;
          bitmap visited;
          ao_ref ar;

          if (no_accesses_p (repr))
            continue;
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
              || repr->grp_maybe_modified)
            continue;

          ao_ref_init (&ar, repr->expr);
          visited = BITMAP_ALLOC (NULL);
          for (access = repr; access; access = access->next_sibling)
            {
              /* All accesses are read ones, otherwise grp_maybe_modified would
                 be trivially set.  */
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
                                  mark_maybe_modified, repr, &visited);
              if (repr->grp_maybe_modified)
                break;
            }
          BITMAP_FREE (visited);
        }
    }
}
/* Propagate distances in bb_dereferences in the opposite direction to the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  vec<basic_block> queue;
  basic_block bb;

  queue.create (last_basic_block_for_function (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR);
  FOR_EACH_BB (bb)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
        continue;

      for (i = 0; i < func_param_count; i++)
        {
          int idx = bb->index * func_param_count + i;
          bool first = true;
          HOST_WIDE_INT inh = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              int succ_idx = e->dest->index * func_param_count + i;

              if (e->src == EXIT_BLOCK_PTR)
                continue;

              if (first)
                {
                  first = false;
                  inh = bb_dereferences [succ_idx];
                }
              else if (bb_dereferences [succ_idx] < inh)
                inh = bb_dereferences [succ_idx];
            }

          if (!first && bb_dereferences[idx] < inh)
            {
              bb_dereferences[idx] = inh;
              change = true;
            }
        }

      if (change && !bitmap_bit_p (final_bbs, bb->index))
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            if (e->src->aux)
              continue;

            e->src->aux = e->src;
            queue.quick_push (e->src);
          }
    }

  queue.release ();
}
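/* For illustration only: with

     int f (int *p, int c) { if (c) return *p; return 0; }

   the block dereferencing *p records a distance of at least the size of
   int, but the entry block inherits the minimum over its successors'
   distances, which is 0 along the path that never reads *p.  The caller
   therefore cannot be trusted to dereference p unconditionally.  */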
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR)
        {
          int i;
          for (i = 0; i < func_param_count; i++)
            {
              int idx = bb->index * func_param_count + i;
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
            }
        }
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine which (parts of) parameters passed by reference and not assigned
   to are not certainly dereferenced in this function, so that the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   A maximum dereference "distance" (the offset plus size of the accessed
   part) is calculated for each pointer parameter, rather than a simple
   boolean, to handle cases when only a fraction of the whole aggregate is
   allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table before propagation:\n",
                             bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table after propagation:\n",
                             bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
        continue;

      do
        {
          if ((repr->offset + repr->size) > bb_dereferences[idx])
            repr->grp_not_necessarilly_dereferenced = 1;
          repr = repr->next_grp;
        }
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such a parameter should
   be turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
        return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
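/* For illustration only: for

     int f (const int *p) { return *p + *p; }

   all accesses through p are reads, so this function returns a
   representative that, pending the alias and dereference-legality
   checks, lets IPA-SRA convert the parameter to one passed by value,
   roughly "int f (int p)".  */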
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
          || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
        return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
         nasty overlap which would preclude us from breaking this parameter
         apart.  */

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset)
            {
              /* All or nothing law for parameters.  */
              if (access->offset + access->size > ac2->offset)
                return NULL;
              else
                break;
            }
          else if (ac2->size != access->size)
            return NULL;

          if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
              || (ac2->type != access->type
                  && (TREE_ADDRESSABLE (ac2->type)
                      || TREE_ADDRESSABLE (access->type)))
              || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
            return NULL;

          modification |= ac2->write;
          ac2->group_representative = access;
          ac2->next_sibling = access->next_sibling;
          access->next_sibling = ac2;
          j++;
        }

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
        *ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
  else
    agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
        return 0;

      /* Do not decompose a non-BLKmode param in a way that would
         create BLKmode params.  Especially for by-reference passing
         (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
          && TYPE_MODE (repr->type) == BLKmode)
        return 0;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
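/* For illustration only: with

     struct big { int a; int b; char pad[1024]; };
     int f (struct big *p) { return p->a + p->b; }

   the two accessed components total 64 bits, far below both the
   aggregate size and the PARAM_IPA_SRA_PTR_GROWTH_FACTOR limit, so the
   reduction is accepted and f can become roughly "int f (int a, int b)".
   Sizes here assume 32-bit int.  */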
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                           MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          representatives.quick_push (&no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          representatives.quick_push (repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          representatives.quick_push (repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
				       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment adj;

	  memset (&adj, 0, sizeof (adj));
	  adj.base_index = get_param_index (parm, parms);
	  adj.base = parm;
	  if (!repr)
	    adj.copy_param = 1;
	  else
	    adj.remove_param = 1;
	  adjustments.quick_push (adj);
	}
      else
	{
	  struct ipa_parm_adjustment adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      memset (&adj, 0, sizeof (adj));
	      gcc_assert (repr->base == parm);
	      adj.base_index = index;
	      adj.base = repr->base;
	      adj.type = repr->type;
	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj.offset = repr->offset;
	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			    && (repr->grp_maybe_modified
				|| repr->grp_not_necessarilly_dereferenced));
	      adjustments.quick_push (adj);
	    }
	}
    }
  parms.release ();
  return adjustments;
}
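
/* For illustration, consider a hypothetical function

     int g (int a, struct S s);

   where struct S has two fields, x and y, and only those fields of S are
   accessed.  The adjustment vector built above would then hold one
   copy_param entry for A (base_index 0) followed by two component entries
   for S (both with base_index 1, at the bit offsets of x and y), i.e. an
   ADJUSTMENTS_COUNT of three.  */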
/* Analyze the collected accesses and produce a plan what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		representatives[i] = NULL;
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  representatives[i] = NULL;
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
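
/* A worked (hypothetical) count for the loop above: for

     int f (int dead, struct S s);

   where DEAD is never used and two fields of S are accessed, the removal
   of DEAD contributes one adjustment and the reduction of S two, so
   turn_representatives_into_adjustments is called with an
   adjustments_count of three and PROCEED is set.  */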
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (!adj->copy_param && adj->base == base)
	return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME, together with
   all of its uses, with one relating to a created VAR_DECL and return true.
   ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
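
/* For example (hypothetical, simplified GIMPLE): if parameter I is being
   removed because its value is unused, a definition such as

     i_1 = PHI <i_2(D)(2), i_3(4)>

   gets its result replaced by a fresh SSA name of a new "ISR" temporary,
   say ISR.8_1, and every use of i_1 is redirected to that name, so no SSA
   name of the removed PARM_DECL survives in the function body.  */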
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of
   the current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
		     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = adjustments.length ();

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = &adjustments[i];

      if (adj->base == base &&
	  (adj->offset == offset || adj->remove_param))
	{
	  cand = adj;
	  break;
	}
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
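
/* A hypothetical illustration: if parameter P of type struct S * has been
   reduced to a new scalar parameter ISRA.3 holding the value of p->x, an
   expression such as p_2(D)->x matches the adjustment for (P, offset of x)
   and is replaced by ISRA.3 directly, or by a MEM_REF of it when the
   reduction is still passed by reference (cand->by_ref set above).  */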
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
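
/* A hypothetical sketch of the PR 42237 situation handled above: an
   aggregate copy

     *dst = *src;

   between two single-field structures can, after both sides are rewritten
   to their reduced scalars, become an assignment whose rhs is still a
   memory reference; forcing that rhs into a separate operand via
   force_gimple_operand_gsi keeps the resulting statement valid GIMPLE.  */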
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= sra_ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= sra_ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->copy_param || !is_gimple_reg (adj->base))
	continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		DECL_MODE (vexpr) = DECL_MODE (adj->base);
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy)
	    = BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
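
/* For instance (a made-up dump excerpt, format approximate), for a removed
   parameter S this emits at the start of the function something along the
   lines of

     # DEBUG D#1 s=> s
     # DEBUG s => D#1

   so that a debugger can still print S even though no executable code
   computes its value any more.  */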
/* Return false iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
					 void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return true;

  return false;
}
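
/* A call site can legitimately pass fewer actual arguments than the callee
   has formal parameters, e.g. in unprototyped K&R-style code (a hypothetical
   example):

     int take_two ();

     int
     use_it (void)
     {
       return take_two (1);
     }

   Removing or splitting parameters of such a callee would be unsafe, which
   is why the predicate above is used to veto IPA-SRA for it.  */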
/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
		 cs->caller->uid, cs->callee->uid,
		 xstrdup (cgraph_node_name (cs->caller)),
		 xstrdup (cgraph_node_name (cs->callee)));

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return false;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
			       &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->symbol.decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }
}
/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  new_node = cgraph_function_versioning (node, redirect_callers,
					 NULL,
					 NULL, false, NULL, NULL, "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->symbol.decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
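
/* Schematically (a hypothetical example), for

     static int foo (struct S *p) { return p->x; }

   this creates a clone along the lines of

     static int foo.isra.0 (int x) { return x; }

   redirects the original callers to the clone, rewrites the clone's body,
   and finally makes the clone local so later passes treat it like any
   other static function.  */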
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->symbol.decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
    return false;

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
				   not_all_callers_have_enough_arguments_p,
				   NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
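
/* The overall effect of this pass can be inspected (a hypothetical usage
   sketch) by compiling a translation unit containing e.g.

     static int sum (const struct S *p) { return p->a + p->b; }

   with -fipa-sra -fdump-tree-eipa_sra: the dump file then lists the
   parameter adjustments decided above along with the statistics counters
   recorded before returning.  */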
/* Return true iff early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  ipa_early_sra_gate,			/* gate */
  ipa_early_sra,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_SRA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_symtab			/* todo_flags_finish */
 }
};