gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
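/* As an illustrative sketch (not part of the original sources): assuming a
   32-bit int target, a function such as

     struct pair { int x; int y; };

     int
     sum (struct pair p)
     {
       return p.x + p.y;
     }

   is, after intraprocedural SRA, roughly equivalent to

     int
     sum (struct pair p)
     {
       int SR_x;  /* scalar replacement of p.x */
       int SR_y;  /* scalar replacement of p.y */

       SR_x = p.x;
       SR_y = p.y;
       return SR_x + SR_y;
     }

   where the hypothetical names SR_x/SR_y stand for the "SR" temporaries
   created by create_access_replacement below.  */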
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "ipa-prop.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
87 #include "timevar.h"
88 #include "params.h"
89 #include "target.h"
90 #include "flags.h"
91 #include "dbgcnt.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
105 struct assign_link;
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or vector. It is the
121 duty of the modifying functions to replace them appropriately. */
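/* An illustrative example (the layout assumes a 32-bit int and no padding):
   for

     struct S { struct { int a; int b; } in; int c; } s;

   with accesses to s, s.in, s.in.a, s.in.b and s.c, the representatives
   would have (offset, size) in bits of (0, 96), (0, 64), (0, 32), (32, 32)
   and (64, 32). The tree would be rooted at s (0, 96), whose first_child
   is s.in (0, 64) with next_sibling s.c (64, 32); s.in in turn has children
   s.in.a (0, 32) and s.in.b (32, 32).  */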
123 struct access
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
139 /* The statement this access belongs to. */
140 gimple stmt;
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
170 /* Is this particular access a write access? */
171 unsigned write : 1;
173 /* Is this access an access to a non-addressable field? */
174 unsigned non_addressable : 1;
176 /* Is this access currently in the work queue? */
177 unsigned grp_queued : 1;
179 /* Does this group contain a write access? This flag is propagated down the
180 access tree. */
181 unsigned grp_write : 1;
183 /* Does this group contain a read access? This flag is propagated down the
184 access tree. */
185 unsigned grp_read : 1;
187 /* Does this group contain a read access that comes from an assignment
188 statement? This flag is propagated down the access tree. */
189 unsigned grp_assignment_read : 1;
191 /* Does this group contain a write access that comes from an assignment
192 statement? This flag is propagated down the access tree. */
193 unsigned grp_assignment_write : 1;
195 /* Does this group contain a read access through a scalar type? This flag is
196 not propagated in the access tree in any direction. */
197 unsigned grp_scalar_read : 1;
199 /* Does this group contain a write access through a scalar type? This flag
200 is not propagated in the access tree in any direction. */
201 unsigned grp_scalar_write : 1;
203 /* Is this access an artificial one created to scalarize some record
204 entirely? */
205 unsigned grp_total_scalarization : 1;
207 /* Other passes of the analysis use this bit to make function
208 analyze_access_subtree create scalar replacements for this group if
209 possible. */
210 unsigned grp_hint : 1;
212 /* Is the subtree rooted in this access fully covered by scalar
213 replacements? */
214 unsigned grp_covered : 1;
216 /* If set to true, this access and all below it in an access tree must not be
217 scalarized. */
218 unsigned grp_unscalarizable_region : 1;
220 /* Whether data have been written to parts of the aggregate covered by this
221 access which are not to be scalarized. This flag is propagated up the
222 access tree. */
223 unsigned grp_unscalarized_data : 1;
225 /* Does this access and/or group contain a write access through a
226 BIT_FIELD_REF? */
227 unsigned grp_partial_lhs : 1;
229 /* Set when a scalar replacement should be created for this variable. We make
230 the decision and create it at different places because create_tmp_var
231 cannot be called from within FOR_EACH_REFERENCED_VAR. */
232 unsigned grp_to_be_replaced : 1;
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
251 typedef struct access *access_p;
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
256 /* Alloc pool for allocating access structures. */
257 static alloc_pool access_pool;
259 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
260 are used to propagate subaccesses from rhs to lhs as long as they don't
261 conflict with what is already there. */
262 struct assign_link
264 struct access *lacc, *racc;
265 struct assign_link *next;
268 /* Alloc pool for allocating assign link structures. */
269 static alloc_pool link_pool;
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
272 static struct pointer_map_t *base_access_vec;
274 /* Bitmap of candidates. */
275 static bitmap candidate_bitmap;
277 /* Bitmap of candidates which we should try to entirely scalarize away and
278 those which cannot be (because they are, and need to be, used as a whole). */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
281 /* Obstack for creation of fancy names. */
282 static struct obstack name_obstack;
284 /* Head of a linked list of accesses that need to have their subaccesses
285 propagated to their assignment counterparts. */
286 static struct access *work_queue_head;
288 /* Number of parameters of the analyzed function when doing early ipa SRA. */
289 static int func_param_count;
291 /* scan_function sets the following to true if it encounters a call to
292 __builtin_apply_args. */
293 static bool encountered_apply_args;
295 /* Set by scan_function when it finds a recursive call. */
296 static bool encountered_recursive_call;
298 /* Set by scan_function when it finds a recursive call with fewer actual
299 arguments than formal parameters. */
300 static bool encountered_unchangable_recursive_call;
302 /* This is a table in which for each basic block and parameter there is a
303 distance (offset + size) in that parameter which is dereferenced and
304 accessed in that BB. */
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307 returning, throwing externally, looping infinitely or calling a function
308 which might abort, etc. */
309 static bitmap final_bbs;
311 /* Representative of no accesses at all. */
312 static struct access no_accesses_representant;
314 /* Predicate to test the special value. */
316 static inline bool
317 no_accesses_p (struct access *access)
319 return access == &no_accesses_representant;
322 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
323 representative fields are dumped, otherwise those which only describe the
324 individual access are. */
326 static struct
328 /* Number of processed aggregates is readily available in
329 analyze_all_variable_accesses and so is not stored here. */
331 /* Number of created scalar replacements. */
332 int replacements;
334 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
335 expression. */
336 int exprs;
338 /* Number of statements created by generate_subtree_copies. */
339 int subtree_copies;
341 /* Number of statements created by load_assign_lhs_subreplacements. */
342 int subreplacements;
344 /* Number of times sra_modify_assign has deleted a statement. */
345 int deleted;
347 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348 RHS separately due to type conversions or nonexistent matching
349 references. */
350 int separate_lhs_rhs_handling;
352 /* Number of parameters that were removed because they were unused. */
353 int deleted_unused_parameters;
355 /* Number of scalars passed as parameters by reference that have been
356 converted to be passed by value. */
357 int scalar_by_ref_to_by_val;
359 /* Number of aggregate parameters that were replaced by one or more of their
360 components. */
361 int aggregate_params_reduced;
363 /* Number of components created when splitting aggregate parameters. */
364 int param_reductions_created;
365 } sra_stats;
367 static void
368 dump_access (FILE *f, struct access *access, bool grp)
370 fprintf (f, "access { ");
371 fprintf (f, "base = (%d)'", DECL_UID (access->base));
372 print_generic_expr (f, access->base, 0);
373 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375 fprintf (f, ", expr = ");
376 print_generic_expr (f, access->expr, 0);
377 fprintf (f, ", type = ");
378 print_generic_expr (f, access->type, 0);
379 if (grp)
380 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 "grp_assignment_write = %d, grp_scalar_read = %d, "
382 "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 "grp_hint = %d, grp_covered = %d, "
384 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 "grp_maybe_modified = %d, "
387 "grp_not_necessarilly_dereferenced = %d\n",
388 access->grp_read, access->grp_write, access->grp_assignment_read,
389 access->grp_assignment_write, access->grp_scalar_read,
390 access->grp_scalar_write, access->grp_total_scalarization,
391 access->grp_hint, access->grp_covered,
392 access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 access->grp_partial_lhs, access->grp_to_be_replaced,
394 access->grp_maybe_modified,
395 access->grp_not_necessarilly_dereferenced);
396 else
397 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 "grp_partial_lhs = %d\n",
399 access->write, access->grp_total_scalarization,
400 access->grp_partial_lhs);
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
405 static void
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
410 int i;
412 for (i = 0; i < level; i++)
413 fputs ("* ", dump_file);
415 dump_access (f, access, true);
417 if (access->first_child)
418 dump_access_tree_1 (f, access->first_child, level + 1);
420 access = access->next_sibling;
422 while (access);
425 /* Dump all access trees for a variable, given the pointer to the first root in
426 ACCESS. */
428 static void
429 dump_access_tree (FILE *f, struct access *access)
431 for (; access; access = access->next_grp)
432 dump_access_tree_1 (f, access, 0);
435 /* Return true iff ACC is non-NULL and has subaccesses. */
437 static inline bool
438 access_has_children_p (struct access *acc)
440 return acc && acc->first_child;
443 /* Return a vector of pointers to accesses for the variable given in BASE or
444 NULL if there is none. */
446 static VEC (access_p, heap) *
447 get_base_access_vector (tree base)
449 void **slot;
451 slot = pointer_map_contains (base_access_vec, base);
452 if (!slot)
453 return NULL;
454 else
455 return *(VEC (access_p, heap) **) slot;
458 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
459 in ACCESS. Return NULL if it cannot be found. */
461 static struct access *
462 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
463 HOST_WIDE_INT size)
465 while (access && (access->offset != offset || access->size != size))
467 struct access *child = access->first_child;
469 while (child && (child->offset + child->size <= offset))
470 child = child->next_sibling;
471 access = child;
474 return access;
477 /* Return the first group representative for DECL or NULL if none exists. */
479 static struct access *
480 get_first_repr_for_decl (tree base)
482 VEC (access_p, heap) *access_vec;
484 access_vec = get_base_access_vector (base);
485 if (!access_vec)
486 return NULL;
488 return VEC_index (access_p, access_vec, 0);
491 /* Find an access representative for the variable BASE and given OFFSET and
492 SIZE. Requires that access trees have already been built. Return NULL if
493 it cannot be found. */
495 static struct access *
496 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
497 HOST_WIDE_INT size)
499 struct access *access;
501 access = get_first_repr_for_decl (base);
502 while (access && (access->offset + access->size <= offset))
503 access = access->next_grp;
504 if (!access)
505 return NULL;
507 return find_access_in_subtree (access, offset, size);
510 /* Add LINK to the linked list of assign links of RACC. */
511 static void
512 add_link_to_rhs (struct access *racc, struct assign_link *link)
514 gcc_assert (link->racc == racc);
516 if (!racc->first_link)
518 gcc_assert (!racc->last_link);
519 racc->first_link = link;
521 else
522 racc->last_link->next = link;
524 racc->last_link = link;
525 link->next = NULL;
528 /* Move all link structures in their linked list in OLD_RACC to the linked list
529 in NEW_RACC. */
530 static void
531 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
533 if (!old_racc->first_link)
535 gcc_assert (!old_racc->last_link);
536 return;
539 if (new_racc->first_link)
541 gcc_assert (!new_racc->last_link->next);
542 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
544 new_racc->last_link->next = old_racc->first_link;
545 new_racc->last_link = old_racc->last_link;
547 else
549 gcc_assert (!new_racc->last_link);
551 new_racc->first_link = old_racc->first_link;
552 new_racc->last_link = old_racc->last_link;
554 old_racc->first_link = old_racc->last_link = NULL;
557 /* Add ACCESS to the work queue (which is actually a stack). */
559 static void
560 add_access_to_work_queue (struct access *access)
562 if (!access->grp_queued)
564 gcc_assert (!access->next_queued);
565 access->next_queued = work_queue_head;
566 access->grp_queued = 1;
567 work_queue_head = access;
571 /* Pop an access from the work queue, and return it, assuming there is one. */
573 static struct access *
574 pop_access_from_work_queue (void)
576 struct access *access = work_queue_head;
578 work_queue_head = access->next_queued;
579 access->next_queued = NULL;
580 access->grp_queued = 0;
581 return access;
585 /* Allocate necessary structures. */
587 static void
588 sra_initialize (void)
590 candidate_bitmap = BITMAP_ALLOC (NULL);
591 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
592 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
593 gcc_obstack_init (&name_obstack);
594 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
595 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
596 base_access_vec = pointer_map_create ();
597 memset (&sra_stats, 0, sizeof (sra_stats));
598 encountered_apply_args = false;
599 encountered_recursive_call = false;
600 encountered_unchangable_recursive_call = false;
603 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
605 static bool
606 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
607 void *data ATTRIBUTE_UNUSED)
609 VEC (access_p, heap) *access_vec;
610 access_vec = (VEC (access_p, heap) *) *value;
611 VEC_free (access_p, heap, access_vec);
613 return true;
616 /* Deallocate all general structures. */
618 static void
619 sra_deinitialize (void)
621 BITMAP_FREE (candidate_bitmap);
622 BITMAP_FREE (should_scalarize_away_bitmap);
623 BITMAP_FREE (cannot_scalarize_away_bitmap);
624 free_alloc_pool (access_pool);
625 free_alloc_pool (link_pool);
626 obstack_free (&name_obstack, NULL);
628 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
629 pointer_map_destroy (base_access_vec);
632 /* Remove DECL from candidates for SRA and write REASON to the dump file if
633 there is one. */
634 static void
635 disqualify_candidate (tree decl, const char *reason)
637 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
639 if (dump_file && (dump_flags & TDF_DETAILS))
641 fprintf (dump_file, "! Disqualifying ");
642 print_generic_expr (dump_file, decl, 0);
643 fprintf (dump_file, " - %s\n", reason);
647 /* Return true iff the type contains a field or an element which does not allow
648 scalarization. */
650 static bool
651 type_internals_preclude_sra_p (tree type, const char **msg)
653 tree fld;
654 tree et;
656 switch (TREE_CODE (type))
658 case RECORD_TYPE:
659 case UNION_TYPE:
660 case QUAL_UNION_TYPE:
661 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
662 if (TREE_CODE (fld) == FIELD_DECL)
664 tree ft = TREE_TYPE (fld);
666 if (TREE_THIS_VOLATILE (fld))
668 *msg = "volatile structure field";
669 return true;
671 if (!DECL_FIELD_OFFSET (fld))
673 *msg = "no structure field offset";
674 return true;
676 if (!DECL_SIZE (fld))
678 *msg = "zero structure field size";
679 return true;
681 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
683 *msg = "structure field offset not fixed";
684 return true;
686 if (!host_integerp (DECL_SIZE (fld), 1))
688 *msg = "structure field size not fixed";
689 return true;
691 if (AGGREGATE_TYPE_P (ft)
692 && int_bit_position (fld) % BITS_PER_UNIT != 0)
694 *msg = "structure field is bit field";
695 return true;
698 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
699 return true;
702 return false;
704 case ARRAY_TYPE:
705 et = TREE_TYPE (type);
707 if (TYPE_VOLATILE (et))
709 *msg = "element type is volatile";
710 return true;
713 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
714 return true;
716 return false;
718 default:
719 return false;
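/* A few illustrative types that would trip the checks above (sketches only,
   not an exhaustive list; the messages are the ones set in *MSG):

     struct v { volatile int i; };      -> "volatile structure field"
     struct f { int n; int tail[]; };   -> likely "zero structure field size",
                                           since a flexible array member has
                                           no DECL_SIZE
     volatile int arr[4];               -> "element type is volatile"  */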
723 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
724 base variable if it is. Return T if it is not an SSA_NAME. */
726 static tree
727 get_ssa_base_param (tree t)
729 if (TREE_CODE (t) == SSA_NAME)
731 if (SSA_NAME_IS_DEFAULT_DEF (t))
732 return SSA_NAME_VAR (t);
733 else
734 return NULL_TREE;
736 return t;
739 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
740 belongs to, unless the BB has already been marked as potentially
741 final. */
743 static void
744 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
746 basic_block bb = gimple_bb (stmt);
747 int idx, parm_index = 0;
748 tree parm;
750 if (bitmap_bit_p (final_bbs, bb->index))
751 return;
753 for (parm = DECL_ARGUMENTS (current_function_decl);
754 parm && parm != base;
755 parm = DECL_CHAIN (parm))
756 parm_index++;
758 gcc_assert (parm_index < func_param_count);
760 idx = bb->index * func_param_count + parm_index;
761 if (bb_dereferences[idx] < dist)
762 bb_dereferences[idx] = dist;
765 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
766 the three fields. Also add it to the vector of accesses corresponding to
767 the base. Finally, return the new access. */
769 static struct access *
770 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
772 VEC (access_p, heap) *vec;
773 struct access *access;
774 void **slot;
776 access = (struct access *) pool_alloc (access_pool);
777 memset (access, 0, sizeof (struct access));
778 access->base = base;
779 access->offset = offset;
780 access->size = size;
782 slot = pointer_map_contains (base_access_vec, base);
783 if (slot)
784 vec = (VEC (access_p, heap) *) *slot;
785 else
786 vec = VEC_alloc (access_p, heap, 32);
788 VEC_safe_push (access_p, heap, vec, access);
790 *((struct VEC (access_p,heap) **)
791 pointer_map_insert (base_access_vec, base)) = vec;
793 return access;
796 /* Create and insert access for EXPR. Return created access, or NULL if it is
797 not possible. */
799 static struct access *
800 create_access (tree expr, gimple stmt, bool write)
802 struct access *access;
803 HOST_WIDE_INT offset, size, max_size;
804 tree base = expr;
805 bool ptr, unscalarizable_region = false;
807 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
809 if (sra_mode == SRA_MODE_EARLY_IPA
810 && TREE_CODE (base) == MEM_REF)
812 base = get_ssa_base_param (TREE_OPERAND (base, 0));
813 if (!base)
814 return NULL;
815 ptr = true;
817 else
818 ptr = false;
820 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
821 return NULL;
823 if (sra_mode == SRA_MODE_EARLY_IPA)
825 if (size < 0 || size != max_size)
827 disqualify_candidate (base, "Encountered a variable sized access.");
828 return NULL;
830 if (TREE_CODE (expr) == COMPONENT_REF
831 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
833 disqualify_candidate (base, "Encountered a bit-field access.");
834 return NULL;
836 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
838 if (ptr)
839 mark_parm_dereference (base, offset + size, stmt);
841 else
843 if (size != max_size)
845 size = max_size;
846 unscalarizable_region = true;
848 if (size < 0)
850 disqualify_candidate (base, "Encountered an unconstrained access.");
851 return NULL;
855 access = create_access_1 (base, offset, size);
856 access->expr = expr;
857 access->type = TREE_TYPE (expr);
858 access->write = write;
859 access->grp_unscalarizable_region = unscalarizable_region;
860 access->stmt = stmt;
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
864 access->non_addressable = 1;
866 return access;
870 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
871 register types or (recursively) records with only these two kinds of fields.
872 It also returns false if any of these records contains a bit-field. */
874 static bool
875 type_consists_of_records_p (tree type)
877 tree fld;
879 if (TREE_CODE (type) != RECORD_TYPE)
880 return false;
882 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
883 if (TREE_CODE (fld) == FIELD_DECL)
885 tree ft = TREE_TYPE (fld);
887 if (DECL_BIT_FIELD (fld))
888 return false;
890 if (!is_gimple_reg_type (ft)
891 && !type_consists_of_records_p (ft))
892 return false;
895 return true;
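/* Illustrative examples of what the predicate above accepts and rejects:

     struct a { int i; struct { double d; } in; };  -> true
     struct b { int bits : 3; };                    -> false (bit-field)
     struct c { int arr[4]; };                      -> false (an array field is
                                                       neither a register type
                                                       nor a record)  */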
898 /* Create total_scalarization accesses for all scalar type fields in DECL that
899 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
900 must be the top-most VAR_DECL representing the variable, OFFSET must be the
901 offset of DECL within BASE. REF must be the memory reference expression for
902 the given decl. */
904 static void
905 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
906 tree ref)
908 tree fld, decl_type = TREE_TYPE (decl);
910 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
911 if (TREE_CODE (fld) == FIELD_DECL)
913 HOST_WIDE_INT pos = offset + int_bit_position (fld);
914 tree ft = TREE_TYPE (fld);
915 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
916 NULL_TREE);
918 if (is_gimple_reg_type (ft))
920 struct access *access;
921 HOST_WIDE_INT size;
923 size = tree_low_cst (DECL_SIZE (fld), 1);
924 access = create_access_1 (base, pos, size);
925 access->expr = nref;
926 access->type = ft;
927 access->grp_total_scalarization = 1;
928 /* Accesses for intraprocedural SRA can have their stmt NULL. */
930 else
931 completely_scalarize_record (base, fld, pos, nref);
935 /* Create total_scalarization accesses for all scalar type fields in VAR and
936 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
937 type_consists_of_records_p. */
939 static void
940 completely_scalarize_var (tree var)
942 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
943 struct access *access;
945 access = create_access_1 (var, 0, size);
946 access->expr = var;
947 access->type = TREE_TYPE (var);
948 access->grp_total_scalarization = 1;
950 completely_scalarize_record (var, var, 0, var);
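/* For instance (an illustrative case assuming a 32-bit int and 64-bit
   alignment of double), for

     struct a { int i; struct { double d; } in; } v;

   the two functions above create total_scalarization accesses for v as a
   whole (offset 0, size 128), for v.i (offset 0, size 32) and, through the
   recursion into the inner record, for v.in.d (offset 64, size 64).  */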
953 /* Search the given tree for a declaration by skipping handled components and
954 exclude it from the candidates. */
956 static void
957 disqualify_base_of_expr (tree t, const char *reason)
959 t = get_base_address (t);
960 if (sra_mode == SRA_MODE_EARLY_IPA
961 && TREE_CODE (t) == MEM_REF)
962 t = get_ssa_base_param (TREE_OPERAND (t, 0));
964 if (t && DECL_P (t))
965 disqualify_candidate (t, reason);
968 /* Scan expression EXPR and create access structures for all accesses to
969 candidates for scalarization. Return the created access or NULL if none is
970 created. */
972 static struct access *
973 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
975 struct access *ret = NULL;
976 bool partial_ref;
978 if (TREE_CODE (expr) == BIT_FIELD_REF
979 || TREE_CODE (expr) == IMAGPART_EXPR
980 || TREE_CODE (expr) == REALPART_EXPR)
982 expr = TREE_OPERAND (expr, 0);
983 partial_ref = true;
985 else
986 partial_ref = false;
988 /* We need to dive through V_C_Es in order to get the size of its parameter
989 and not the result type. Ada produces such statements. We are also
990 capable of handling the topmost V_C_E but not any of those buried in other
991 handled components. */
992 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
993 expr = TREE_OPERAND (expr, 0);
995 if (contains_view_convert_expr_p (expr))
997 disqualify_base_of_expr (expr, "V_C_E under a different handled "
998 "component.");
999 return NULL;
1002 switch (TREE_CODE (expr))
1004 case MEM_REF:
1005 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1006 && sra_mode != SRA_MODE_EARLY_IPA)
1007 return NULL;
1008 /* fall through */
1009 case VAR_DECL:
1010 case PARM_DECL:
1011 case RESULT_DECL:
1012 case COMPONENT_REF:
1013 case ARRAY_REF:
1014 case ARRAY_RANGE_REF:
1015 ret = create_access (expr, stmt, write);
1016 break;
1018 default:
1019 break;
1022 if (write && partial_ref && ret)
1023 ret->grp_partial_lhs = 1;
1025 return ret;
1028 /* Scan expression EXPR and create access structures for all accesses to
1029 candidates for scalarization. Return true if any access has been inserted.
1030 STMT must be the statement from which the expression is taken, WRITE must be
1031 true if the expression is a store and false otherwise. */
1033 static bool
1034 build_access_from_expr (tree expr, gimple stmt, bool write)
1036 struct access *access;
1038 access = build_access_from_expr_1 (expr, stmt, write);
1039 if (access)
1041 /* This means the aggregate is accessed as a whole in a way other than an
1042 assign statement and thus cannot be removed even if we had a scalar
1043 replacement for everything. */
1044 if (cannot_scalarize_away_bitmap)
1045 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1046 return true;
1048 return false;
1051 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1052 modes in which it matters, return true iff they have been disqualified. RHS
1053 may be NULL, in which case it is ignored. If we scalarize an aggregate in
1054 intra-SRA we may need to add statements after each statement. This is not
1055 possible if a statement unconditionally has to end the basic block. */
1056 static bool
1057 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1059 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1060 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1062 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1063 if (rhs)
1064 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1065 return true;
1067 return false;
1070 /* Return true if EXP is a memory reference less aligned than ALIGN. This is
1071 invoked only on strict-alignment targets. */
1073 static bool
1074 tree_non_aligned_mem_p (tree exp, unsigned int align)
1076 unsigned int exp_align;
1078 if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
1079 exp = TREE_OPERAND (exp, 0);
1081 if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
1082 return false;
1084 /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
1085 compute an explicit alignment. Pretend that dereferenced pointers
1086 are always aligned on strict-alignment targets. */
1087 if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
1088 exp_align = get_object_or_type_alignment (exp);
1089 else
1090 exp_align = get_object_alignment (exp);
1092 if (exp_align < align)
1093 return true;
1095 return false;
1098 /* Scan expressions occurring in STMT, create access structures for all accesses
1099 to candidates for scalarization and remove those candidates which occur in
1100 statements or expressions that prevent them from being split apart. Return
1101 true if any access has been inserted. */
1103 static bool
1104 build_accesses_from_assign (gimple stmt)
1106 tree lhs, rhs;
1107 struct access *lacc, *racc;
1109 if (!gimple_assign_single_p (stmt)
1110 /* Scope clobbers don't influence scalarization. */
1111 || gimple_clobber_p (stmt))
1112 return false;
1114 lhs = gimple_assign_lhs (stmt);
1115 rhs = gimple_assign_rhs1 (stmt);
1117 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1118 return false;
1120 racc = build_access_from_expr_1 (rhs, stmt, false);
1121 lacc = build_access_from_expr_1 (lhs, stmt, true);
1123 if (lacc)
1125 lacc->grp_assignment_write = 1;
1126 if (STRICT_ALIGNMENT
1127 && tree_non_aligned_mem_p (rhs, get_object_alignment (lhs)))
1128 lacc->grp_unscalarizable_region = 1;
1131 if (racc)
1133 racc->grp_assignment_read = 1;
1134 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1135 && !is_gimple_reg_type (racc->type))
1136 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1137 if (STRICT_ALIGNMENT
1138 && tree_non_aligned_mem_p (lhs, get_object_alignment (rhs)))
1139 racc->grp_unscalarizable_region = 1;
1142 if (lacc && racc
1143 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1144 && !lacc->grp_unscalarizable_region
1145 && !racc->grp_unscalarizable_region
1146 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1147 /* FIXME: Turn the following line into an assert after PR 40058 is
1148 fixed. */
1149 && lacc->size == racc->size
1150 && useless_type_conversion_p (lacc->type, racc->type))
1152 struct assign_link *link;
1154 link = (struct assign_link *) pool_alloc (link_pool);
1155 memset (link, 0, sizeof (struct assign_link));
1157 link->lacc = lacc;
1158 link->racc = racc;
1160 add_link_to_rhs (racc, link);
1163 return lacc || racc;
1166 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1167 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1169 static bool
1170 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1171 void *data ATTRIBUTE_UNUSED)
1173 op = get_base_address (op);
1174 if (op
1175 && DECL_P (op))
1176 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1178 return false;
1181 /* Return true iff callsite CALL has at least as many actual arguments as there
1182 are formal parameters of the function currently processed by IPA-SRA. */
1184 static inline bool
1185 callsite_has_enough_arguments_p (gimple call)
1187 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1190 /* Scan function and look for interesting expressions and create access
1191 structures for them. Return true iff any access is created. */
1193 static bool
1194 scan_function (void)
1196 basic_block bb;
1197 bool ret = false;
1199 FOR_EACH_BB (bb)
1201 gimple_stmt_iterator gsi;
1202 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1204 gimple stmt = gsi_stmt (gsi);
1205 tree t;
1206 unsigned i;
1208 if (final_bbs && stmt_can_throw_external (stmt))
1209 bitmap_set_bit (final_bbs, bb->index);
1210 switch (gimple_code (stmt))
1212 case GIMPLE_RETURN:
1213 t = gimple_return_retval (stmt);
1214 if (t != NULL_TREE)
1215 ret |= build_access_from_expr (t, stmt, false);
1216 if (final_bbs)
1217 bitmap_set_bit (final_bbs, bb->index);
1218 break;
1220 case GIMPLE_ASSIGN:
1221 ret |= build_accesses_from_assign (stmt);
1222 break;
1224 case GIMPLE_CALL:
1225 for (i = 0; i < gimple_call_num_args (stmt); i++)
1226 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1227 stmt, false);
1229 if (sra_mode == SRA_MODE_EARLY_IPA)
1231 tree dest = gimple_call_fndecl (stmt);
1232 int flags = gimple_call_flags (stmt);
1234 if (dest)
1236 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1237 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1238 encountered_apply_args = true;
1239 if (cgraph_get_node (dest)
1240 == cgraph_get_node (current_function_decl))
1242 encountered_recursive_call = true;
1243 if (!callsite_has_enough_arguments_p (stmt))
1244 encountered_unchangable_recursive_call = true;
1248 if (final_bbs
1249 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1250 bitmap_set_bit (final_bbs, bb->index);
1253 t = gimple_call_lhs (stmt);
1254 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1255 ret |= build_access_from_expr (t, stmt, true);
1256 break;
1258 case GIMPLE_ASM:
1259 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1260 asm_visit_addr);
1261 if (final_bbs)
1262 bitmap_set_bit (final_bbs, bb->index);
1264 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1266 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1267 ret |= build_access_from_expr (t, stmt, false);
1269 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1271 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1272 ret |= build_access_from_expr (t, stmt, true);
1274 break;
1276 default:
1277 break;
1282 return ret;
1285 /* Helper of the QSORT function. The array contains pointers to accesses. An
1286 access is considered smaller than another if it has a smaller offset or if the
1287 offsets are the same but its size is bigger. */
1289 static int
1290 compare_access_positions (const void *a, const void *b)
1292 const access_p *fp1 = (const access_p *) a;
1293 const access_p *fp2 = (const access_p *) b;
1294 const access_p f1 = *fp1;
1295 const access_p f2 = *fp2;
1297 if (f1->offset != f2->offset)
1298 return f1->offset < f2->offset ? -1 : 1;
1300 if (f1->size == f2->size)
1302 if (f1->type == f2->type)
1303 return 0;
1304 /* Put any non-aggregate type before any aggregate type. */
1305 else if (!is_gimple_reg_type (f1->type)
1306 && is_gimple_reg_type (f2->type))
1307 return 1;
1308 else if (is_gimple_reg_type (f1->type)
1309 && !is_gimple_reg_type (f2->type))
1310 return -1;
1311 /* Put any complex or vector type before any other scalar type. */
1312 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1313 && TREE_CODE (f1->type) != VECTOR_TYPE
1314 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1315 || TREE_CODE (f2->type) == VECTOR_TYPE))
1316 return 1;
1317 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1318 || TREE_CODE (f1->type) == VECTOR_TYPE)
1319 && TREE_CODE (f2->type) != COMPLEX_TYPE
1320 && TREE_CODE (f2->type) != VECTOR_TYPE)
1321 return -1;
1322 /* Put the integral type with the bigger precision first. */
1323 else if (INTEGRAL_TYPE_P (f1->type)
1324 && INTEGRAL_TYPE_P (f2->type))
1325 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1326 /* Put any integral type with non-full precision last. */
1327 else if (INTEGRAL_TYPE_P (f1->type)
1328 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1329 != TYPE_PRECISION (f1->type)))
1330 return 1;
1331 else if (INTEGRAL_TYPE_P (f2->type)
1332 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1333 != TYPE_PRECISION (f2->type)))
1334 return -1;
1335 /* Stabilize the sort. */
1336 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1339 /* We want the bigger accesses first, thus the opposite operator in the next
1340 line: */
1341 return f1->size > f2->size ? -1 : 1;
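/* An illustrative ordering produced by the comparator above: accesses with
   (offset, size, type) of

     (0, 32, int), (0, 64, double), (0, 64, struct s), (32, 32, float)

   would be sorted as

     (0, 64, double), (0, 64, struct s), (0, 32, int), (32, 32, float)

   i.e. by growing offset, bigger sizes first at equal offsets, and scalar
   types before aggregate ones at equal offset and size.  */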
1345 /* Append a name of the declaration to the name obstack. A helper function for
1346 make_fancy_name. */
1348 static void
1349 make_fancy_decl_name (tree decl)
1351 char buffer[32];
1353 tree name = DECL_NAME (decl);
1354 if (name)
1355 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1356 IDENTIFIER_LENGTH (name));
1357 else
1359 sprintf (buffer, "D%u", DECL_UID (decl));
1360 obstack_grow (&name_obstack, buffer, strlen (buffer));
1364 /* Helper for make_fancy_name. */
1366 static void
1367 make_fancy_name_1 (tree expr)
1369 char buffer[32];
1370 tree index;
1372 if (DECL_P (expr))
1374 make_fancy_decl_name (expr);
1375 return;
1378 switch (TREE_CODE (expr))
1380 case COMPONENT_REF:
1381 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1382 obstack_1grow (&name_obstack, '$');
1383 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1384 break;
1386 case ARRAY_REF:
1387 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1388 obstack_1grow (&name_obstack, '$');
1389 /* Arrays with only one element may not have a constant as their
1390 index. */
1391 index = TREE_OPERAND (expr, 1);
1392 if (TREE_CODE (index) != INTEGER_CST)
1393 break;
1394 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1395 obstack_grow (&name_obstack, buffer, strlen (buffer));
1396 break;
1398 case ADDR_EXPR:
1399 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1400 break;
1402 case MEM_REF:
1403 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1404 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1406 obstack_1grow (&name_obstack, '$');
1407 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1408 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1409 obstack_grow (&name_obstack, buffer, strlen (buffer));
1411 break;
1413 case BIT_FIELD_REF:
1414 case REALPART_EXPR:
1415 case IMAGPART_EXPR:
1416 gcc_unreachable (); /* we treat these as scalars. */
1417 break;
1418 default:
1419 break;
1423 /* Create a human readable name for the replacement variable of ACCESS. */
1425 static char *
1426 make_fancy_name (tree expr)
1428 make_fancy_name_1 (expr);
1429 obstack_1grow (&name_obstack, '\0');
1430 return XOBFINISH (&name_obstack, char *);
1433 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1434 EXP_TYPE at the given OFFSET. If BASE is something for which
1435 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1436 to insert new statements either before or below the current one as specified
1437 by INSERT_AFTER. This function is not capable of handling bitfields. */
1439 tree
1440 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1441 tree exp_type, gimple_stmt_iterator *gsi,
1442 bool insert_after)
1444 tree prev_base = base;
1445 tree off;
1446 HOST_WIDE_INT base_offset;
1448 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1450 base = get_addr_base_and_unit_offset (base, &base_offset);
1452 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1453 offset such as array[var_index]. */
1454 if (!base)
1456 gimple stmt;
1457 tree tmp, addr;
1459 gcc_checking_assert (gsi);
1460 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1461 add_referenced_var (tmp);
1462 tmp = make_ssa_name (tmp, NULL);
1463 addr = build_fold_addr_expr (unshare_expr (prev_base));
1464 STRIP_USELESS_TYPE_CONVERSION (addr);
1465 stmt = gimple_build_assign (tmp, addr);
1466 gimple_set_location (stmt, loc);
1467 SSA_NAME_DEF_STMT (tmp) = stmt;
1468 if (insert_after)
1469 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1470 else
1471 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1472 update_stmt (stmt);
1474 off = build_int_cst (reference_alias_ptr_type (prev_base),
1475 offset / BITS_PER_UNIT);
1476 base = tmp;
1478 else if (TREE_CODE (base) == MEM_REF)
1480 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1481 base_offset + offset / BITS_PER_UNIT);
1482 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1483 base = unshare_expr (TREE_OPERAND (base, 0));
1485 else
1487 off = build_int_cst (reference_alias_ptr_type (base),
1488 base_offset + offset / BITS_PER_UNIT);
1489 base = build_fold_addr_expr (unshare_expr (base));
1492 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
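/* As a usage sketch (hypothetical values): for a candidate aggregate PARM,
   a call such as

     build_ref_for_offset (loc, parm, 64, float_type_node, &gsi, false);

   returns a tree equivalent to

     MEM[(float *)&parm + 8B]

   i.e. a MEM_REF of EXP_TYPE at byte offset OFFSET / BITS_PER_UNIT, with a
   statement computing the address inserted only when
   get_addr_base_and_unit_offset cannot handle BASE directly.  */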
1495 /* Construct a memory reference to a part of an aggregate BASE at the given
1496 OFFSET and of the same type as MODEL. In case this is a reference to a
1497 component, the function will replicate the last COMPONENT_REF of model's
1498 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1499 build_ref_for_offset. */
1501 static tree
1502 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1503 struct access *model, gimple_stmt_iterator *gsi,
1504 bool insert_after)
1506 if (TREE_CODE (model->expr) == COMPONENT_REF)
1508 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1509 tree cr_offset = component_ref_field_offset (model->expr);
1511 gcc_assert (cr_offset && host_integerp (cr_offset, 1));
1512 offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
1513 offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
1514 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1515 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1516 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1517 TREE_OPERAND (model->expr, 2));
1519 else
1520 return build_ref_for_offset (loc, base, offset, model->type,
1521 gsi, insert_after);
1524 /* Construct a memory reference consisting of component_refs and array_refs to
1525 a part of an aggregate *RES (which is of type TYPE). The requested part
1526 should have type EXP_TYPE and be at the given OFFSET. This function might not
1527 succeed; it returns true when it does and only then *RES points to something
1528 meaningful. This function should be used only to build expressions that we
1529 might need to present to the user (e.g. in warnings). In all other situations,
1530 build_ref_for_model or build_ref_for_offset should be used instead. */
1532 static bool
1533 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1534 tree exp_type)
1536 while (1)
1538 tree fld;
1539 tree tr_size, index, minidx;
1540 HOST_WIDE_INT el_size;
1542 if (offset == 0 && exp_type
1543 && types_compatible_p (exp_type, type))
1544 return true;
1546 switch (TREE_CODE (type))
1548 case UNION_TYPE:
1549 case QUAL_UNION_TYPE:
1550 case RECORD_TYPE:
1551 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1553 HOST_WIDE_INT pos, size;
1554 tree expr, *expr_ptr;
1556 if (TREE_CODE (fld) != FIELD_DECL)
1557 continue;
1559 pos = int_bit_position (fld);
1560 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1561 tr_size = DECL_SIZE (fld);
1562 if (!tr_size || !host_integerp (tr_size, 1))
1563 continue;
1564 size = tree_low_cst (tr_size, 1);
1565 if (size == 0)
1567 if (pos != offset)
1568 continue;
1570 else if (pos > offset || (pos + size) <= offset)
1571 continue;
1573 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1574 NULL_TREE);
1575 expr_ptr = &expr;
1576 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1577 offset - pos, exp_type))
1579 *res = expr;
1580 return true;
1583 return false;
1585 case ARRAY_TYPE:
1586 tr_size = TYPE_SIZE (TREE_TYPE (type));
1587 if (!tr_size || !host_integerp (tr_size, 1))
1588 return false;
1589 el_size = tree_low_cst (tr_size, 1);
1591 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1592 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1593 return false;
1594 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1595 if (!integer_zerop (minidx))
1596 index = int_const_binop (PLUS_EXPR, index, minidx);
1597 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1598 NULL_TREE, NULL_TREE);
1599 offset = offset % el_size;
1600 type = TREE_TYPE (type);
1601 break;
1603 default:
1604 if (offset != 0)
1605 return false;
1607 if (exp_type)
1608 return false;
1609 else
1610 return true;
1615 /* Return true iff TYPE is a stdarg va_list type. */
1617 static inline bool
1618 is_va_list_type (tree type)
1620 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1623 /* Print a message to the dump file explaining why a variable was rejected. */
1625 static void
1626 reject (tree var, const char *msg)
1628 if (dump_file && (dump_flags & TDF_DETAILS))
1630 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1631 print_generic_expr (dump_file, var, 0);
1632 fprintf (dump_file, "\n");
1636 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1637 those declarations whose type is suitable for scalarization. */
1639 static bool
1640 find_var_candidates (void)
1642 tree var, type;
1643 referenced_var_iterator rvi;
1644 bool ret = false;
1645 const char *msg;
1647 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1649 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1650 continue;
1651 type = TREE_TYPE (var);
1653 if (!AGGREGATE_TYPE_P (type))
1655 reject (var, "not aggregate");
1656 continue;
1658 if (needs_to_live_in_memory (var))
1660 reject (var, "needs to live in memory");
1661 continue;
1663 if (TREE_THIS_VOLATILE (var))
1665 reject (var, "is volatile");
1666 continue;
1668 if (!COMPLETE_TYPE_P (type))
1670 reject (var, "has incomplete type");
1671 continue;
1673 if (!host_integerp (TYPE_SIZE (type), 1))
1675 reject (var, "type size not fixed");
1676 continue;
1678 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1680 reject (var, "type size is zero");
1681 continue;
1683 if (type_internals_preclude_sra_p (type, &msg))
1685 reject (var, msg);
1686 continue;
1688 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1689 we also want to schedule it rather late. Thus we ignore it in
1690 the early pass. */
1691 (sra_mode == SRA_MODE_EARLY_INTRA
1692 && is_va_list_type (type)))
1694 reject (var, "is va_list");
1695 continue;
1698 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1700 if (dump_file && (dump_flags & TDF_DETAILS))
1702 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1703 print_generic_expr (dump_file, var, 0);
1704 fprintf (dump_file, "\n");
1706 ret = true;
1709 return ret;
1712 /* Sort all accesses for the given variable, check for partial overlaps and
1713 return NULL if there are any. If there are none, pick a representative for
1714 each combination of offset and size and create a linked list out of them.
1715 Return the pointer to the first representative and make sure it is the first
1716 one in the vector of accesses. */
1718 static struct access *
1719 sort_and_splice_var_accesses (tree var)
1721 int i, j, access_count;
1722 struct access *res, **prev_acc_ptr = &res;
1723 VEC (access_p, heap) *access_vec;
1724 bool first = true;
1725 HOST_WIDE_INT low = -1, high = 0;
1727 access_vec = get_base_access_vector (var);
1728 if (!access_vec)
1729 return NULL;
1730 access_count = VEC_length (access_p, access_vec);
1732 /* Sort by <OFFSET, SIZE>. */
1733 VEC_qsort (access_p, access_vec, compare_access_positions);
1735 i = 0;
1736 while (i < access_count)
1738 struct access *access = VEC_index (access_p, access_vec, i);
1739 bool grp_write = access->write;
1740 bool grp_read = !access->write;
1741 bool grp_scalar_write = access->write
1742 && is_gimple_reg_type (access->type);
1743 bool grp_scalar_read = !access->write
1744 && is_gimple_reg_type (access->type);
1745 bool grp_assignment_read = access->grp_assignment_read;
1746 bool grp_assignment_write = access->grp_assignment_write;
1747 bool multiple_scalar_reads = false;
1748 bool total_scalarization = access->grp_total_scalarization;
1749 bool grp_partial_lhs = access->grp_partial_lhs;
1750 bool first_scalar = is_gimple_reg_type (access->type);
1751 bool unscalarizable_region = access->grp_unscalarizable_region;
1753 if (first || access->offset >= high)
1755 first = false;
1756 low = access->offset;
1757 high = access->offset + access->size;
1759 else if (access->offset > low && access->offset + access->size > high)
1760 return NULL;
1761 else
1762 gcc_assert (access->offset >= low
1763 && access->offset + access->size <= high);
1765 j = i + 1;
1766 while (j < access_count)
1768 struct access *ac2 = VEC_index (access_p, access_vec, j);
1769 if (ac2->offset != access->offset || ac2->size != access->size)
1770 break;
1771 if (ac2->write)
1773 grp_write = true;
1774 grp_scalar_write = (grp_scalar_write
1775 || is_gimple_reg_type (ac2->type));
1777 else
1779 grp_read = true;
1780 if (is_gimple_reg_type (ac2->type))
1782 if (grp_scalar_read)
1783 multiple_scalar_reads = true;
1784 else
1785 grp_scalar_read = true;
1788 grp_assignment_read |= ac2->grp_assignment_read;
1789 grp_assignment_write |= ac2->grp_assignment_write;
1790 grp_partial_lhs |= ac2->grp_partial_lhs;
1791 unscalarizable_region |= ac2->grp_unscalarizable_region;
1792 total_scalarization |= ac2->grp_total_scalarization;
1793 relink_to_new_repr (access, ac2);
1795 /* If there are both aggregate-type and scalar-type accesses with
1796 this combination of size and offset, the comparison function
1797 should have put the scalars first. */
1798 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1799 ac2->group_representative = access;
1800 j++;
1803 i = j;
1805 access->group_representative = access;
1806 access->grp_write = grp_write;
1807 access->grp_read = grp_read;
1808 access->grp_scalar_read = grp_scalar_read;
1809 access->grp_scalar_write = grp_scalar_write;
1810 access->grp_assignment_read = grp_assignment_read;
1811 access->grp_assignment_write = grp_assignment_write;
1812 access->grp_hint = multiple_scalar_reads || total_scalarization;
1813 access->grp_total_scalarization = total_scalarization;
1814 access->grp_partial_lhs = grp_partial_lhs;
1815 access->grp_unscalarizable_region = unscalarizable_region;
1816 if (access->first_link)
1817 add_access_to_work_queue (access);
1819 *prev_acc_ptr = access;
1820 prev_acc_ptr = &access->next_grp;
1823 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1824 return res;
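/* An illustrative run of the function above: if the sorted vector for VAR
   holds one write and two reads, all of scalar type and all with offset 0
   and size 32, they are spliced into a single group whose representative
   has grp_write, grp_read and grp_scalar_read set, and grp_hint set
   because of the multiple scalar reads.  */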
1827 /* Create a variable for the given ACCESS which determines the type, name and a
1828 few other properties. Return the variable declaration and also store it in
1829 ACCESS->replacement. */
1831 static tree
1832 create_access_replacement (struct access *access, bool rename)
1834 tree repl;
1836 repl = create_tmp_var (access->type, "SR");
1837 add_referenced_var (repl);
1838 if (rename)
1839 mark_sym_for_renaming (repl);
1841 if (!access->grp_partial_lhs
1842 && (TREE_CODE (access->type) == COMPLEX_TYPE
1843 || TREE_CODE (access->type) == VECTOR_TYPE))
1844 DECL_GIMPLE_REG_P (repl) = 1;
1846 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1847 DECL_ARTIFICIAL (repl) = 1;
1848 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1850 if (DECL_NAME (access->base)
1851 && !DECL_IGNORED_P (access->base)
1852 && !DECL_ARTIFICIAL (access->base))
1854 char *pretty_name = make_fancy_name (access->expr);
1855 tree debug_expr = unshare_expr (access->expr), d;
1857 DECL_NAME (repl) = get_identifier (pretty_name);
1858 obstack_free (&name_obstack, pretty_name);
1860 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1861 as DECL_DEBUG_EXPR isn't considered when looking for still
1862 used SSA_NAMEs and thus they could be freed. All debug info
1863 generation cares about is whether something is constant or variable
1864 and that get_ref_base_and_extent works properly on the
1865 expression. */
1866 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1867 switch (TREE_CODE (d))
1869 case ARRAY_REF:
1870 case ARRAY_RANGE_REF:
1871 if (TREE_OPERAND (d, 1)
1872 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1873 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1874 if (TREE_OPERAND (d, 3)
1875 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1876 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1877 /* FALLTHRU */
1878 case COMPONENT_REF:
1879 if (TREE_OPERAND (d, 2)
1880 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1881 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1882 break;
1883 default:
1884 break;
1886 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1887 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1888 if (access->grp_no_warning)
1889 TREE_NO_WARNING (repl) = 1;
1890 else
1891 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1893 else
1894 TREE_NO_WARNING (repl) = 1;
1896 if (dump_file)
1898 fprintf (dump_file, "Created a replacement for ");
1899 print_generic_expr (dump_file, access->base, 0);
1900 fprintf (dump_file, " offset: %u, size: %u: ",
1901 (unsigned) access->offset, (unsigned) access->size);
1902 print_generic_expr (dump_file, repl, 0);
1903 fprintf (dump_file, "\n");
1905 sra_stats.replacements++;
1907 return repl;
/* Return ACCESS's scalar replacement, creating it if it does not exist yet. */
1912 static inline tree
1913 get_access_replacement (struct access *access)
1915 gcc_assert (access->grp_to_be_replaced);
1917 if (!access->replacement_decl)
1918 access->replacement_decl = create_access_replacement (access, true);
1919 return access->replacement_decl;
/* Return ACCESS's scalar replacement, creating it if it does not exist yet,
but do not mark it for renaming. */
1925 static inline tree
1926 get_unrenamed_access_replacement (struct access *access)
1928 gcc_assert (!access->grp_to_be_replaced);
1930 if (!access->replacement_decl)
1931 access->replacement_decl = create_access_replacement (access, false);
1932 return access->replacement_decl;
1936 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
linked list along the way. Stop when *ACCESS is NULL or the access it points
to is not "within" the root. Return false iff some accesses partially
1939 overlap. */
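/* An editorial illustration of the trees built here: for representatives at
   (offset, size) pairs (0, 64), (0, 32) and (32, 32), which arrive sorted by
   offset and, for equal offsets, by decreasing size, the resulting subtree is

     (0, 64)
       first_child -> (0, 32)
                        next_sibling -> (32, 32)

   whereas representatives (0, 32) and (16, 32) overlap only partially, so
   build_access_subtree returns false for them.  */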
1941 static bool
1942 build_access_subtree (struct access **access)
1944 struct access *root = *access, *last_child = NULL;
1945 HOST_WIDE_INT limit = root->offset + root->size;
1947 *access = (*access)->next_grp;
1948 while (*access && (*access)->offset + (*access)->size <= limit)
1950 if (!last_child)
1951 root->first_child = *access;
1952 else
1953 last_child->next_sibling = *access;
1954 last_child = *access;
1956 if (!build_access_subtree (access))
1957 return false;
1960 if (*access && (*access)->offset < limit)
1961 return false;
1963 return true;
/* Build a tree of access representatives; ACCESS is the pointer to the first
one, the others are linked in a list by the next_grp field. Return false iff
1968 some accesses partially overlap. */
1970 static bool
1971 build_access_trees (struct access *access)
1973 while (access)
1975 struct access *root = access;
1977 if (!build_access_subtree (&access))
1978 return false;
1979 root->next_grp = access;
1981 return true;
/* Return true iff EXPR contains some ARRAY_REFs into a variable-bounded
array. */
1987 static bool
1988 expr_with_var_bounded_array_refs_p (tree expr)
1990 while (handled_component_p (expr))
1992 if (TREE_CODE (expr) == ARRAY_REF
1993 && !host_integerp (array_ref_low_bound (expr), 0))
1994 return true;
1995 expr = TREE_OPERAND (expr, 0);
1997 return false;
2000 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
sorts of access flags appropriately along the way, notably propagating
grp_read, grp_assignment_read, grp_write, grp_assignment_write and
grp_total_scalarization from the parent to ROOT.
2006 Creating a replacement for a scalar access is considered beneficial if its
2007 grp_hint is set (this means we are either attempting total scalarization or
2008 there is more than one direct read access) or according to the following
2009 table:
2011 Access written to through a scalar type (once or more times)
2013 | Written to in an assignment statement
2015 | | Access read as scalar _once_
2016 | | |
2017 | | | Read in an assignment statement
2018 | | | |
2019 | | | | Scalarize Comment
2020 -----------------------------------------------------------------------------
2021 0 0 0 0 No access for the scalar
2022 0 0 0 1 No access for the scalar
2023 0 0 1 0 No Single read - won't help
2024 0 0 1 1 No The same case
2025 0 1 0 0 No access for the scalar
2026 0 1 0 1 No access for the scalar
2027 0 1 1 0 Yes s = *g; return s.i;
2028 0 1 1 1 Yes The same case as above
2029 1 0 0 0 No Won't help
2030 1 0 0 1 Yes s.i = 1; *g = s;
2031 1 0 1 0 Yes s.i = 5; g = s.i;
2032 1 0 1 1 Yes The same case as above
2033 1 1 0 0 No Won't help.
2034 1 1 0 1 Yes s.i = 1; *g = s;
2035 1 1 1 0 Yes s = *g; return s.i;
2036 1 1 1 1 Yes Any of the above yeses */
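/* An editorial illustration of one row of the table (SR.1 being a
   hypothetical replacement name): for the "1 0 0 1" case, given

     struct S { int i; float f; };
     struct S s;
     extern struct S *g;

     s.i = 1;
     *g = s;

   the field s.i is written through a scalar type and the whole of s is read
   in an assignment statement, so the pass creates a replacement and rewrites
   the code roughly to

     SR.1 = 1;
     s.i = SR.1;
     *g = s;  */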
2038 static bool
2039 analyze_access_subtree (struct access *root, struct access *parent,
2040 bool allow_replacements)
2042 struct access *child;
2043 HOST_WIDE_INT limit = root->offset + root->size;
2044 HOST_WIDE_INT covered_to = root->offset;
2045 bool scalar = is_gimple_reg_type (root->type);
2046 bool hole = false, sth_created = false;
2048 if (parent)
2050 if (parent->grp_read)
2051 root->grp_read = 1;
2052 if (parent->grp_assignment_read)
2053 root->grp_assignment_read = 1;
2054 if (parent->grp_write)
2055 root->grp_write = 1;
2056 if (parent->grp_assignment_write)
2057 root->grp_assignment_write = 1;
2058 if (parent->grp_total_scalarization)
2059 root->grp_total_scalarization = 1;
2062 if (root->grp_unscalarizable_region)
2063 allow_replacements = false;
2065 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2066 allow_replacements = false;
2068 for (child = root->first_child; child; child = child->next_sibling)
2070 hole |= covered_to < child->offset;
2071 sth_created |= analyze_access_subtree (child, root,
2072 allow_replacements && !scalar);
2074 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2075 root->grp_total_scalarization &= child->grp_total_scalarization;
2076 if (child->grp_covered)
2077 covered_to += child->size;
2078 else
2079 hole = true;
2082 if (allow_replacements && scalar && !root->first_child
2083 && (root->grp_hint
2084 || ((root->grp_scalar_read || root->grp_assignment_read)
2085 && (root->grp_scalar_write || root->grp_assignment_write))))
2087 bool new_integer_type;
2088 if (TREE_CODE (root->type) == ENUMERAL_TYPE)
2090 tree rt = root->type;
2091 root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
2092 TYPE_UNSIGNED (rt));
2093 new_integer_type = true;
2095 else
2096 new_integer_type = false;
2098 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (dump_file, "Marking ");
2101 print_generic_expr (dump_file, root->base, 0);
2102 fprintf (dump_file, " offset: %u, size: %u ",
2103 (unsigned) root->offset, (unsigned) root->size);
2104 fprintf (dump_file, " to be replaced%s.\n",
2105 new_integer_type ? " with an integer": "");
2108 root->grp_to_be_replaced = 1;
2109 sth_created = true;
2110 hole = false;
2112 else
2114 if (covered_to < limit)
2115 hole = true;
2116 if (scalar)
2117 root->grp_total_scalarization = 0;
2120 if (sth_created
2121 && (!hole || root->grp_total_scalarization))
2123 root->grp_covered = 1;
2124 return true;
2126 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2127 root->grp_unscalarized_data = 1; /* not covered and written to */
2128 if (sth_created)
2129 return true;
2130 return false;
/* Analyze all access trees linked by next_grp by means of
2134 analyze_access_subtree. */
2135 static bool
2136 analyze_access_trees (struct access *access)
2138 bool ret = false;
2140 while (access)
2142 if (analyze_access_subtree (access, NULL, true))
2143 ret = true;
2144 access = access->next_grp;
2147 return ret;
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2151 SIZE would conflict with an already existing one. If exactly such a child
2152 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2154 static bool
2155 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2156 HOST_WIDE_INT size, struct access **exact_match)
2158 struct access *child;
2160 for (child = lacc->first_child; child; child = child->next_sibling)
2162 if (child->offset == norm_offset && child->size == size)
2164 *exact_match = child;
2165 return true;
2168 if (child->offset < norm_offset + size
2169 && child->offset + child->size > norm_offset)
2170 return true;
2173 return false;
2176 /* Create a new child access of PARENT, with all properties just like MODEL
2177 except for its offset and with its grp_write false and grp_read true.
Return the new access or NULL if it cannot be created. Note that this access
is created long after all splicing and sorting; it is not located in any
access vector and is automatically a representative of its group. */
2182 static struct access *
2183 create_artificial_child_access (struct access *parent, struct access *model,
2184 HOST_WIDE_INT new_offset)
2186 struct access *access;
2187 struct access **child;
2188 tree expr = parent->base;
2190 gcc_assert (!model->grp_unscalarizable_region);
2192 access = (struct access *) pool_alloc (access_pool);
2193 memset (access, 0, sizeof (struct access));
2194 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2195 model->type))
2197 access->grp_no_warning = true;
2198 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2199 new_offset, model, NULL, false);
2202 access->base = parent->base;
2203 access->expr = expr;
2204 access->offset = new_offset;
2205 access->size = model->size;
2206 access->type = model->type;
2207 access->grp_write = true;
2208 access->grp_read = false;
2210 child = &parent->first_child;
2211 while (*child && (*child)->offset < new_offset)
2212 child = &(*child)->next_sibling;
2214 access->next_sibling = *child;
2215 *child = access;
2217 return access;
2221 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2222 true if any new subaccess was created. Additionally, if RACC is a scalar
2223 access but LACC is not, change the type of the latter, if possible. */
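/* An editorial example of this propagation: given

     struct S { int i; int j; } a, b;

     a = b;
     ... = b.i;

   the subaccess (0, 32) of the right hand side b is propagated across the
   assign link to the left hand side a, so that a also gets a child access
   there and the copy can later be performed component-wise.  */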
2225 static bool
2226 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2228 struct access *rchild;
2229 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2230 bool ret = false;
2232 if (is_gimple_reg_type (lacc->type)
2233 || lacc->grp_unscalarizable_region
2234 || racc->grp_unscalarizable_region)
2235 return false;
2237 if (!lacc->first_child && !racc->first_child
2238 && is_gimple_reg_type (racc->type))
2240 tree t = lacc->base;
2242 lacc->type = racc->type;
2243 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2244 racc->type))
2245 lacc->expr = t;
2246 else
2248 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2249 lacc->base, lacc->offset,
2250 racc, NULL, false);
2251 lacc->grp_no_warning = true;
2253 return false;
2256 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2258 struct access *new_acc = NULL;
2259 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2261 if (rchild->grp_unscalarizable_region)
2262 continue;
2264 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2265 &new_acc))
2267 if (new_acc)
2269 rchild->grp_hint = 1;
2270 new_acc->grp_hint |= new_acc->grp_read;
2271 if (rchild->first_child)
2272 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2274 continue;
2277 rchild->grp_hint = 1;
2278 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2279 if (new_acc)
2281 ret = true;
2282 if (racc->first_child)
2283 propagate_subaccesses_across_link (new_acc, rchild);
2287 return ret;
2290 /* Propagate all subaccesses across assignment links. */
2292 static void
2293 propagate_all_subaccesses (void)
2295 while (work_queue_head)
2297 struct access *racc = pop_access_from_work_queue ();
2298 struct assign_link *link;
2300 gcc_assert (racc->first_link);
2302 for (link = racc->first_link; link; link = link->next)
2304 struct access *lacc = link->lacc;
2306 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2307 continue;
2308 lacc = lacc->group_representative;
2309 if (propagate_subaccesses_across_link (lacc, racc)
2310 && lacc->first_link)
2311 add_access_to_work_queue (lacc);
2316 /* Go through all accesses collected throughout the (intraprocedural) analysis
2317 stage, exclude overlapping ones, identify representatives and build trees
2318 out of them, making decisions about scalarization on the way. Return true
2319 iff there are any to-be-scalarized variables after this stage. */
2321 static bool
2322 analyze_all_variable_accesses (void)
2324 int res = 0;
2325 bitmap tmp = BITMAP_ALLOC (NULL);
2326 bitmap_iterator bi;
2327 unsigned i, max_total_scalarization_size;
2329 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2330 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2332 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2333 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2334 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2336 tree var = referenced_var (i);
2338 if (TREE_CODE (var) == VAR_DECL
2339 && type_consists_of_records_p (TREE_TYPE (var)))
2341 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2342 <= max_total_scalarization_size)
2344 completely_scalarize_var (var);
2345 if (dump_file && (dump_flags & TDF_DETAILS))
2347 fprintf (dump_file, "Will attempt to totally scalarize ");
2348 print_generic_expr (dump_file, var, 0);
2349 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2352 else if (dump_file && (dump_flags & TDF_DETAILS))
2354 fprintf (dump_file, "Too big to totally scalarize: ");
2355 print_generic_expr (dump_file, var, 0);
2356 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2361 bitmap_copy (tmp, candidate_bitmap);
2362 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2364 tree var = referenced_var (i);
2365 struct access *access;
2367 access = sort_and_splice_var_accesses (var);
2368 if (!access || !build_access_trees (access))
2369 disqualify_candidate (var,
2370 "No or inhibitingly overlapping accesses.");
2373 propagate_all_subaccesses ();
2375 bitmap_copy (tmp, candidate_bitmap);
2376 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2378 tree var = referenced_var (i);
2379 struct access *access = get_first_repr_for_decl (var);
2381 if (analyze_access_trees (access))
2383 res++;
2384 if (dump_file && (dump_flags & TDF_DETAILS))
2386 fprintf (dump_file, "\nAccess trees for ");
2387 print_generic_expr (dump_file, var, 0);
2388 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2389 dump_access_tree (dump_file, access);
2390 fprintf (dump_file, "\n");
2393 else
2394 disqualify_candidate (var, "No scalar replacements to be created.");
2397 BITMAP_FREE (tmp);
2399 if (res)
2401 statistics_counter_event (cfun, "Scalarized aggregates", res);
2402 return true;
2404 else
2405 return false;
2408 /* Generate statements copying scalar replacements of accesses within a subtree
2409 into or out of AGG. ACCESS, all its children, siblings and their children
2410 are to be processed. AGG is an aggregate type expression (can be a
2411 declaration but does not have to be, it can for example also be a mem_ref or
2412 a series of handled components). TOP_OFFSET is the offset of the processed
2413 subtree which has to be subtracted from offsets of individual accesses to
get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2415 replacements in the interval <start_offset, start_offset + chunk_size>,
2416 otherwise copy all. GSI is a statement iterator used to place the new
2417 statements. WRITE should be true when the statements should write from AGG
to the replacement and false if vice versa. If INSERT_AFTER is true, new
statements will be added after the current statement in GSI; otherwise they
will be added before it. */
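/* An editorial sketch of the output (SR.1 and SR.2 being hypothetical
   replacement names): for

     struct S { int i; int j; } s;

   with replacements for s.i and s.j, calling this function with WRITE false
   emits roughly

     s.i = SR.1;
     s.j = SR.2;

   while WRITE true emits the converse loads SR.1 = s.i; SR.2 = s.j;.  */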
2422 static void
2423 generate_subtree_copies (struct access *access, tree agg,
2424 HOST_WIDE_INT top_offset,
2425 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2426 gimple_stmt_iterator *gsi, bool write,
2427 bool insert_after, location_t loc)
2431 if (chunk_size && access->offset >= start_offset + chunk_size)
2432 return;
2434 if (access->grp_to_be_replaced
2435 && (chunk_size == 0
2436 || access->offset + access->size > start_offset))
2438 tree expr, repl = get_access_replacement (access);
2439 gimple stmt;
2441 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2442 access, gsi, insert_after);
2444 if (write)
2446 if (access->grp_partial_lhs)
2447 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2448 !insert_after,
2449 insert_after ? GSI_NEW_STMT
2450 : GSI_SAME_STMT);
2451 stmt = gimple_build_assign (repl, expr);
2453 else
2455 TREE_NO_WARNING (repl) = 1;
2456 if (access->grp_partial_lhs)
2457 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2458 !insert_after,
2459 insert_after ? GSI_NEW_STMT
2460 : GSI_SAME_STMT);
2461 stmt = gimple_build_assign (expr, repl);
2463 gimple_set_location (stmt, loc);
2465 if (insert_after)
2466 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2467 else
2468 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2469 update_stmt (stmt);
2470 sra_stats.subtree_copies++;
2473 if (access->first_child)
2474 generate_subtree_copies (access->first_child, agg, top_offset,
2475 start_offset, chunk_size, gsi,
2476 write, insert_after, loc);
2478 access = access->next_sibling;
2480 while (access);
/* Assign zero to all scalar replacements in an access subtree. ACCESS is the
root of the subtree to be processed. GSI is the statement iterator used
2485 for inserting statements which are added after the current statement if
2486 INSERT_AFTER is true or before it otherwise. */
2488 static void
2489 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2490 bool insert_after, location_t loc)
2493 struct access *child;
2495 if (access->grp_to_be_replaced)
2497 gimple stmt;
2499 stmt = gimple_build_assign (get_access_replacement (access),
2500 build_zero_cst (access->type));
2501 if (insert_after)
2502 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2503 else
2504 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2505 update_stmt (stmt);
2506 gimple_set_location (stmt, loc);
2509 for (child = access->first_child; child; child = child->next_sibling)
2510 init_subtree_with_zero (child, gsi, insert_after, loc);
2513 /* Search for an access representative for the given expression EXPR and
2514 return it or NULL if it cannot be found. */
2516 static struct access *
2517 get_access_for_expr (tree expr)
2519 HOST_WIDE_INT offset, size, max_size;
2520 tree base;
2522 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2523 a different size than the size of its argument and we need the latter
2524 one. */
2525 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2526 expr = TREE_OPERAND (expr, 0);
2528 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2529 if (max_size == -1 || !DECL_P (base))
2530 return NULL;
2532 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2533 return NULL;
2535 return get_var_base_offset_size_access (base, offset, max_size);
2538 /* Replace the expression EXPR with a scalar replacement if there is one and
2539 generate other statements to do type conversion or subtree copying if
2540 necessary. GSI is used to place newly created statements, WRITE is true if
2541 the expression is being written to (it is on a LHS of a statement or output
2542 in an assembly statement). */
2544 static bool
2545 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2547 location_t loc;
2548 struct access *access;
2549 tree type, bfr;
2551 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2553 bfr = *expr;
2554 expr = &TREE_OPERAND (*expr, 0);
2556 else
2557 bfr = NULL_TREE;
2559 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2560 expr = &TREE_OPERAND (*expr, 0);
2561 access = get_access_for_expr (*expr);
2562 if (!access)
2563 return false;
2564 type = TREE_TYPE (*expr);
2566 loc = gimple_location (gsi_stmt (*gsi));
2567 if (access->grp_to_be_replaced)
2569 tree repl = get_access_replacement (access);
2570 /* If we replace a non-register typed access simply use the original
2571 access expression to extract the scalar component afterwards.
2572 This happens if scalarizing a function return value or parameter
2573 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2574 gcc.c-torture/compile/20011217-1.c.
2576 We also want to use this when accessing a complex or vector which can
2577 be accessed as a different type too, potentially creating a need for
2578 type conversion (see PR42196) and when scalarized unions are involved
2579 in assembler statements (see PR42398). */
2580 if (!useless_type_conversion_p (type, access->type))
2582 tree ref;
2584 ref = build_ref_for_model (loc, access->base, access->offset, access,
2585 NULL, false);
2587 if (write)
2589 gimple stmt;
2591 if (access->grp_partial_lhs)
2592 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2593 false, GSI_NEW_STMT);
2594 stmt = gimple_build_assign (repl, ref);
2595 gimple_set_location (stmt, loc);
2596 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2598 else
2600 gimple stmt;
2602 if (access->grp_partial_lhs)
2603 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2604 true, GSI_SAME_STMT);
2605 stmt = gimple_build_assign (ref, repl);
2606 gimple_set_location (stmt, loc);
2607 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2610 else
2611 *expr = repl;
2612 sra_stats.exprs++;
2615 if (access->first_child)
2617 HOST_WIDE_INT start_offset, chunk_size;
2618 if (bfr
2619 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2620 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2622 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2623 start_offset = access->offset
2624 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2626 else
2627 start_offset = chunk_size = 0;
2629 generate_subtree_copies (access->first_child, access->base, 0,
2630 start_offset, chunk_size, gsi, write, write,
2631 loc);
2633 return true;
/* Where scalar replacements of the RHS have been written to when a
replacement of the LHS of an assignment cannot be directly loaded from a
replacement of the RHS. */
2639 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2640 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2641 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2643 /* Store all replacements in the access tree rooted in TOP_RACC either to their
base aggregate if there are unscalarized data or directly to the LHS of the
2645 statement that is pointed to by GSI otherwise. */
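/* An editorial sketch of the two outcomes below for an assignment a = b
   where b is TOP_RACC's base: if b has unscalarized data, the replacements
   of b are flushed back into b itself and SRA_UDH_RIGHT is returned, so that
   the aggregate copy transfers everything; otherwise they are stored
   directly into a and SRA_UDH_LEFT is returned, which may make the copy
   itself redundant.  */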
2647 static enum unscalarized_data_handling
2648 handle_unscalarized_data_in_subtree (struct access *top_racc,
2649 gimple_stmt_iterator *gsi)
2651 if (top_racc->grp_unscalarized_data)
2653 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2654 gsi, false, false,
2655 gimple_location (gsi_stmt (*gsi)));
2656 return SRA_UDH_RIGHT;
2658 else
2660 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2661 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2662 0, 0, gsi, false, false,
2663 gimple_location (gsi_stmt (*gsi)));
2664 return SRA_UDH_LEFT;
2669 /* Try to generate statements to load all sub-replacements in an access subtree
2670 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2671 If that is not possible, refresh the TOP_RACC base aggregate and load the
2672 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2673 copied. NEW_GSI is stmt iterator used for statement insertions after the
2674 original assignment, OLD_GSI is used to insert statements before the
2675 assignment. *REFRESHED keeps the information whether we have needed to
refresh replacements of the LHS and from which side of the assignment this
2677 takes place. */
2679 static void
2680 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2681 HOST_WIDE_INT left_offset,
2682 gimple_stmt_iterator *old_gsi,
2683 gimple_stmt_iterator *new_gsi,
2684 enum unscalarized_data_handling *refreshed)
2686 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2687 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2689 if (lacc->grp_to_be_replaced)
2691 struct access *racc;
2692 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2693 gimple stmt;
2694 tree rhs;
2696 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2697 if (racc && racc->grp_to_be_replaced)
2699 rhs = get_access_replacement (racc);
2700 if (!useless_type_conversion_p (lacc->type, racc->type))
2701 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2703 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2704 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2705 true, GSI_SAME_STMT);
2707 else
2709 /* No suitable access on the right hand side, need to load from
2710 the aggregate. See if we have to update it first... */
2711 if (*refreshed == SRA_UDH_NONE)
2712 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2713 old_gsi);
2715 if (*refreshed == SRA_UDH_LEFT)
2716 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2717 new_gsi, true);
2718 else
2719 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2720 new_gsi, true);
2723 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2724 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2725 gimple_set_location (stmt, loc);
2726 update_stmt (stmt);
2727 sra_stats.subreplacements++;
2729 else if (*refreshed == SRA_UDH_NONE
2730 && lacc->grp_read && !lacc->grp_covered)
2731 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2732 old_gsi);
2734 if (lacc->first_child)
2735 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2736 old_gsi, new_gsi, refreshed);
2740 /* Result code for SRA assignment modification. */
2741 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2742 SRA_AM_MODIFIED, /* stmt changed but not
2743 removed */
2744 SRA_AM_REMOVED }; /* stmt eliminated */
2746 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2747 to the assignment and GSI is the statement iterator pointing at it. Returns
2748 the same values as sra_modify_assign. */
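/* An editorial example of the common path below (SR_i and SR_j being
   hypothetical replacement names): for a gimple assignment

     s = {};

   from an empty CONSTRUCTOR, where s is a fully covered struct with two int
   fields, the pass emits

     SR_i = 0;
     SR_j = 0;

   via init_subtree_with_zero and removes the original statement, returning
   SRA_AM_REMOVED.  */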
2750 static enum assignment_mod_result
2751 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2753 tree lhs = gimple_assign_lhs (*stmt);
2754 struct access *acc;
2755 location_t loc;
2757 acc = get_access_for_expr (lhs);
2758 if (!acc)
2759 return SRA_AM_NONE;
2761 loc = gimple_location (*stmt);
2762 if (VEC_length (constructor_elt,
2763 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
/* I have never seen this code path trigger, but should it ever happen, the
following should handle it gracefully. */
2767 if (access_has_children_p (acc))
2768 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2769 true, true, loc);
2770 return SRA_AM_MODIFIED;
2773 if (acc->grp_covered)
2775 init_subtree_with_zero (acc, gsi, false, loc);
2776 unlink_stmt_vdef (*stmt);
2777 gsi_remove (gsi, true);
2778 return SRA_AM_REMOVED;
2780 else
2782 init_subtree_with_zero (acc, gsi, true, loc);
2783 return SRA_AM_MODIFIED;
2787 /* Create and return a new suitable default definition SSA_NAME for RACC which
2788 is an access describing an uninitialized part of an aggregate that is being
2789 loaded. */
2791 static tree
2792 get_repl_default_def_ssa_name (struct access *racc)
2794 tree repl, decl;
2796 decl = get_unrenamed_access_replacement (racc);
2798 repl = gimple_default_def (cfun, decl);
2799 if (!repl)
2801 repl = make_ssa_name (decl, gimple_build_nop ());
2802 set_default_def (decl, repl);
2805 return repl;
2808 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2809 somewhere in it. */
2811 static inline bool
2812 contains_bitfld_comp_ref_p (const_tree ref)
2814 while (handled_component_p (ref))
2816 if (TREE_CODE (ref) == COMPONENT_REF
2817 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2818 return true;
2819 ref = TREE_OPERAND (ref, 0);
2822 return false;
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2826 bit-field field declaration somewhere in it. */
2828 static inline bool
2829 contains_vce_or_bfcref_p (const_tree ref)
2831 while (handled_component_p (ref))
2833 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2834 || (TREE_CODE (ref) == COMPONENT_REF
2835 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2836 return true;
2837 ref = TREE_OPERAND (ref, 0);
2840 return false;
2843 /* Examine both sides of the assignment statement pointed to by STMT, replace
them with a scalar replacement if there is one and generate copying of
2845 replacements if scalarized aggregates have been used in the assignment. GSI
2846 is used to hold generated statements for type conversions and subtree
2847 copying. */
2849 static enum assignment_mod_result
2850 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2852 struct access *lacc, *racc;
2853 tree lhs, rhs;
2854 bool modify_this_stmt = false;
2855 bool force_gimple_rhs = false;
2856 location_t loc;
2857 gimple_stmt_iterator orig_gsi = *gsi;
2859 if (!gimple_assign_single_p (*stmt))
2860 return SRA_AM_NONE;
2861 lhs = gimple_assign_lhs (*stmt);
2862 rhs = gimple_assign_rhs1 (*stmt);
2864 if (TREE_CODE (rhs) == CONSTRUCTOR)
2865 return sra_modify_constructor_assign (stmt, gsi);
2867 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2868 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2869 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2871 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2872 gsi, false);
2873 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2874 gsi, true);
2875 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2878 lacc = get_access_for_expr (lhs);
2879 racc = get_access_for_expr (rhs);
2880 if (!lacc && !racc)
2881 return SRA_AM_NONE;
2883 loc = gimple_location (*stmt);
2884 if (lacc && lacc->grp_to_be_replaced)
2886 lhs = get_access_replacement (lacc);
2887 gimple_assign_set_lhs (*stmt, lhs);
2888 modify_this_stmt = true;
2889 if (lacc->grp_partial_lhs)
2890 force_gimple_rhs = true;
2891 sra_stats.exprs++;
2894 if (racc && racc->grp_to_be_replaced)
2896 rhs = get_access_replacement (racc);
2897 modify_this_stmt = true;
2898 if (racc->grp_partial_lhs)
2899 force_gimple_rhs = true;
2900 sra_stats.exprs++;
2903 if (modify_this_stmt)
2905 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2907 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2908 ??? This should move to fold_stmt which we simply should
2909 call after building a VIEW_CONVERT_EXPR here. */
2910 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2911 && !contains_bitfld_comp_ref_p (lhs)
2912 && !access_has_children_p (lacc))
2914 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2915 gimple_assign_set_lhs (*stmt, lhs);
2917 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2918 && !contains_vce_or_bfcref_p (rhs)
2919 && !access_has_children_p (racc))
2920 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2922 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2924 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2925 rhs);
2926 if (is_gimple_reg_type (TREE_TYPE (lhs))
2927 && TREE_CODE (lhs) != SSA_NAME)
2928 force_gimple_rhs = true;
/* From this point on, the function deals with assignments between aggregates
when at least one has scalar reductions of some of its components. There are
three possible scenarios: 1) both the LHS and RHS have to-be-scalarized
components, 2) only the RHS does, or 3) only the LHS does.
2938 In the first case, we would like to load the LHS components from RHS
2939 components whenever possible. If that is not possible, we would like to
2940 read it directly from the RHS (after updating it by storing in it its own
2941 components). If there are some necessary unscalarized data in the LHS,
2942 those will be loaded by the original assignment too. If neither of these
2943 cases happen, the original statement can be removed. Most of this is done
2944 by load_assign_lhs_subreplacements.
2946 In the second case, we would like to store all RHS scalarized components
2947 directly into LHS and if they cover the aggregate completely, remove the
2948 statement too. In the third case, we want the LHS components to be loaded
2949 directly from the RHS (DSE will remove the original statement if it
2950 becomes redundant).
2952 This is a bit complex but manageable when types match and when unions do
2953 not cause confusion in a way that we cannot really load a component of LHS
2954 from the RHS or vice versa (the access representing this level can have
2955 subaccesses that are accessible only through a different union field at a
2956 higher level - different from the one used in the examined expression).
2957 Unions are fun.
2959 Therefore, I specially handle a fourth case, happening when there is a
2960 specific type cast or it is impossible to locate a scalarized subaccess on
2961 the other side of the expression. If that happens, I simply "refresh" the
RHS by storing its scalarized components in it, leave the original statement
there to do the copying and then load the scalar replacements of the LHS.
2964 This is what the first branch does. */
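/* An editorial example of that fourth case: for a union copy

     union U { struct A a; struct B b; } u1, u2;

     u1 = u2;

   where the interesting replacement of u2 is only reachable through a
   different union field than the one a subaccess of u1 uses, the pass
   flushes the scalarized components of u2 back into u2, keeps the statement
   to do the aggregate copy, and then reloads the scalar replacements of u1
   from it.  */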
2966 if (modify_this_stmt
2967 || gimple_has_volatile_ops (*stmt)
2968 || contains_vce_or_bfcref_p (rhs)
2969 || contains_vce_or_bfcref_p (lhs))
2971 if (access_has_children_p (racc))
2972 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2973 gsi, false, false, loc);
2974 if (access_has_children_p (lacc))
2975 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2976 gsi, true, true, loc);
2977 sra_stats.separate_lhs_rhs_handling++;
2979 else
2981 if (access_has_children_p (lacc) && access_has_children_p (racc))
2983 gimple_stmt_iterator orig_gsi = *gsi;
2984 enum unscalarized_data_handling refreshed;
2986 if (lacc->grp_read && !lacc->grp_covered)
2987 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2988 else
2989 refreshed = SRA_UDH_NONE;
2991 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2992 &orig_gsi, gsi, &refreshed);
2993 if (refreshed != SRA_UDH_RIGHT)
2995 gsi_next (gsi);
2996 unlink_stmt_vdef (*stmt);
2997 gsi_remove (&orig_gsi, true);
2998 sra_stats.deleted++;
2999 return SRA_AM_REMOVED;
3002 else
3004 if (racc)
3006 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
3008 if (dump_file)
3010 fprintf (dump_file, "Removing load: ");
3011 print_gimple_stmt (dump_file, *stmt, 0, 0);
3014 if (TREE_CODE (lhs) == SSA_NAME)
3016 rhs = get_repl_default_def_ssa_name (racc);
3017 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3018 TREE_TYPE (rhs)))
3019 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3020 TREE_TYPE (lhs), rhs);
3022 else
3024 if (racc->first_child)
3025 generate_subtree_copies (racc->first_child, lhs,
3026 racc->offset, 0, 0, gsi,
3027 false, false, loc);
3029 gcc_assert (*stmt == gsi_stmt (*gsi));
3030 unlink_stmt_vdef (*stmt);
3031 gsi_remove (gsi, true);
3032 sra_stats.deleted++;
3033 return SRA_AM_REMOVED;
3036 else if (racc->first_child)
3037 generate_subtree_copies (racc->first_child, lhs, racc->offset,
3038 0, 0, gsi, false, true, loc);
3040 if (access_has_children_p (lacc))
3041 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3042 0, 0, gsi, true, true, loc);
3046 /* This gimplification must be done after generate_subtree_copies, lest we
3047 insert the subtree copies in the middle of the gimplified sequence. */
3048 if (force_gimple_rhs)
3049 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3050 true, GSI_SAME_STMT);
3051 if (gimple_assign_rhs1 (*stmt) != rhs)
3053 modify_this_stmt = true;
3054 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3055 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3058 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
/* Traverse the function body and perform all modifications as decided in
3062 analyze_all_variable_accesses. Return true iff the CFG has been
3063 changed. */
3065 static bool
3066 sra_modify_function_body (void)
3068 bool cfg_changed = false;
3069 basic_block bb;
3071 FOR_EACH_BB (bb)
3073 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3074 while (!gsi_end_p (gsi))
3076 gimple stmt = gsi_stmt (gsi);
3077 enum assignment_mod_result assign_result;
3078 bool modified = false, deleted = false;
3079 tree *t;
3080 unsigned i;
3082 switch (gimple_code (stmt))
3084 case GIMPLE_RETURN:
3085 t = gimple_return_retval_ptr (stmt);
3086 if (*t != NULL_TREE)
3087 modified |= sra_modify_expr (t, &gsi, false);
3088 break;
3090 case GIMPLE_ASSIGN:
3091 assign_result = sra_modify_assign (&stmt, &gsi);
3092 modified |= assign_result == SRA_AM_MODIFIED;
3093 deleted = assign_result == SRA_AM_REMOVED;
3094 break;
3096 case GIMPLE_CALL:
3097 /* Operands must be processed before the lhs. */
3098 for (i = 0; i < gimple_call_num_args (stmt); i++)
3100 t = gimple_call_arg_ptr (stmt, i);
3101 modified |= sra_modify_expr (t, &gsi, false);
3104 if (gimple_call_lhs (stmt))
3106 t = gimple_call_lhs_ptr (stmt);
3107 modified |= sra_modify_expr (t, &gsi, true);
3109 break;
3111 case GIMPLE_ASM:
3112 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3114 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3115 modified |= sra_modify_expr (t, &gsi, false);
3117 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3119 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3120 modified |= sra_modify_expr (t, &gsi, true);
3122 break;
3124 default:
3125 break;
3128 if (modified)
3130 update_stmt (stmt);
3131 if (maybe_clean_eh_stmt (stmt)
3132 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3133 cfg_changed = true;
3135 if (!deleted)
3136 gsi_next (&gsi);
3140 return cfg_changed;
3143 /* Generate statements initializing scalar replacements of parts of function
3144 parameters. */
3146 static void
3147 initialize_parameter_reductions (void)
3149 gimple_stmt_iterator gsi;
3150 gimple_seq seq = NULL;
3151 tree parm;
3153 for (parm = DECL_ARGUMENTS (current_function_decl);
3154 parm;
3155 parm = DECL_CHAIN (parm))
3157 VEC (access_p, heap) *access_vec;
3158 struct access *access;
3160 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3161 continue;
3162 access_vec = get_base_access_vector (parm);
3163 if (!access_vec)
3164 continue;
3166 if (!seq)
3168 seq = gimple_seq_alloc ();
3169 gsi = gsi_start (seq);
3172 for (access = VEC_index (access_p, access_vec, 0);
3173 access;
3174 access = access->next_grp)
3175 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3176 EXPR_LOCATION (parm));
3179 if (seq)
3180 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3183 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3184 it reveals there are components of some aggregates to be scalarized, it runs
3185 the required transformations. */
3186 static unsigned int
3187 perform_intra_sra (void)
3189 int ret = 0;
3190 sra_initialize ();
3192 if (!find_var_candidates ())
3193 goto out;
3195 if (!scan_function ())
3196 goto out;
3198 if (!analyze_all_variable_accesses ())
3199 goto out;
3201 if (sra_modify_function_body ())
3202 ret = TODO_update_ssa | TODO_cleanup_cfg;
3203 else
3204 ret = TODO_update_ssa;
3205 initialize_parameter_reductions ();
3207 statistics_counter_event (cfun, "Scalar replacements created",
3208 sra_stats.replacements);
3209 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3210 statistics_counter_event (cfun, "Subtree copy stmts",
3211 sra_stats.subtree_copies);
3212 statistics_counter_event (cfun, "Subreplacement stmts",
3213 sra_stats.subreplacements);
3214 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3215 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3216 sra_stats.separate_lhs_rhs_handling);
3218 out:
3219 sra_deinitialize ();
3220 return ret;
3223 /* Perform early intraprocedural SRA. */
3224 static unsigned int
3225 early_intra_sra (void)
3227 sra_mode = SRA_MODE_EARLY_INTRA;
3228 return perform_intra_sra ();
3231 /* Perform "late" intraprocedural SRA. */
3232 static unsigned int
3233 late_intra_sra (void)
3235 sra_mode = SRA_MODE_INTRA;
3236 return perform_intra_sra ();
3240 static bool
3241 gate_intra_sra (void)
3243 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3247 struct gimple_opt_pass pass_sra_early =
3250 GIMPLE_PASS,
3251 "esra", /* name */
3252 gate_intra_sra, /* gate */
3253 early_intra_sra, /* execute */
3254 NULL, /* sub */
3255 NULL, /* next */
3256 0, /* static_pass_number */
3257 TV_TREE_SRA, /* tv_id */
3258 PROP_cfg | PROP_ssa, /* properties_required */
3259 0, /* properties_provided */
3260 0, /* properties_destroyed */
3261 0, /* todo_flags_start */
3262 TODO_update_ssa
3263 | TODO_ggc_collect
3264 | TODO_verify_ssa /* todo_flags_finish */
3268 struct gimple_opt_pass pass_sra =
3271 GIMPLE_PASS,
3272 "sra", /* name */
3273 gate_intra_sra, /* gate */
3274 late_intra_sra, /* execute */
3275 NULL, /* sub */
3276 NULL, /* next */
3277 0, /* static_pass_number */
3278 TV_TREE_SRA, /* tv_id */
3279 PROP_cfg | PROP_ssa, /* properties_required */
3280 0, /* properties_provided */
3281 0, /* properties_destroyed */
3282 TODO_update_address_taken, /* todo_flags_start */
3283 TODO_update_ssa
3284 | TODO_ggc_collect
3285 | TODO_verify_ssa /* todo_flags_finish */
/* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3291 parameter. */
3293 static bool
3294 is_unused_scalar_param (tree parm)
3296 tree name;
3297 return (is_gimple_reg (parm)
3298 && (!(name = gimple_default_def (cfun, parm))
3299 || has_zero_uses (name)));
3302 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3303 examine whether there are any direct or otherwise infeasible ones. If so,
3304 return true, otherwise return false. PARM must be a gimple register with a
3305 non-NULL default definition. */
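/* Editorial examples of what the walk below accepts and rejects for a
   parameter int *p: a use such as

     tmp_1 = *p;

   is a plain dereference and is acceptable, whereas

     q_2 = p;
     foo (p);

   use the pointer value itself and make this function return true, because
   the caller could then no longer simply pass the pointed-to data by
   value.  */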
3307 static bool
3308 ptr_parm_has_direct_uses (tree parm)
3310 imm_use_iterator ui;
3311 gimple stmt;
3312 tree name = gimple_default_def (cfun, parm);
3313 bool ret = false;
3315 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3317 int uses_ok = 0;
3318 use_operand_p use_p;
3320 if (is_gimple_debug (stmt))
3321 continue;
3323 /* Valid uses include dereferences on the lhs and the rhs. */
3324 if (gimple_has_lhs (stmt))
3326 tree lhs = gimple_get_lhs (stmt);
3327 while (handled_component_p (lhs))
3328 lhs = TREE_OPERAND (lhs, 0);
3329 if (TREE_CODE (lhs) == MEM_REF
3330 && TREE_OPERAND (lhs, 0) == name
3331 && integer_zerop (TREE_OPERAND (lhs, 1))
3332 && types_compatible_p (TREE_TYPE (lhs),
3333 TREE_TYPE (TREE_TYPE (name)))
3334 && !TREE_THIS_VOLATILE (lhs))
3335 uses_ok++;
3337 if (gimple_assign_single_p (stmt))
3339 tree rhs = gimple_assign_rhs1 (stmt);
3340 while (handled_component_p (rhs))
3341 rhs = TREE_OPERAND (rhs, 0);
3342 if (TREE_CODE (rhs) == MEM_REF
3343 && TREE_OPERAND (rhs, 0) == name
3344 && integer_zerop (TREE_OPERAND (rhs, 1))
3345 && types_compatible_p (TREE_TYPE (rhs),
3346 TREE_TYPE (TREE_TYPE (name)))
3347 && !TREE_THIS_VOLATILE (rhs))
3348 uses_ok++;
3350 else if (is_gimple_call (stmt))
3352 unsigned i;
3353 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3355 tree arg = gimple_call_arg (stmt, i);
3356 while (handled_component_p (arg))
3357 arg = TREE_OPERAND (arg, 0);
3358 if (TREE_CODE (arg) == MEM_REF
3359 && TREE_OPERAND (arg, 0) == name
3360 && integer_zerop (TREE_OPERAND (arg, 1))
3361 && types_compatible_p (TREE_TYPE (arg),
3362 TREE_TYPE (TREE_TYPE (name)))
3363 && !TREE_THIS_VOLATILE (arg))
3364 uses_ok++;
3368 /* If the number of valid uses does not match the number of
3369 uses in this stmt there is an unhandled use. */
3370 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3371 --uses_ok;
3373 if (uses_ok != 0)
3374 ret = true;
3376 if (ret)
3377 BREAK_FROM_IMM_USE_STMT (ui);
3380 return ret;
3383 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3384 them in candidate_bitmap. Note that these do not necessarily include
parameters which are unused and thus can be removed. Return true iff any
3386 such candidate has been found. */
3388 static bool
3389 find_param_candidates (void)
3391 tree parm;
3392 int count = 0;
3393 bool ret = false;
3394 const char *msg;
3396 for (parm = DECL_ARGUMENTS (current_function_decl);
3397 parm;
3398 parm = DECL_CHAIN (parm))
3400 tree type = TREE_TYPE (parm);
3402 count++;
3404 if (TREE_THIS_VOLATILE (parm)
3405 || TREE_ADDRESSABLE (parm)
3406 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3407 continue;
3409 if (is_unused_scalar_param (parm))
3411 ret = true;
3412 continue;
3415 if (POINTER_TYPE_P (type))
3417 type = TREE_TYPE (type);
3419 if (TREE_CODE (type) == FUNCTION_TYPE
3420 || TYPE_VOLATILE (type)
3421 || (TREE_CODE (type) == ARRAY_TYPE
3422 && TYPE_NONALIASED_COMPONENT (type))
3423 || !is_gimple_reg (parm)
3424 || is_va_list_type (type)
3425 || ptr_parm_has_direct_uses (parm))
3426 continue;
3428 else if (!AGGREGATE_TYPE_P (type))
3429 continue;
3431 if (!COMPLETE_TYPE_P (type)
3432 || !host_integerp (TYPE_SIZE (type), 1)
3433 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3434 || (AGGREGATE_TYPE_P (type)
3435 && type_internals_preclude_sra_p (type, &msg)))
3436 continue;
3438 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3439 ret = true;
3440 if (dump_file && (dump_flags & TDF_DETAILS))
3442 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3443 print_generic_expr (dump_file, parm, 0);
3444 fprintf (dump_file, "\n");
3448 func_param_count = count;
3449 return ret;
3452 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3453 maybe_modified. */
3455 static bool
3456 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3457 void *data)
3459 struct access *repr = (struct access *) data;
3461 repr->grp_maybe_modified = 1;
3462 return true;
3465 /* Analyze what representatives (in linked lists accessible from
3466 REPRESENTATIVES) can be modified by side effects of statements in the
3467 current function. */
3469 static void
3470 analyze_modified_params (VEC (access_p, heap) *representatives)
3472 int i;
3474 for (i = 0; i < func_param_count; i++)
3476 struct access *repr;
3478 for (repr = VEC_index (access_p, representatives, i);
3479 repr;
3480 repr = repr->next_grp)
3482 struct access *access;
3483 bitmap visited;
3484 ao_ref ar;
3486 if (no_accesses_p (repr))
3487 continue;
3488 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3489 || repr->grp_maybe_modified)
3490 continue;
3492 ao_ref_init (&ar, repr->expr);
3493 visited = BITMAP_ALLOC (NULL);
3494 for (access = repr; access; access = access->next_sibling)
3496 /* All accesses are read ones, otherwise grp_maybe_modified would
3497 be trivially set. */
3498 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3499 mark_maybe_modified, repr, &visited);
3500 if (repr->grp_maybe_modified)
3501 break;
3503 BITMAP_FREE (visited);
3508 /* Propagate distances in bb_dereferences in the opposite direction than the
3509 control flow edges, in each step storing the maximum of the current value
3510 and the minimum of all successors. These steps are repeated until the table
stabilizes. Note that BBs which might terminate the function (according to
the final_bbs bitmap) are never updated in this way. */
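/* Expressed as an editorial sketch of the step performed below, for each
   parameter I and each block BB not in final_bbs (using two-dimensional
   notation for the flat bb_dereferences array):

     bb_dereferences[BB][I]
       = MAX (bb_dereferences[BB][I],
              MIN over all successors S of BB of bb_dereferences[S][I]);

   and the predecessors of BB are re-queued whenever the value grows.  */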
3514 static void
3515 propagate_dereference_distances (void)
3517 VEC (basic_block, heap) *queue;
3518 basic_block bb;
3520 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3521 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3522 FOR_EACH_BB (bb)
3524 VEC_quick_push (basic_block, queue, bb);
3525 bb->aux = bb;
3528 while (!VEC_empty (basic_block, queue))
3530 edge_iterator ei;
3531 edge e;
3532 bool change = false;
3533 int i;
3535 bb = VEC_pop (basic_block, queue);
3536 bb->aux = NULL;
3538 if (bitmap_bit_p (final_bbs, bb->index))
3539 continue;
3541 for (i = 0; i < func_param_count; i++)
3543 int idx = bb->index * func_param_count + i;
3544 bool first = true;
3545 HOST_WIDE_INT inh = 0;
3547 FOR_EACH_EDGE (e, ei, bb->succs)
3549 int succ_idx = e->dest->index * func_param_count + i;
3551 if (e->src == EXIT_BLOCK_PTR)
3552 continue;
3554 if (first)
3556 first = false;
3557 inh = bb_dereferences [succ_idx];
3559 else if (bb_dereferences [succ_idx] < inh)
3560 inh = bb_dereferences [succ_idx];
3563 if (!first && bb_dereferences[idx] < inh)
3565 bb_dereferences[idx] = inh;
3566 change = true;
3570 if (change && !bitmap_bit_p (final_bbs, bb->index))
3571 FOR_EACH_EDGE (e, ei, bb->preds)
3573 if (e->src->aux)
3574 continue;
3576 e->src->aux = e->src;
3577 VEC_quick_push (basic_block, queue, e->src);
3581 VEC_free (basic_block, heap, queue);
3584 /* Dump a dereferences TABLE with heading STR to file F. */
3586 static void
3587 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3589 basic_block bb;
fprintf (f, "%s", str);
3592 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3594 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3595 if (bb != EXIT_BLOCK_PTR)
3597 int i;
3598 for (i = 0; i < func_param_count; i++)
3600 int idx = bb->index * func_param_count + i;
3601 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3604 fprintf (f, "\n");
fprintf (f, "\n");
/* Determine which (parts of) parameters passed by reference and not assigned
to are not certainly dereferenced in this function, so that the dereferencing
cannot be safely moved to the caller without potentially introducing a
segfault. Mark such REPRESENTATIVES as grp_not_necessarilly_dereferenced.

The maximum dereferenced "distance," i.e. the offset + size of the accessed
part, is calculated for each pointer parameter rather than a simple boolean,
in order to handle cases when only a fraction of the whole
3618 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3619 an example).
3621 The maximum dereference distances for each pointer parameter and BB are
already stored in bb_dereferences. This routine simply propagates these
3623 values upwards by propagate_dereference_distances and then compares the
3624 distances of individual parameters in the ENTRY BB to the equivalent
3625 distances of each representative of a (fraction of a) parameter. */
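/* An editorial example in the spirit of that testcase: if a callee
   dereferences *p only inside one branch of a conditional, the propagated
   distance for p in the ENTRY block remains 0, so the representative with
   offset + size greater than 0 is marked grp_not_necessarilly_dereferenced
   and the dereference is not moved to the callers.  */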
3627 static void
3628 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3630 int i;
3632 if (dump_file && (dump_flags & TDF_DETAILS))
3633 dump_dereferences_table (dump_file,
3634 "Dereference table before propagation:\n",
3635 bb_dereferences);
3637 propagate_dereference_distances ();
3639 if (dump_file && (dump_flags & TDF_DETAILS))
3640 dump_dereferences_table (dump_file,
3641 "Dereference table after propagation:\n",
3642 bb_dereferences);
3644 for (i = 0; i < func_param_count; i++)
3646 struct access *repr = VEC_index (access_p, representatives, i);
3647 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3649 if (!repr || no_accesses_p (repr))
3650 continue;
3654 if ((repr->offset + repr->size) > bb_dereferences[idx])
3655 repr->grp_not_necessarilly_dereferenced = 1;
3656 repr = repr->next_grp;
3658 while (repr);
3662 /* Return the representative access for the parameter declaration PARM if it is
3663 a scalar passed by reference which is not written to and the pointer value
3664 is not used directly. Thus, if it is legal to dereference it in the caller
and we can rule out modifications through aliases, such a parameter should be
3666 turned into one passed by value. Return NULL otherwise. */
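/* An editorial illustration of the transformation this enables (p_val being
   a hypothetical name): a function such as

     int f (const int *p) { return *p + 1; }

   which never writes through p and never uses the value of p itself can be
   rewritten by IPA-SRA to

     int f (int p_val) { return p_val + 1; }

   with all callers adjusted to pass *p by value.  */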
3668 static struct access *
3669 unmodified_by_ref_scalar_representative (tree parm)
3671 int i, access_count;
3672 struct access *repr;
3673 VEC (access_p, heap) *access_vec;
3675 access_vec = get_base_access_vector (parm);
3676 gcc_assert (access_vec);
3677 repr = VEC_index (access_p, access_vec, 0);
3678 if (repr->write)
3679 return NULL;
3680 repr->group_representative = repr;
3682 access_count = VEC_length (access_p, access_vec);
3683 for (i = 1; i < access_count; i++)
3685 struct access *access = VEC_index (access_p, access_vec, i);
3686 if (access->write)
3687 return NULL;
3688 access->group_representative = repr;
3689 access->next_sibling = repr->next_sibling;
3690 repr->next_sibling = access;
3693 repr->grp_read = 1;
3694 repr->grp_scalar_ptr = 1;
3695 return repr;
3698 /* Return true iff this access precludes IPA-SRA of the parameter it is
3699 associated with. */
3701 static bool
3702 access_precludes_ipa_sra_p (struct access *access)
3704 /* Avoid issues such as the second simple testcase in PR 42025. The problem
is an incompatible assignment in a call statement (and possibly even in asm
3706 statements). This can be relaxed by using a new temporary but only for
3707 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3708 intraprocedural SRA we deal with this by keeping the old aggregate around,
3709 something we cannot do in IPA-SRA.) */
3710 if (access->write
3711 && (is_gimple_call (access->stmt)
3712 || gimple_code (access->stmt) == GIMPLE_ASM))
3713 return true;
3715 if (STRICT_ALIGNMENT
3716 && tree_non_aligned_mem_p (access->expr, TYPE_ALIGN (access->type)))
3717 return true;
3719 return false;
3723 /* Sort collected accesses for parameter PARM, identify representatives for
3724 each accessed region and link them together. Return NULL if there are
different but overlapping accesses, return the special pointer value meaning
there are no accesses for this parameter if that is the case, and return the
first representative otherwise. Set *RO_GRP if there is a group of accesses
3728 with only read (i.e. no write) accesses. */
3730 static struct access *
3731 splice_param_accesses (tree parm, bool *ro_grp)
3733 int i, j, access_count, group_count;
3734 int agg_size, total_size = 0;
3735 struct access *access, *res, **prev_acc_ptr = &res;
3736 VEC (access_p, heap) *access_vec;
3738 access_vec = get_base_access_vector (parm);
3739 if (!access_vec)
3740 return &no_accesses_representant;
3741 access_count = VEC_length (access_p, access_vec);
3743 VEC_qsort (access_p, access_vec, compare_access_positions);
3745 i = 0;
3746 total_size = 0;
3747 group_count = 0;
3748 while (i < access_count)
3750 bool modification;
3751 tree a1_alias_type;
3752 access = VEC_index (access_p, access_vec, i);
3753 modification = access->write;
3754 if (access_precludes_ipa_sra_p (access))
3755 return NULL;
3756 a1_alias_type = reference_alias_ptr_type (access->expr);
3758 /* Access is about to become group representative unless we find some
3759 nasty overlap which would preclude us from breaking this parameter
3760 apart. */
3762 j = i + 1;
3763 while (j < access_count)
3765 struct access *ac2 = VEC_index (access_p, access_vec, j);
3766 if (ac2->offset != access->offset)
3768 /* All or nothing law for parameters. */
3769 if (access->offset + access->size > ac2->offset)
3770 return NULL;
3771 else
3772 break;
3774 else if (ac2->size != access->size)
3775 return NULL;
3777 if (access_precludes_ipa_sra_p (ac2)
3778 || (ac2->type != access->type
3779 && (TREE_ADDRESSABLE (ac2->type)
3780 || TREE_ADDRESSABLE (access->type)))
3781 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3782 return NULL;
3784 modification |= ac2->write;
3785 ac2->group_representative = access;
3786 ac2->next_sibling = access->next_sibling;
3787 access->next_sibling = ac2;
3788 j++;
3791 group_count++;
3792 access->grp_maybe_modified = modification;
3793 if (!modification)
3794 *ro_grp = true;
3795 *prev_acc_ptr = access;
3796 prev_acc_ptr = &access->next_grp;
3797 total_size += access->size;
3798 i = j;
3801 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3802 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3803 else
3804 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3805 if (total_size >= agg_size)
3806 return NULL;
3808 gcc_assert (group_count > 0);
3809 return res;
3812 /* Decide whether parameters with representative accesses given by REPR should
3813 be reduced into components. */
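/* A worked editorial example of the size test below: for a by-reference
   parameter pointing to a 128-bit aggregate, with two read-only and
   certainly dereferenced 32-bit components, total_size is 64.  With 64-bit
   pointers and the default --param ipa-sra-ptr-growth-factor=2,
   parm_size_limit is 128, so both total_size < agg_size and
   total_size <= parm_size_limit hold and the parameter is split into two
   components.  */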
3815 static int
3816 decide_one_param_reduction (struct access *repr)
3818 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3819 bool by_ref;
3820 tree parm;
3822 parm = repr->base;
3823 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3824 gcc_assert (cur_parm_size > 0);
3826 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3828 by_ref = true;
3829 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3831 else
3833 by_ref = false;
3834 agg_size = cur_parm_size;
3837 if (dump_file)
3839 struct access *acc;
3840 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3841 print_generic_expr (dump_file, parm, 0);
3842 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3843 for (acc = repr; acc; acc = acc->next_grp)
3844 dump_access (dump_file, acc, true);
3847 total_size = 0;
3848 new_param_count = 0;
3850 for (; repr; repr = repr->next_grp)
3852 gcc_assert (parm == repr->base);
3854 /* Taking the address of a non-addressable field is verboten. */
3855 if (by_ref && repr->non_addressable)
3856 return 0;
3858 if (!by_ref || (!repr->grp_maybe_modified
3859 && !repr->grp_not_necessarilly_dereferenced))
3860 total_size += repr->size;
3861 else
3862 total_size += cur_parm_size;
3864 new_param_count++;
3867 gcc_assert (new_param_count > 0);
3869 if (optimize_function_for_size_p (cfun))
3870 parm_size_limit = cur_parm_size;
3871 else
3872 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3873 * cur_parm_size);
3875 if (total_size < agg_size
3876 && total_size <= parm_size_limit)
3878 if (dump_file)
3879 fprintf (dump_file, " ....will be split into %i components\n",
3880 new_param_count);
3881 return new_param_count;
3883 else
3884 return 0;
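/* A numeric sketch under assumed sizes (in bits, as TYPE_SIZE is): for
   a 128-bit by-value struct of which only two 32-bit fields are used,
   total_size is 64 while agg_size and cur_parm_size are both 128.
   Assuming a growth factor of 2 when optimizing for speed, the limit is
   256; 64 < 128 and 64 <= 256, so the function above would split the
   parameter into two components.  Under -Os the limit drops to
   cur_parm_size itself.  */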
3887 /* The order of the following enumerators is important; we need to do extra
3888 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3889 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3890 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3892 /* Identify representatives of all accesses to all candidate parameters for
3893 IPA-SRA. Return a result based on what representatives have been found. */
3895 static enum ipa_splicing_result
3896 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3898 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3899 tree parm;
3900 struct access *repr;
3902 *representatives = VEC_alloc (access_p, heap, func_param_count);
3904 for (parm = DECL_ARGUMENTS (current_function_decl);
3905 parm;
3906 parm = DECL_CHAIN (parm))
3908 if (is_unused_scalar_param (parm))
3910 VEC_quick_push (access_p, *representatives,
3911 &no_accesses_representant);
3912 if (result == NO_GOOD_ACCESS)
3913 result = UNUSED_PARAMS;
3915 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3916 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3917 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3919 repr = unmodified_by_ref_scalar_representative (parm);
3920 VEC_quick_push (access_p, *representatives, repr);
3921 if (repr)
3922 result = UNMODIF_BY_REF_ACCESSES;
3924 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3926 bool ro_grp = false;
3927 repr = splice_param_accesses (parm, &ro_grp);
3928 VEC_quick_push (access_p, *representatives, repr);
3930 if (repr && !no_accesses_p (repr))
3932 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3934 if (ro_grp)
3935 result = UNMODIF_BY_REF_ACCESSES;
3936 else if (result < MODIF_BY_REF_ACCESSES)
3937 result = MODIF_BY_REF_ACCESSES;
3939 else if (result < BY_VAL_ACCESSES)
3940 result = BY_VAL_ACCESSES;
3942 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3943 result = UNUSED_PARAMS;
3945 else
3946 VEC_quick_push (access_p, *representatives, NULL);
3949 if (result == NO_GOOD_ACCESS)
3951 VEC_free (access_p, heap, *representatives);
3952 *representatives = NULL;
3953 return NO_GOOD_ACCESS;
3956 return result;
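/* For example (a hypothetical mix of parameters): a function with one
   unused scalar parameter and one unmodified by-reference scalar
   parameter ends up in state UNMODIF_BY_REF_ACCESSES, because the loop
   above only ever moves the result towards later enumerators; the
   strongest kind of access found determines the overall outcome.  */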
3959 /* Return the index of BASE in PARMS. Abort if it is not found. */
3961 static inline int
3962 get_param_index (tree base, VEC(tree, heap) *parms)
3964 int i, len;
3966 len = VEC_length (tree, parms);
3967 for (i = 0; i < len; i++)
3968 if (VEC_index (tree, parms, i) == base)
3969 return i;
3970 gcc_unreachable ();
3973 /* Convert the decisions made at the representative level into compact
3974 parameter adjustments. REPRESENTATIVES holds pointers to the first
3975 representative of each parameter's accesses; ADJUSTMENTS_COUNT is the
3976 expected final number of adjustments. */
3978 static ipa_parm_adjustment_vec
3979 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3980 int adjustments_count)
3982 VEC (tree, heap) *parms;
3983 ipa_parm_adjustment_vec adjustments;
3984 tree parm;
3985 int i;
3987 gcc_assert (adjustments_count > 0);
3988 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3989 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3990 parm = DECL_ARGUMENTS (current_function_decl);
3991 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3993 struct access *repr = VEC_index (access_p, representatives, i);
3995 if (!repr || no_accesses_p (repr))
3997 struct ipa_parm_adjustment *adj;
3999 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4000 memset (adj, 0, sizeof (*adj));
4001 adj->base_index = get_param_index (parm, parms);
4002 adj->base = parm;
4003 if (!repr)
4004 adj->copy_param = 1;
4005 else
4006 adj->remove_param = 1;
4008 else
4010 struct ipa_parm_adjustment *adj;
4011 int index = get_param_index (parm, parms);
4013 for (; repr; repr = repr->next_grp)
4015 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4016 memset (adj, 0, sizeof (*adj));
4017 gcc_assert (repr->base == parm);
4018 adj->base_index = index;
4019 adj->base = repr->base;
4020 adj->type = repr->type;
4021 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4022 adj->offset = repr->offset;
4023 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4024 && (repr->grp_maybe_modified
4025 || repr->grp_not_necessarilly_dereferenced));
4030 VEC_free (tree, heap, parms);
4031 return adjustments;
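/* Layout sketch (hypothetical function): for f (struct S s, int dead),
   where s is split into two components and dead is never used, the
   vector receives three entries: two sharing base_index 0 that carry
   the type and offset of each new piece of s, and one with base_index 1
   and remove_param set.  A parameter left untouched would instead get a
   single entry with copy_param set.  */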
4034 /* Analyze the collected accesses and produce a plan for what to do with the
4035 parameters in the form of adjustments, NULL meaning nothing is to change. */
4037 static ipa_parm_adjustment_vec
4038 analyze_all_param_acesses (void)
4040 enum ipa_splicing_result repr_state;
4041 bool proceed = false;
4042 int i, adjustments_count = 0;
4043 VEC (access_p, heap) *representatives;
4044 ipa_parm_adjustment_vec adjustments;
4046 repr_state = splice_all_param_accesses (&representatives);
4047 if (repr_state == NO_GOOD_ACCESS)
4048 return NULL;
4050 /* If there are any parameters passed by reference which are not modified
4051 directly, we need to check whether they can be modified indirectly. */
4052 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4054 analyze_caller_dereference_legality (representatives);
4055 analyze_modified_params (representatives);
4058 for (i = 0; i < func_param_count; i++)
4060 struct access *repr = VEC_index (access_p, representatives, i);
4062 if (repr && !no_accesses_p (repr))
4064 if (repr->grp_scalar_ptr)
4066 adjustments_count++;
4067 if (repr->grp_not_necessarilly_dereferenced
4068 || repr->grp_maybe_modified)
4069 VEC_replace (access_p, representatives, i, NULL);
4070 else
4072 proceed = true;
4073 sra_stats.scalar_by_ref_to_by_val++;
4076 else
4078 int new_components = decide_one_param_reduction (repr);
4080 if (new_components == 0)
4082 VEC_replace (access_p, representatives, i, NULL);
4083 adjustments_count++;
4085 else
4087 adjustments_count += new_components;
4088 sra_stats.aggregate_params_reduced++;
4089 sra_stats.param_reductions_created += new_components;
4090 proceed = true;
4094 else
4096 if (no_accesses_p (repr))
4098 proceed = true;
4099 sra_stats.deleted_unused_parameters++;
4101 adjustments_count++;
4105 if (!proceed && dump_file)
4106 fprintf (dump_file, "NOT proceeding to change params.\n");
4108 if (proceed)
4109 adjustments = turn_representatives_into_adjustments (representatives,
4110 adjustments_count);
4111 else
4112 adjustments = NULL;
4114 VEC_free (access_p, heap, representatives);
4115 return adjustments;
4118 /* If a parameter replacement identified by ADJ does not yet exist in the form
4119 of a declaration, create and record it; otherwise return the previously
4120 created one. */
4122 static tree
4123 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4125 tree repl;
4126 if (!adj->new_ssa_base)
4128 char *pretty_name = make_fancy_name (adj->base);
4130 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4131 DECL_NAME (repl) = get_identifier (pretty_name);
4132 obstack_free (&name_obstack, pretty_name);
4134 add_referenced_var (repl);
4135 adj->new_ssa_base = repl;
4137 else
4138 repl = adj->new_ssa_base;
4139 return repl;
4142 /* Find the first adjustment for a particular parameter BASE in a vector of
4143 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4144 adjustment. */
4146 static struct ipa_parm_adjustment *
4147 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4149 int i, len;
4151 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4152 for (i = 0; i < len; i++)
4154 struct ipa_parm_adjustment *adj;
4156 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4157 if (!adj->copy_param && adj->base == base)
4158 return adj;
4161 return NULL;
4164 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4165 removed because its value is not used, replace the SSA_NAME, together with
4166 all of its uses, with one based on a created VAR_DECL, and return true.
4167 ADJUSTMENTS is a vector of adjustments. */
4169 static bool
4170 replace_removed_params_ssa_names (gimple stmt,
4171 ipa_parm_adjustment_vec adjustments)
4173 struct ipa_parm_adjustment *adj;
4174 tree lhs, decl, repl, name;
4176 if (gimple_code (stmt) == GIMPLE_PHI)
4177 lhs = gimple_phi_result (stmt);
4178 else if (is_gimple_assign (stmt))
4179 lhs = gimple_assign_lhs (stmt);
4180 else if (is_gimple_call (stmt))
4181 lhs = gimple_call_lhs (stmt);
4182 else
4183 gcc_unreachable ();
4185 if (TREE_CODE (lhs) != SSA_NAME)
4186 return false;
4187 decl = SSA_NAME_VAR (lhs);
4188 if (TREE_CODE (decl) != PARM_DECL)
4189 return false;
4191 adj = get_adjustment_for_base (adjustments, decl);
4192 if (!adj)
4193 return false;
4195 repl = get_replaced_param_substitute (adj);
4196 name = make_ssa_name (repl, stmt);
4198 if (dump_file)
4200 fprintf (dump_file, "replacing an SSA name of a removed param ");
4201 print_generic_expr (dump_file, lhs, 0);
4202 fprintf (dump_file, " with ");
4203 print_generic_expr (dump_file, name, 0);
4204 fprintf (dump_file, "\n");
4207 if (is_gimple_assign (stmt))
4208 gimple_assign_set_lhs (stmt, name);
4209 else if (is_gimple_call (stmt))
4210 gimple_call_set_lhs (stmt, name);
4211 else
4212 gimple_phi_set_result (stmt, name);
4214 replace_uses_by (lhs, name);
4215 release_ssa_name (lhs);
4216 return true;
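/* Illustration (SSA names hypothetical): if parameter i is being
   removed but a statement still defines i_7 = PHI <i_3, i_5>, the
   result is rebased onto a fresh "ISR" temporary, all uses of i_7 are
   redirected to the new name, and i_7 is released, so no SSA name of
   the vanished PARM_DECL survives.  */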
4219 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4220 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4221 specifies whether the function should care about type incompatibility
4222 between the current and new expressions. If it is false, the function will
4223 leave incompatibility issues to the caller. Return true iff the expression
4224 was modified. */
4226 static bool
4227 sra_ipa_modify_expr (tree *expr, bool convert,
4228 ipa_parm_adjustment_vec adjustments)
4230 int i, len;
4231 struct ipa_parm_adjustment *adj, *cand = NULL;
4232 HOST_WIDE_INT offset, size, max_size;
4233 tree base, src;
4235 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4237 if (TREE_CODE (*expr) == BIT_FIELD_REF
4238 || TREE_CODE (*expr) == IMAGPART_EXPR
4239 || TREE_CODE (*expr) == REALPART_EXPR)
4241 expr = &TREE_OPERAND (*expr, 0);
4242 convert = true;
4245 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4246 if (!base || size == -1 || max_size == -1)
4247 return false;
4249 if (TREE_CODE (base) == MEM_REF)
4251 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4252 base = TREE_OPERAND (base, 0);
4255 base = get_ssa_base_param (base);
4256 if (!base || TREE_CODE (base) != PARM_DECL)
4257 return false;
4259 for (i = 0; i < len; i++)
4261 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4263 if (adj->base == base &&
4264 (adj->offset == offset || adj->remove_param))
4266 cand = adj;
4267 break;
4270 if (!cand || cand->copy_param || cand->remove_param)
4271 return false;
4273 if (cand->by_ref)
4274 src = build_simple_mem_ref (cand->reduction);
4275 else
4276 src = cand->reduction;
4278 if (dump_file && (dump_flags & TDF_DETAILS))
4280 fprintf (dump_file, "About to replace expr ");
4281 print_generic_expr (dump_file, *expr, 0);
4282 fprintf (dump_file, " with ");
4283 print_generic_expr (dump_file, src, 0);
4284 fprintf (dump_file, "\n");
4287 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4289 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4290 *expr = vce;
4292 else
4293 *expr = src;
4294 return true;
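/* Example sketch (field and replacement names hypothetical): with an
   adjustment describing the piece of parameter p at OFFSET, an
   expression such as p->f in the body is rewritten to the new
   replacement parameter, or to a dereference of it when the piece must
   still be passed by reference, with a VIEW_CONVERT_EXPR added when
   CONVERT is set and the types are not trivially compatible.  */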
4297 /* If the statement pointed to by STMT_PTR contains any expressions that need
4298 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4299 any potential type incompatibilities (GSI is used to accommodate conversion
4300 statements and must point to the statement). Return true iff the statement
4301 was modified. */
4303 static bool
4304 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4305 ipa_parm_adjustment_vec adjustments)
4307 gimple stmt = *stmt_ptr;
4308 tree *lhs_p, *rhs_p;
4309 bool any;
4311 if (!gimple_assign_single_p (stmt))
4312 return false;
4314 rhs_p = gimple_assign_rhs1_ptr (stmt);
4315 lhs_p = gimple_assign_lhs_ptr (stmt);
4317 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4318 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4319 if (any)
4321 tree new_rhs = NULL_TREE;
4323 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4325 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4327 /* V_C_Es of constructors can cause trouble (PR 42714). */
4328 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4329 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4330 else
4331 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4333 else
4334 new_rhs = fold_build1_loc (gimple_location (stmt),
4335 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4336 *rhs_p);
4338 else if (REFERENCE_CLASS_P (*rhs_p)
4339 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4340 && !is_gimple_reg (*lhs_p))
4341 /* This can happen when an assignment between two single-field
4342 structures is turned into an assignment between two pointers to
4343 scalars (PR 42237). */
4344 new_rhs = *rhs_p;
4346 if (new_rhs)
4348 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4349 true, GSI_SAME_STMT);
4351 gimple_assign_set_rhs_from_tree (gsi, tmp);
4354 return true;
4357 return false;
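/* Sketch of the PR 42714 case handled above: when the LHS of an
   assignment like s = {} has been rewritten to a scalar, the empty
   CONSTRUCTOR cannot simply be wrapped in a VIEW_CONVERT_EXPR; the RHS
   is replaced with a zero constant (or an empty constructor of the new
   type) instead.  */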
4360 /* Traverse the function body and perform all modifications as described in
4361 ADJUSTMENTS. Return true iff the CFG has been changed. */
4363 static bool
4364 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4366 bool cfg_changed = false;
4367 basic_block bb;
4369 FOR_EACH_BB (bb)
4371 gimple_stmt_iterator gsi;
4373 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4374 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4376 gsi = gsi_start_bb (bb);
4377 while (!gsi_end_p (gsi))
4379 gimple stmt = gsi_stmt (gsi);
4380 bool modified = false;
4381 tree *t;
4382 unsigned i;
4384 switch (gimple_code (stmt))
4386 case GIMPLE_RETURN:
4387 t = gimple_return_retval_ptr (stmt);
4388 if (*t != NULL_TREE)
4389 modified |= sra_ipa_modify_expr (t, true, adjustments);
4390 break;
4392 case GIMPLE_ASSIGN:
4393 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4394 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4395 break;
4397 case GIMPLE_CALL:
4398 /* Operands must be processed before the lhs. */
4399 for (i = 0; i < gimple_call_num_args (stmt); i++)
4401 t = gimple_call_arg_ptr (stmt, i);
4402 modified |= sra_ipa_modify_expr (t, true, adjustments);
4405 if (gimple_call_lhs (stmt))
4407 t = gimple_call_lhs_ptr (stmt);
4408 modified |= sra_ipa_modify_expr (t, false, adjustments);
4409 modified |= replace_removed_params_ssa_names (stmt,
4410 adjustments);
4412 break;
4414 case GIMPLE_ASM:
4415 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4417 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4418 modified |= sra_ipa_modify_expr (t, true, adjustments);
4420 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4422 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4423 modified |= sra_ipa_modify_expr (t, false, adjustments);
4425 break;
4427 default:
4428 break;
4431 if (modified)
4433 update_stmt (stmt);
4434 if (maybe_clean_eh_stmt (stmt)
4435 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4436 cfg_changed = true;
4438 gsi_next (&gsi);
4442 return cfg_changed;
4445 /* Call gimple_debug_bind_reset_value on all debug statements describing
4446 gimple register parameters that are being removed or replaced. */
4448 static void
4449 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4451 int i, len;
4452 gimple_stmt_iterator *gsip = NULL, gsi;
4454 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4456 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4457 gsip = &gsi;
4459 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4460 for (i = 0; i < len; i++)
4462 struct ipa_parm_adjustment *adj;
4463 imm_use_iterator ui;
4464 gimple stmt, def_temp;
4465 tree name, vexpr, copy = NULL_TREE;
4466 use_operand_p use_p;
4468 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4469 if (adj->copy_param || !is_gimple_reg (adj->base))
4470 continue;
4471 name = gimple_default_def (cfun, adj->base);
4472 vexpr = NULL;
4473 if (name)
4474 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4476 /* All other users must have been removed by
4477 ipa_sra_modify_function_body. */
4478 gcc_assert (is_gimple_debug (stmt));
4479 if (vexpr == NULL && gsip != NULL)
4481 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4482 vexpr = make_node (DEBUG_EXPR_DECL);
4483 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4484 NULL);
4485 DECL_ARTIFICIAL (vexpr) = 1;
4486 TREE_TYPE (vexpr) = TREE_TYPE (name);
4487 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4488 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4490 if (vexpr)
4492 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4493 SET_USE (use_p, vexpr);
4495 else
4496 gimple_debug_bind_reset_value (stmt);
4497 update_stmt (stmt);
4499 /* Create a VAR_DECL for debug info purposes. */
4500 if (!DECL_IGNORED_P (adj->base))
4502 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4503 VAR_DECL, DECL_NAME (adj->base),
4504 TREE_TYPE (adj->base));
4505 if (DECL_PT_UID_SET_P (adj->base))
4506 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4507 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4508 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4509 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4510 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4511 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4512 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4513 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4514 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4515 SET_DECL_RTL (copy, 0);
4516 TREE_USED (copy) = 1;
4517 DECL_CONTEXT (copy) = current_function_decl;
4518 add_referenced_var (copy);
4519 add_local_decl (cfun, copy);
4520 DECL_CHAIN (copy) =
4521 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4522 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4524 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4526 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4527 if (vexpr)
4528 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4529 else
4530 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4531 NULL);
4532 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
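/* Illustrative effect (debugger view, names hypothetical): for a
   removed register parameter n, remaining debug uses of its SSA names
   are redirected to a DEBUG_EXPR_DECL source-bound to n, and a VAR_DECL
   named like n is created and bound as well, so inspecting "n" in the
   debugger can still work even though the parameter itself is gone.  */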
4537 /* Return true iff some caller of NODE passes fewer actual arguments than
4538 there are formal parameters in the current function. */
4540 static bool
4541 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4542 void *data ATTRIBUTE_UNUSED)
4544 struct cgraph_edge *cs;
4545 for (cs = node->callers; cs; cs = cs->next_caller)
4546 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4547 return true;
4549 return false;
4552 /* Convert all callers of NODE. */
4554 static bool
4555 convert_callers_for_node (struct cgraph_node *node,
4556 void *data)
4558 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4559 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4560 struct cgraph_edge *cs;
4562 for (cs = node->callers; cs; cs = cs->next_caller)
4564 current_function_decl = cs->caller->decl;
4565 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4567 if (dump_file)
4568 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4569 cs->caller->uid, cs->callee->uid,
4570 cgraph_node_name (cs->caller),
4571 cgraph_node_name (cs->callee));
4573 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4575 pop_cfun ();
4578 for (cs = node->callers; cs; cs = cs->next_caller)
4579 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4580 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4581 compute_inline_parameters (cs->caller, true);
4582 BITMAP_FREE (recomputed_callers);
4584 return true;
4587 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4589 static void
4590 convert_callers (struct cgraph_node *node, tree old_decl,
4591 ipa_parm_adjustment_vec adjustments)
4593 tree old_cur_fndecl = current_function_decl;
4594 basic_block this_block;
4596 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4597 adjustments, false);
4599 current_function_decl = old_cur_fndecl;
4601 if (!encountered_recursive_call)
4602 return;
4604 FOR_EACH_BB (this_block)
4606 gimple_stmt_iterator gsi;
4608 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4610 gimple stmt = gsi_stmt (gsi);
4611 tree call_fndecl;
4612 if (gimple_code (stmt) != GIMPLE_CALL)
4613 continue;
4614 call_fndecl = gimple_call_fndecl (stmt);
4615 if (call_fndecl == old_decl)
4617 if (dump_file)
4618 fprintf (dump_file, "Adjusting recursive call");
4619 gimple_call_set_fndecl (stmt, node->decl);
4620 ipa_modify_call_arguments (NULL, stmt, adjustments);
4625 return;
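/* Sketch: if the original function called itself, the clone produced by
   IPA-SRA initially still calls the old decl; the loop above redirects
   such recursive calls to the new node and rewrites their argument
   lists just like those of ordinary callers.  */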
4628 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4629 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4631 static bool
4632 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4634 struct cgraph_node *new_node;
4635 bool cfg_changed;
4636 VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4638 rebuild_cgraph_edges ();
4639 free_dominance_info (CDI_DOMINATORS);
4640 pop_cfun ();
4641 current_function_decl = NULL_TREE;
4643 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4644 NULL, NULL, "isra");
4645 current_function_decl = new_node->decl;
4646 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4648 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4649 cfg_changed = ipa_sra_modify_function_body (adjustments);
4650 sra_ipa_reset_debug_stmts (adjustments);
4651 convert_callers (new_node, node->decl, adjustments);
4652 cgraph_make_node_local (new_node);
4653 return cfg_changed;
4656 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4657 its attributes, return true otherwise. NODE is the cgraph node of the
4658 current function. */
4660 static bool
4661 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4663 if (!cgraph_node_can_be_local_p (node))
4665 if (dump_file)
4666 fprintf (dump_file, "Function not local to this compilation unit.\n");
4667 return false;
4670 if (!node->local.can_change_signature)
4672 if (dump_file)
4673 fprintf (dump_file, "Function cannot change signature.\n");
4674 return false;
4677 if (!tree_versionable_function_p (node->decl))
4679 if (dump_file)
4680 fprintf (dump_file, "Function is not versionable.\n");
4681 return false;
4684 if (DECL_VIRTUAL_P (current_function_decl))
4686 if (dump_file)
4687 fprintf (dump_file, "Function is a virtual method.\n");
4688 return false;
4691 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4692 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4694 if (dump_file)
4695 fprintf (dump_file, "Function too big to be made truly local.\n");
4696 return false;
4699 if (!node->callers)
4701 if (dump_file)
4702 fprintf (dump_file,
4703 "Function has no callers in this compilation unit.\n");
4704 return false;
4707 if (cfun->stdarg)
4709 if (dump_file)
4710 fprintf (dump_file, "Function uses stdarg.\n");
4711 return false;
4714 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4715 return false;
4717 return true;
4720 /* Perform early interprocedural SRA. */
4722 static unsigned int
4723 ipa_early_sra (void)
4725 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4726 ipa_parm_adjustment_vec adjustments;
4727 int ret = 0;
4729 if (!ipa_sra_preliminary_function_checks (node))
4730 return 0;
4732 sra_initialize ();
4733 sra_mode = SRA_MODE_EARLY_IPA;
4735 if (!find_param_candidates ())
4737 if (dump_file)
4738 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4739 goto simple_out;
4742 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4743 NULL, true))
4745 if (dump_file)
4746 fprintf (dump_file, "There are callers with an insufficient number of "
4747 "arguments.\n");
4748 goto simple_out;
4751 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4752 func_param_count
4753 * last_basic_block_for_function (cfun));
4754 final_bbs = BITMAP_ALLOC (NULL);
4756 scan_function ();
4757 if (encountered_apply_args)
4759 if (dump_file)
4760 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4761 goto out;
4764 if (encountered_unchangable_recursive_call)
4766 if (dump_file)
4767 fprintf (dump_file, "Function calls itself with an insufficient "
4768 "number of arguments.\n");
4769 goto out;
4772 adjustments = analyze_all_param_acesses ();
4773 if (!adjustments)
4774 goto out;
4775 if (dump_file)
4776 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4778 if (modify_function (node, adjustments))
4779 ret = TODO_update_ssa | TODO_cleanup_cfg;
4780 else
4781 ret = TODO_update_ssa;
4782 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4784 statistics_counter_event (cfun, "Unused parameters deleted",
4785 sra_stats.deleted_unused_parameters);
4786 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4787 sra_stats.scalar_by_ref_to_by_val);
4788 statistics_counter_event (cfun, "Aggregate parameters broken up",
4789 sra_stats.aggregate_params_reduced);
4790 statistics_counter_event (cfun, "Aggregate parameter components created",
4791 sra_stats.param_reductions_created);
4793 out:
4794 BITMAP_FREE (final_bbs);
4795 free (bb_dereferences);
4796 simple_out:
4797 sra_deinitialize ();
4798 return ret;
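/* End-to-end sketch (hypothetical input; the clone and parameter names
   merely follow the "isra"/"ISRA" conventions used above).  A function

     static int f (struct S *p) { return p->i; }

   whose parameter is only dereferenced to read p->i becomes roughly

     static int f.isra.0 (int ISRA.1) { return ISRA.1; }

   and every caller is rewritten to load p->i itself and pass the value
   directly.  */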
4801 /* Return true iff early IPA-SRA shall be performed. */
4802 static bool
4803 ipa_early_sra_gate (void)
4805 return flag_ipa_sra && dbg_cnt (eipa_sra);
4808 struct gimple_opt_pass pass_early_ipa_sra =
4811 GIMPLE_PASS,
4812 "eipa_sra", /* name */
4813 ipa_early_sra_gate, /* gate */
4814 ipa_early_sra, /* execute */
4815 NULL, /* sub */
4816 NULL, /* next */
4817 0, /* static_pass_number */
4818 TV_IPA_SRA, /* tv_id */
4819 0, /* properties_required */
4820 0, /* properties_provided */
4821 0, /* properties_destroyed */
4822 0, /* todo_flags_start */
4823 TODO_dump_cgraph /* todo_flags_finish */