/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2014 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
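
/* For illustration only, a conceptual sketch of what intraprocedural SRA may
   do (the replacement names here are invented; the real ones are composed by
   make_fancy_name and create_access_replacement below):

     struct S { int i; float f; } s;       struct S { int i; float f; } s;
     s.i = 7;                         =>   s$i = 7;
     return s.i + 1;                       return s$i + 1;

   assuming no access to s as a whole forces it to stay in memory.  */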
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "ipa-prop.h"
#include "statistics.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
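
/* A sketch for illustration: given

     struct S { int i; struct { float x; float y; } in; } s;

   and accesses to s (as a whole), s.i and s.in.x, the group representatives
   could form the following tree (offsets and sizes in bits, assuming 32-bit
   int and float):

     s         offset 0,  size 96
       s.i     offset 0,  size 32
       s.in.x  offset 32, size 32

   where s.i and s.in.x are children of s, sorted by offset.  The exact shape
   of course depends on the accesses actually found in the function.  */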
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static struct pointer_map_t *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table <uid_decl_hasher> candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates.find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
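
/* For example, with func_param_count == 2, the dereferenced distance of
   parameter 1 in basic block 3 is stored at bb_dereferences[3 * 2 + 1];
   see the index computation in mark_parm_dereference below.  */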
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Various statistics gathered during a run of the pass.  */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(vec<access_p> **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates.create (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  vec<access_p> *access_vec = (vec<access_p> *) *value;
  vec_free (access_vec);
  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  candidates.dispose ();
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates.clear_slot (candidates.find_slot_with_hash (decl,
                                                           DECL_UID (decl),
                                                           NO_INSERT));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  vec<access_p> *v;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    v = (vec<access_p> *) *slot;
  else
    vec_alloc (v, 32);
  v->safe_push (access);

  *((vec<access_p> **)
        pointer_map_insert (base_access_vec, base)) = v;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
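
/* For illustration, under the definition above:

     struct A { int i; struct { double d; } inner; };  -- qualifies
     struct B { int b : 3; };   -- contains a bit-field, does not qualify
     struct C { int a[2]; };    -- array member, does not qualify

   (an informal sketch, assuming the usual C field layout).  */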
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_to_uhwi (DECL_SIZE (fld));
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
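
/* For example, three accesses at the same offset would sort as follows: a
   64-bit double first (bigger sizes come first and scalar types precede
   aggregates), then a 64-bit union containing it, then a 32-bit int.  This
   "scalars first" property is what sort_and_splice_var_accesses below relies
   on when picking group representatives.  */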
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the memory reference EXPR, to be used as
   the basis for the name of a replacement variable.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
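
/* As an illustration, for an access to base.data[3].f the two helpers above
   compose the name "base$data$3$f", which create_access_replacement below
   installs as the DECL_NAME of the new replacement variable.  */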
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
                continue;
              pos = tree_to_uhwi (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !tree_fits_uhwi_p (tr_size))
                continue;
              size = tree_to_uhwi (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !tree_fits_uhwi_p (tr_size))
            return false;
          el_size = tree_to_uhwi (tr_size);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
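
/* For instance, with TYPE being struct { int a; int b[2]; }, 32-bit ints and
   OFFSET 64, the loop above descends through the record into the array and
   rewrites *RES to the equivalent of `res.b[1]' -- assuming the field
   positions and sizes are representable; otherwise the function simply
   returns false.  */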
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
         we also want to schedule it rather late.  Thus we ignore it in
         the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates.find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
1989 /* Create a variable for the given ACCESS which determines the type, name and a
1990 few other properties. Return the variable declaration; callers store it in
1991 ACCESS->replacement_decl. */
1993 static tree
1994 create_access_replacement (struct access *access)
1996 tree repl;
1998 if (access->grp_to_be_debug_replaced)
2000 repl = create_tmp_var_raw (access->type, NULL);
2001 DECL_CONTEXT (repl) = current_function_decl;
2003 else
2004 repl = create_tmp_var (access->type, "SR");
2005 if (TREE_CODE (access->type) == COMPLEX_TYPE
2006 || TREE_CODE (access->type) == VECTOR_TYPE)
2008 if (!access->grp_partial_lhs)
2009 DECL_GIMPLE_REG_P (repl) = 1;
2011 else if (access->grp_partial_lhs
2012 && is_gimple_reg_type (access->type))
2013 TREE_ADDRESSABLE (repl) = 1;
2015 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2016 DECL_ARTIFICIAL (repl) = 1;
2017 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2019 if (DECL_NAME (access->base)
2020 && !DECL_IGNORED_P (access->base)
2021 && !DECL_ARTIFICIAL (access->base))
2023 char *pretty_name = make_fancy_name (access->expr);
2024 tree debug_expr = unshare_expr_without_location (access->expr), d;
2025 bool fail = false;
2027 DECL_NAME (repl) = get_identifier (pretty_name);
2028 obstack_free (&name_obstack, pretty_name);
2030 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2031 as DECL_DEBUG_EXPR isn't considered when looking for still
2032 used SSA_NAMEs and thus they could be freed. All debug info
2033 generation cares about is whether something is constant or variable
2034 and that get_ref_base_and_extent works properly on the
2035 expression. It cannot handle accesses at a non-constant offset
2036 though, so just give up in those cases. */
2037 for (d = debug_expr;
2038 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2039 d = TREE_OPERAND (d, 0))
2040 switch (TREE_CODE (d))
2042 case ARRAY_REF:
2043 case ARRAY_RANGE_REF:
2044 if (TREE_OPERAND (d, 1)
2045 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2046 fail = true;
2047 if (TREE_OPERAND (d, 3)
2048 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2049 fail = true;
2050 /* FALLTHRU */
2051 case COMPONENT_REF:
2052 if (TREE_OPERAND (d, 2)
2053 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2054 fail = true;
2055 break;
2056 case MEM_REF:
2057 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2058 fail = true;
2059 else
2060 d = TREE_OPERAND (d, 0);
2061 break;
2062 default:
2063 break;
2065 if (!fail)
2067 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2068 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2070 if (access->grp_no_warning)
2071 TREE_NO_WARNING (repl) = 1;
2072 else
2073 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2075 else
2076 TREE_NO_WARNING (repl) = 1;
2078 if (dump_file)
2080 if (access->grp_to_be_debug_replaced)
2082 fprintf (dump_file, "Created a debug-only replacement for ");
2083 print_generic_expr (dump_file, access->base, 0);
2084 fprintf (dump_file, " offset: %u, size: %u\n",
2085 (unsigned) access->offset, (unsigned) access->size);
2087 else
2089 fprintf (dump_file, "Created a replacement for ");
2090 print_generic_expr (dump_file, access->base, 0);
2091 fprintf (dump_file, " offset: %u, size: %u: ",
2092 (unsigned) access->offset, (unsigned) access->size);
2093 print_generic_expr (dump_file, repl, 0);
2094 fprintf (dump_file, "\n");
2097 sra_stats.replacements++;
2099 return repl;
2102 /* Return ACCESS' scalar replacement, which must have already been created. */
2104 static inline tree
2105 get_access_replacement (struct access *access)
2107 gcc_checking_assert (access->replacement_decl);
2108 return access->replacement_decl;
2112 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2113 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2114 to by it is not "within" the root. Return false iff some accesses partially
2115 overlap. */
2117 static bool
2118 build_access_subtree (struct access **access)
2120 struct access *root = *access, *last_child = NULL;
2121 HOST_WIDE_INT limit = root->offset + root->size;
2123 *access = (*access)->next_grp;
2124 while (*access && (*access)->offset + (*access)->size <= limit)
2126 if (!last_child)
2127 root->first_child = *access;
2128 else
2129 last_child->next_sibling = *access;
2130 last_child = *access;
2132 if (!build_access_subtree (access))
2133 return false;
2136 if (*access && (*access)->offset < limit)
2137 return false;
2139 return true;
2142 /* Build a tree of access representatives, ACCESS is the pointer to the first
2143 one, others are linked in a list by the next_grp field. Return false iff
2144 some accesses partially overlap. */
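/* Continuing the editorial sketch above: the chain
   <0, 64> -> <0, 32> -> <32, 32> becomes a single tree with <0, 64> as the
   root and the other two representatives as its children, since both of
   them lie entirely within the root's extent.  */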
2146 static bool
2147 build_access_trees (struct access *access)
2149 while (access)
2151 struct access *root = access;
2153 if (!build_access_subtree (&access))
2154 return false;
2155 root->next_grp = access;
2157 return true;
2160 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2161 array. */
2163 static bool
2164 expr_with_var_bounded_array_refs_p (tree expr)
2166 while (handled_component_p (expr))
2168 if (TREE_CODE (expr) == ARRAY_REF
2169 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2170 return true;
2171 expr = TREE_OPERAND (expr, 0);
2173 return false;
2176 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2177 doing so seems beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2178 sorts of access flags appropriately along the way, notably propagate
2179 grp_read, grp_assignment_read, grp_write and grp_assignment_write from
2180 PARENT to ROOT.
2182 Creating a replacement for a scalar access is considered beneficial if its
2183 grp_hint is set (this means we are either attempting total scalarization or
2184 there is more than one direct read access) or according to the following
2185 table:
2187 Access written to through a scalar type (once or more times)
2189 | Written to in an assignment statement
2191 | | Access read as scalar _once_
2192 | | |
2193 | | | Read in an assignment statement
2194 | | | |
2195 | | | | Scalarize Comment
2196 -----------------------------------------------------------------------------
2197 0 0 0 0 No access for the scalar
2198 0 0 0 1 No access for the scalar
2199 0 0 1 0 No Single read - won't help
2200 0 0 1 1 No The same case
2201 0 1 0 0 No access for the scalar
2202 0 1 0 1 No access for the scalar
2203 0 1 1 0 Yes s = *g; return s.i;
2204 0 1 1 1 Yes The same case as above
2205 1 0 0 0 No Won't help
2206 1 0 0 1 Yes s.i = 1; *g = s;
2207 1 0 1 0 Yes s.i = 5; g = s.i;
2208 1 0 1 1 Yes The same case as above
2209 1 1 0 0 No Won't help.
2210 1 1 0 1 Yes s.i = 1; *g = s;
2211 1 1 1 0 Yes s = *g; return s.i;
2212 1 1 1 1 Yes Any of the above yeses */
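/* An editorial illustration of the "1 0 0 1" row above as a complete
   (hypothetical) testcase:

     struct S { int i; };

     void
     foo (struct S *g)
     {
       struct S s;
       s.i = 1;
       *g = s;
     }

   Here s.i is written through a scalar type and s is then read in an
   aggregate assignment, so creating a scalar replacement for s.i is deemed
   beneficial and the store *g = s can be fed from that replacement.  */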
2214 static bool
2215 analyze_access_subtree (struct access *root, struct access *parent,
2216 bool allow_replacements)
2218 struct access *child;
2219 HOST_WIDE_INT limit = root->offset + root->size;
2220 HOST_WIDE_INT covered_to = root->offset;
2221 bool scalar = is_gimple_reg_type (root->type);
2222 bool hole = false, sth_created = false;
2224 if (parent)
2226 if (parent->grp_read)
2227 root->grp_read = 1;
2228 if (parent->grp_assignment_read)
2229 root->grp_assignment_read = 1;
2230 if (parent->grp_write)
2231 root->grp_write = 1;
2232 if (parent->grp_assignment_write)
2233 root->grp_assignment_write = 1;
2234 if (parent->grp_total_scalarization)
2235 root->grp_total_scalarization = 1;
2238 if (root->grp_unscalarizable_region)
2239 allow_replacements = false;
2241 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2242 allow_replacements = false;
2244 for (child = root->first_child; child; child = child->next_sibling)
2246 hole |= covered_to < child->offset;
2247 sth_created |= analyze_access_subtree (child, root,
2248 allow_replacements && !scalar);
2250 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2251 root->grp_total_scalarization &= child->grp_total_scalarization;
2252 if (child->grp_covered)
2253 covered_to += child->size;
2254 else
2255 hole = true;
2258 if (allow_replacements && scalar && !root->first_child
2259 && (root->grp_hint
2260 || ((root->grp_scalar_read || root->grp_assignment_read)
2261 && (root->grp_scalar_write || root->grp_assignment_write))))
2263 /* Always create access replacements that cover the whole access.
2264 For integral types this means the precision has to match.
2265 Avoid assumptions based on the integral type kind, too. */
2266 if (INTEGRAL_TYPE_P (root->type)
2267 && (TREE_CODE (root->type) != INTEGER_TYPE
2268 || TYPE_PRECISION (root->type) != root->size)
2269 /* But leave bitfield accesses alone. */
2270 && (TREE_CODE (root->expr) != COMPONENT_REF
2271 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2273 tree rt = root->type;
2274 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2275 && (root->size % BITS_PER_UNIT) == 0);
2276 root->type = build_nonstandard_integer_type (root->size,
2277 TYPE_UNSIGNED (rt));
2278 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2279 root->base, root->offset,
2280 root->type, NULL, false);
2282 if (dump_file && (dump_flags & TDF_DETAILS))
2284 fprintf (dump_file, "Changing the type of a replacement for ");
2285 print_generic_expr (dump_file, root->base, 0);
2286 fprintf (dump_file, " offset: %u, size: %u ",
2287 (unsigned) root->offset, (unsigned) root->size);
2288 fprintf (dump_file, " to an integer.\n");
2292 root->grp_to_be_replaced = 1;
2293 root->replacement_decl = create_access_replacement (root);
2294 sth_created = true;
2295 hole = false;
2297 else
2299 if (allow_replacements
2300 && scalar && !root->first_child
2301 && (root->grp_scalar_write || root->grp_assignment_write)
2302 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2303 DECL_UID (root->base)))
2305 gcc_checking_assert (!root->grp_scalar_read
2306 && !root->grp_assignment_read);
2307 sth_created = true;
2308 if (MAY_HAVE_DEBUG_STMTS)
2310 root->grp_to_be_debug_replaced = 1;
2311 root->replacement_decl = create_access_replacement (root);
2315 if (covered_to < limit)
2316 hole = true;
2317 if (scalar)
2318 root->grp_total_scalarization = 0;
2321 if (!hole || root->grp_total_scalarization)
2322 root->grp_covered = 1;
2323 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2324 root->grp_unscalarized_data = 1; /* not covered and written to */
2325 return sth_created;
2328 /* Analyze all access trees linked by next_grp by the means of
2329 analyze_access_subtree. */
2330 static bool
2331 analyze_access_trees (struct access *access)
2333 bool ret = false;
2335 while (access)
2337 if (analyze_access_subtree (access, NULL, true))
2338 ret = true;
2339 access = access->next_grp;
2342 return ret;
2345 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2346 with size SIZE would conflict with an already existing one. If exactly such
2347 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2349 static bool
2350 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2351 HOST_WIDE_INT size, struct access **exact_match)
2353 struct access *child;
2355 for (child = lacc->first_child; child; child = child->next_sibling)
2357 if (child->offset == norm_offset && child->size == size)
2359 *exact_match = child;
2360 return true;
2363 if (child->offset < norm_offset + size
2364 && child->offset + child->size > norm_offset)
2365 return true;
2368 return false;
2371 /* Create a new child access of PARENT, with all properties just like MODEL
2372 except for its offset and with its grp_write false and grp_read true.
2373 Return the new access or NULL if it cannot be created. Note that this access
2374 is created long after all splicing and sorting, so it is not located in any
2375 access vector and is automatically a representative of its group. */
2377 static struct access *
2378 create_artificial_child_access (struct access *parent, struct access *model,
2379 HOST_WIDE_INT new_offset)
2381 struct access *access;
2382 struct access **child;
2383 tree expr = parent->base;
2385 gcc_assert (!model->grp_unscalarizable_region);
2387 access = (struct access *) pool_alloc (access_pool);
2388 memset (access, 0, sizeof (struct access));
2389 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2390 model->type))
2392 access->grp_no_warning = true;
2393 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2394 new_offset, model, NULL, false);
2397 access->base = parent->base;
2398 access->expr = expr;
2399 access->offset = new_offset;
2400 access->size = model->size;
2401 access->type = model->type;
2402 access->grp_write = true;
2403 access->grp_read = false;
2405 child = &parent->first_child;
2406 while (*child && (*child)->offset < new_offset)
2407 child = &(*child)->next_sibling;
2409 access->next_sibling = *child;
2410 *child = access;
2412 return access;
2416 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2417 true if any new subaccess was created. Additionally, if RACC is a scalar
2418 access but LACC is not, change the type of the latter, if possible. */
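/* An editorial sketch (hypothetical types and variables): given

     struct S { int i; int j; } a, b;

   and the statements

     a = b;
     ... = b.j;

   the access b.j is a subaccess of the RHS of the aggregate assignment.
   This function then creates a matching artificial child access of a at the
   offset of j, so that the assignment can later be rewritten into copies
   between individual scalar replacements.  */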
2420 static bool
2421 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2423 struct access *rchild;
2424 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2425 bool ret = false;
2427 if (is_gimple_reg_type (lacc->type)
2428 || lacc->grp_unscalarizable_region
2429 || racc->grp_unscalarizable_region)
2430 return false;
2432 if (is_gimple_reg_type (racc->type))
2434 if (!lacc->first_child && !racc->first_child)
2436 tree t = lacc->base;
2438 lacc->type = racc->type;
2439 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2440 lacc->offset, racc->type))
2441 lacc->expr = t;
2442 else
2444 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2445 lacc->base, lacc->offset,
2446 racc, NULL, false);
2447 lacc->grp_no_warning = true;
2450 return false;
2453 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2455 struct access *new_acc = NULL;
2456 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2458 if (rchild->grp_unscalarizable_region)
2459 continue;
2461 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2462 &new_acc))
2464 if (new_acc)
2466 rchild->grp_hint = 1;
2467 new_acc->grp_hint |= new_acc->grp_read;
2468 if (rchild->first_child)
2469 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2471 continue;
2474 rchild->grp_hint = 1;
2475 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2476 if (new_acc)
2478 ret = true;
2479 if (racc->first_child)
2480 propagate_subaccesses_across_link (new_acc, rchild);
2484 return ret;
2487 /* Propagate all subaccesses across assignment links. */
2489 static void
2490 propagate_all_subaccesses (void)
2492 while (work_queue_head)
2494 struct access *racc = pop_access_from_work_queue ();
2495 struct assign_link *link;
2497 gcc_assert (racc->first_link);
2499 for (link = racc->first_link; link; link = link->next)
2501 struct access *lacc = link->lacc;
2503 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2504 continue;
2505 lacc = lacc->group_representative;
2506 if (propagate_subaccesses_across_link (lacc, racc)
2507 && lacc->first_link)
2508 add_access_to_work_queue (lacc);
2513 /* Go through all accesses collected throughout the (intraprocedural) analysis
2514 stage, exclude overlapping ones, identify representatives and build trees
2515 out of them, making decisions about scalarization on the way. Return true
2516 iff there are any to-be-scalarized variables after this stage. */
2518 static bool
2519 analyze_all_variable_accesses (void)
2521 int res = 0;
2522 bitmap tmp = BITMAP_ALLOC (NULL);
2523 bitmap_iterator bi;
2524 unsigned i, max_total_scalarization_size;
2526 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2527 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2529 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2530 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2531 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2533 tree var = candidate (i);
2535 if (TREE_CODE (var) == VAR_DECL
2536 && type_consists_of_records_p (TREE_TYPE (var)))
2538 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2539 <= max_total_scalarization_size)
2541 completely_scalarize_var (var);
2542 if (dump_file && (dump_flags & TDF_DETAILS))
2544 fprintf (dump_file, "Will attempt to totally scalarize ");
2545 print_generic_expr (dump_file, var, 0);
2546 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2549 else if (dump_file && (dump_flags & TDF_DETAILS))
2551 fprintf (dump_file, "Too big to totally scalarize: ");
2552 print_generic_expr (dump_file, var, 0);
2553 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2558 bitmap_copy (tmp, candidate_bitmap);
2559 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2561 tree var = candidate (i);
2562 struct access *access;
2564 access = sort_and_splice_var_accesses (var);
2565 if (!access || !build_access_trees (access))
2566 disqualify_candidate (var,
2567 "No or inhibitingly overlapping accesses.");
2570 propagate_all_subaccesses ();
2572 bitmap_copy (tmp, candidate_bitmap);
2573 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2575 tree var = candidate (i);
2576 struct access *access = get_first_repr_for_decl (var);
2578 if (analyze_access_trees (access))
2580 res++;
2581 if (dump_file && (dump_flags & TDF_DETAILS))
2583 fprintf (dump_file, "\nAccess trees for ");
2584 print_generic_expr (dump_file, var, 0);
2585 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2586 dump_access_tree (dump_file, access);
2587 fprintf (dump_file, "\n");
2590 else
2591 disqualify_candidate (var, "No scalar replacements to be created.");
2594 BITMAP_FREE (tmp);
2596 if (res)
2598 statistics_counter_event (cfun, "Scalarized aggregates", res);
2599 return true;
2601 else
2602 return false;
2605 /* Generate statements copying scalar replacements of accesses within a subtree
2606 into or out of AGG. ACCESS, all its children, siblings and their children
2607 are to be processed. AGG is an aggregate type expression (it can be a
2608 declaration but does not have to be; it can for example also be a mem_ref or
2609 a series of handled components). TOP_OFFSET is the offset of the processed
2610 subtree which has to be subtracted from offsets of individual accesses to
2611 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2612 replacements in the interval <start_offset, start_offset + chunk_size>,
2613 otherwise copy all. GSI is a statement iterator used to place the new
2614 statements. WRITE should be true when the statements should write from AGG
2615 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2616 statements will be added after the current statement in GSI; otherwise they
2617 will be added before it. */
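/* For illustration (an editorial sketch with hypothetical replacement
   names): given an access tree over a variable s of type
   struct { int i; int j; } whose components have the replacements SR.1 and
   SR.2, calling this function with WRITE set emits approximately

     SR.1 = s.i;
     SR.2 = s.j;

   whereas with WRITE clear the assignments go in the opposite
   direction.  */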
2619 static void
2620 generate_subtree_copies (struct access *access, tree agg,
2621 HOST_WIDE_INT top_offset,
2622 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2623 gimple_stmt_iterator *gsi, bool write,
2624 bool insert_after, location_t loc)
2628 if (chunk_size && access->offset >= start_offset + chunk_size)
2629 return;
2631 if (access->grp_to_be_replaced
2632 && (chunk_size == 0
2633 || access->offset + access->size > start_offset))
2635 tree expr, repl = get_access_replacement (access);
2636 gimple stmt;
2638 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2639 access, gsi, insert_after);
2641 if (write)
2643 if (access->grp_partial_lhs)
2644 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2645 !insert_after,
2646 insert_after ? GSI_NEW_STMT
2647 : GSI_SAME_STMT);
2648 stmt = gimple_build_assign (repl, expr);
2650 else
2652 TREE_NO_WARNING (repl) = 1;
2653 if (access->grp_partial_lhs)
2654 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2655 !insert_after,
2656 insert_after ? GSI_NEW_STMT
2657 : GSI_SAME_STMT);
2658 stmt = gimple_build_assign (expr, repl);
2660 gimple_set_location (stmt, loc);
2662 if (insert_after)
2663 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2664 else
2665 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2666 update_stmt (stmt);
2667 sra_stats.subtree_copies++;
2669 else if (write
2670 && access->grp_to_be_debug_replaced
2671 && (chunk_size == 0
2672 || access->offset + access->size > start_offset))
2674 gimple ds;
2675 tree drhs = build_debug_ref_for_model (loc, agg,
2676 access->offset - top_offset,
2677 access);
2678 ds = gimple_build_debug_bind (get_access_replacement (access),
2679 drhs, gsi_stmt (*gsi));
2680 if (insert_after)
2681 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2682 else
2683 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2686 if (access->first_child)
2687 generate_subtree_copies (access->first_child, agg, top_offset,
2688 start_offset, chunk_size, gsi,
2689 write, insert_after, loc);
2691 access = access->next_sibling;
2693 while (access);
2696 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2697 root of the subtree to be processed. GSI is the statement iterator used
2698 for inserting statements which are added after the current statement if
2699 INSERT_AFTER is true or before it otherwise. */
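/* An editorial sketch (same hypothetical replacements as above): for the
   tree over s this emits

     SR.1 = 0;
     SR.2 = 0;

   which is how a constructor-based zeroing of a scalarized aggregate is
   expressed in terms of its replacements.  */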
2701 static void
2702 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2703 bool insert_after, location_t loc)
2706 struct access *child;
2708 if (access->grp_to_be_replaced)
2710 gimple stmt;
2712 stmt = gimple_build_assign (get_access_replacement (access),
2713 build_zero_cst (access->type));
2714 if (insert_after)
2715 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2716 else
2717 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2718 update_stmt (stmt);
2719 gimple_set_location (stmt, loc);
2721 else if (access->grp_to_be_debug_replaced)
2723 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2724 build_zero_cst (access->type),
2725 gsi_stmt (*gsi));
2726 if (insert_after)
2727 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2728 else
2729 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2732 for (child = access->first_child; child; child = child->next_sibling)
2733 init_subtree_with_zero (child, gsi, insert_after, loc);
2736 /* Search for an access representative for the given expression EXPR and
2737 return it or NULL if it cannot be found. */
2739 static struct access *
2740 get_access_for_expr (tree expr)
2742 HOST_WIDE_INT offset, size, max_size;
2743 tree base;
2745 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2746 a different size than the size of its argument and we need the latter
2747 one. */
2748 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2749 expr = TREE_OPERAND (expr, 0);
2751 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2752 if (max_size == -1 || !DECL_P (base))
2753 return NULL;
2755 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2756 return NULL;
2758 return get_var_base_offset_size_access (base, offset, max_size);
2761 /* Replace the expression EXPR with a scalar replacement if there is one and
2762 generate other statements to do type conversion or subtree copying if
2763 necessary. GSI is used to place newly created statements, WRITE is true if
2764 the expression is being written to (it is on a LHS of a statement or output
2765 in an assembly statement). */
2767 static bool
2768 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2770 location_t loc;
2771 struct access *access;
2772 tree type, bfr, orig_expr;
2774 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2776 bfr = *expr;
2777 expr = &TREE_OPERAND (*expr, 0);
2779 else
2780 bfr = NULL_TREE;
2782 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2783 expr = &TREE_OPERAND (*expr, 0);
2784 access = get_access_for_expr (*expr);
2785 if (!access)
2786 return false;
2787 type = TREE_TYPE (*expr);
2788 orig_expr = *expr;
2790 loc = gimple_location (gsi_stmt (*gsi));
2791 gimple_stmt_iterator alt_gsi = gsi_none ();
2792 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2794 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2795 gsi = &alt_gsi;
2798 if (access->grp_to_be_replaced)
2800 tree repl = get_access_replacement (access);
2801 /* If we replace a non-register typed access simply use the original
2802 access expression to extract the scalar component afterwards.
2803 This happens if scalarizing a function return value or parameter
2804 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2805 gcc.c-torture/compile/20011217-1.c.
2807 We also want to use this when accessing a complex or vector which can
2808 be accessed as a different type too, potentially creating a need for
2809 type conversion (see PR42196) and when scalarized unions are involved
2810 in assembler statements (see PR42398). */
2811 if (!useless_type_conversion_p (type, access->type))
2813 tree ref;
2815 ref = build_ref_for_model (loc, orig_expr, 0, access, NULL, false);
2817 if (write)
2819 gimple stmt;
2821 if (access->grp_partial_lhs)
2822 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2823 false, GSI_NEW_STMT);
2824 stmt = gimple_build_assign (repl, ref);
2825 gimple_set_location (stmt, loc);
2826 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2828 else
2830 gimple stmt;
2832 if (access->grp_partial_lhs)
2833 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2834 true, GSI_SAME_STMT);
2835 stmt = gimple_build_assign (ref, repl);
2836 gimple_set_location (stmt, loc);
2837 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2840 else
2841 *expr = repl;
2842 sra_stats.exprs++;
2844 else if (write && access->grp_to_be_debug_replaced)
2846 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2847 NULL_TREE,
2848 gsi_stmt (*gsi));
2849 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2852 if (access->first_child)
2854 HOST_WIDE_INT start_offset, chunk_size;
2855 if (bfr
2856 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2857 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2859 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2860 start_offset = access->offset
2861 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2863 else
2864 start_offset = chunk_size = 0;
2866 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2867 start_offset, chunk_size, gsi, write, write,
2868 loc);
2870 return true;
2873 /* Where scalar replacements of the RHS have been written to when a replacement
2874 of the LHS of an assignment cannot be directly loaded from a replacement of
2875 the RHS. */
2876 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2877 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2878 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2880 struct subreplacement_assignment_data
2882 /* Offset of the access representing the lhs of the assignment. */
2883 HOST_WIDE_INT left_offset;
2885 /* LHS and RHS of the original assignment. */
2886 tree assignment_lhs, assignment_rhs;
2888 /* Access representing the rhs of the whole assignment. */
2889 struct access *top_racc;
2891 /* Stmt iterator used for statement insertions after the original assignment.
2892 It points to the main GSI used to traverse a BB during function body
2893 modification. */
2894 gimple_stmt_iterator *new_gsi;
2896 /* Stmt iterator used for statement insertions before the original
2897 assignment. Keeps on pointing to the original statement. */
2898 gimple_stmt_iterator old_gsi;
2900 /* Location of the assignment. */
2901 location_t loc;
2903 /* Keeps the information whether we have needed to refresh replacements of
2904 the LHS and from which side of the assignments this takes place. */
2905 enum unscalarized_data_handling refreshed;
2908 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2909 their base aggregate if there are unscalarized data or directly to the LHS
2910 of the original assignment statement otherwise. */
2912 static void
2913 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2915 tree src;
2916 if (sad->top_racc->grp_unscalarized_data)
2918 src = sad->assignment_rhs;
2919 sad->refreshed = SRA_UDH_RIGHT;
2921 else
2923 src = sad->assignment_lhs;
2924 sad->refreshed = SRA_UDH_LEFT;
2926 generate_subtree_copies (sad->top_racc->first_child, src,
2927 sad->top_racc->offset, 0, 0,
2928 &sad->old_gsi, false, false, sad->loc);
2931 /* Try to generate statements to load all sub-replacements in an access subtree
2932 formed by children of LACC from scalar replacements in the SAD->top_racc
2933 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2934 and load the accesses from it. */
2936 static void
2937 load_assign_lhs_subreplacements (struct access *lacc,
2938 struct subreplacement_assignment_data *sad)
2940 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2942 HOST_WIDE_INT offset;
2943 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2945 if (lacc->grp_to_be_replaced)
2947 struct access *racc;
2948 gimple stmt;
2949 tree rhs;
2951 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2952 if (racc && racc->grp_to_be_replaced)
2954 rhs = get_access_replacement (racc);
2955 if (!useless_type_conversion_p (lacc->type, racc->type))
2956 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2957 lacc->type, rhs);
2959 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2960 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2961 NULL_TREE, true, GSI_SAME_STMT);
2963 else
2965 /* No suitable access on the right hand side, need to load from
2966 the aggregate. See if we have to update it first... */
2967 if (sad->refreshed == SRA_UDH_NONE)
2968 handle_unscalarized_data_in_subtree (sad);
2970 if (sad->refreshed == SRA_UDH_LEFT)
2971 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2972 lacc->offset - sad->left_offset,
2973 lacc, sad->new_gsi, true);
2974 else
2975 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
2976 lacc->offset - sad->left_offset,
2977 lacc, sad->new_gsi, true);
2978 if (lacc->grp_partial_lhs)
2979 rhs = force_gimple_operand_gsi (sad->new_gsi,
2980 rhs, true, NULL_TREE,
2981 false, GSI_NEW_STMT);
2984 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2985 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
2986 gimple_set_location (stmt, sad->loc);
2987 update_stmt (stmt);
2988 sra_stats.subreplacements++;
2990 else
2992 if (sad->refreshed == SRA_UDH_NONE
2993 && lacc->grp_read && !lacc->grp_covered)
2994 handle_unscalarized_data_in_subtree (sad);
2996 if (lacc && lacc->grp_to_be_debug_replaced)
2998 gimple ds;
2999 tree drhs;
3000 struct access *racc = find_access_in_subtree (sad->top_racc,
3001 offset,
3002 lacc->size);
3004 if (racc && racc->grp_to_be_replaced)
3006 if (racc->grp_write)
3007 drhs = get_access_replacement (racc);
3008 else
3009 drhs = NULL;
3011 else if (sad->refreshed == SRA_UDH_LEFT)
3012 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3013 lacc->offset, lacc);
3014 else if (sad->refreshed == SRA_UDH_RIGHT)
3015 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3016 offset, lacc);
3017 else
3018 drhs = NULL_TREE;
3019 if (drhs
3020 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3021 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3022 lacc->type, drhs);
3023 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3024 drhs, gsi_stmt (sad->old_gsi));
3025 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3029 if (lacc->first_child)
3030 load_assign_lhs_subreplacements (lacc, sad);
3034 /* Result code for SRA assignment modification. */
3035 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3036 SRA_AM_MODIFIED, /* stmt changed but not
3037 removed */
3038 SRA_AM_REMOVED }; /* stmt eliminated */
3040 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3041 to the assignment and GSI is the statement iterator pointing at it. Returns
3042 the same values as sra_modify_assign. */
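/* An editorial illustration (hypothetical testcase): for a local

     struct S { int i; int j; } s = {};

   the GIMPLE assignment s = {} over a fully covered access tree is removed
   entirely and replaced by zero-initialization of the scalar replacements
   through init_subtree_with_zero.  */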
3044 static enum assignment_mod_result
3045 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3047 tree lhs = gimple_assign_lhs (*stmt);
3048 struct access *acc;
3049 location_t loc;
3051 acc = get_access_for_expr (lhs);
3052 if (!acc)
3053 return SRA_AM_NONE;
3055 if (gimple_clobber_p (*stmt))
3057 /* Remove clobbers of fully scalarized variables, otherwise
3058 do nothing. */
3059 if (acc->grp_covered)
3061 unlink_stmt_vdef (*stmt);
3062 gsi_remove (gsi, true);
3063 release_defs (*stmt);
3064 return SRA_AM_REMOVED;
3066 else
3067 return SRA_AM_NONE;
3070 loc = gimple_location (*stmt);
3071 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
3073 /* I have never seen this code path trigger but if it can happen the
3074 following should handle it gracefully. */
3075 if (access_has_children_p (acc))
3076 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3077 true, true, loc);
3078 return SRA_AM_MODIFIED;
3081 if (acc->grp_covered)
3083 init_subtree_with_zero (acc, gsi, false, loc);
3084 unlink_stmt_vdef (*stmt);
3085 gsi_remove (gsi, true);
3086 release_defs (*stmt);
3087 return SRA_AM_REMOVED;
3089 else
3091 init_subtree_with_zero (acc, gsi, true, loc);
3092 return SRA_AM_MODIFIED;
3096 /* Create and return a new suitable default definition SSA_NAME for RACC which
3097 is an access describing an uninitialized part of an aggregate that is being
3098 loaded. */
3100 static tree
3101 get_repl_default_def_ssa_name (struct access *racc)
3103 gcc_checking_assert (!racc->grp_to_be_replaced
3104 && !racc->grp_to_be_debug_replaced);
3105 if (!racc->replacement_decl)
3106 racc->replacement_decl = create_access_replacement (racc);
3107 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3110 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3111 bit-field field declaration somewhere in it. */
3113 static inline bool
3114 contains_vce_or_bfcref_p (const_tree ref)
3116 while (handled_component_p (ref))
3118 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3119 || (TREE_CODE (ref) == COMPONENT_REF
3120 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3121 return true;
3122 ref = TREE_OPERAND (ref, 0);
3125 return false;
3128 /* Examine both sides of the assignment statement pointed to by STMT, replace
3129 them with a scalar replacement if there is one and generate copying of
3130 replacements if scalarized aggregates have been used in the assignment. GSI
3131 is used to hold generated statements for type conversions and subtree
3132 copying. */
3134 static enum assignment_mod_result
3135 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3137 struct access *lacc, *racc;
3138 tree lhs, rhs;
3139 bool modify_this_stmt = false;
3140 bool force_gimple_rhs = false;
3141 location_t loc;
3142 gimple_stmt_iterator orig_gsi = *gsi;
3144 if (!gimple_assign_single_p (*stmt))
3145 return SRA_AM_NONE;
3146 lhs = gimple_assign_lhs (*stmt);
3147 rhs = gimple_assign_rhs1 (*stmt);
3149 if (TREE_CODE (rhs) == CONSTRUCTOR)
3150 return sra_modify_constructor_assign (stmt, gsi);
3152 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3153 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3154 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3156 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3157 gsi, false);
3158 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3159 gsi, true);
3160 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3163 lacc = get_access_for_expr (lhs);
3164 racc = get_access_for_expr (rhs);
3165 if (!lacc && !racc)
3166 return SRA_AM_NONE;
3168 loc = gimple_location (*stmt);
3169 if (lacc && lacc->grp_to_be_replaced)
3171 lhs = get_access_replacement (lacc);
3172 gimple_assign_set_lhs (*stmt, lhs);
3173 modify_this_stmt = true;
3174 if (lacc->grp_partial_lhs)
3175 force_gimple_rhs = true;
3176 sra_stats.exprs++;
3179 if (racc && racc->grp_to_be_replaced)
3181 rhs = get_access_replacement (racc);
3182 modify_this_stmt = true;
3183 if (racc->grp_partial_lhs)
3184 force_gimple_rhs = true;
3185 sra_stats.exprs++;
3187 else if (racc
3188 && !racc->grp_unscalarized_data
3189 && TREE_CODE (lhs) == SSA_NAME
3190 && !access_has_replacements_p (racc))
3192 rhs = get_repl_default_def_ssa_name (racc);
3193 modify_this_stmt = true;
3194 sra_stats.exprs++;
3197 if (modify_this_stmt)
3199 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3201 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3202 ??? This should move to fold_stmt which we simply should
3203 call after building a VIEW_CONVERT_EXPR here. */
3204 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3205 && !contains_bitfld_component_ref_p (lhs))
3207 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3208 gimple_assign_set_lhs (*stmt, lhs);
3210 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3211 && !contains_vce_or_bfcref_p (rhs))
3212 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3214 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3216 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3217 rhs);
3218 if (is_gimple_reg_type (TREE_TYPE (lhs))
3219 && TREE_CODE (lhs) != SSA_NAME)
3220 force_gimple_rhs = true;
3225 if (lacc && lacc->grp_to_be_debug_replaced)
3227 tree dlhs = get_access_replacement (lacc);
3228 tree drhs = unshare_expr (rhs);
3229 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3231 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3232 && !contains_vce_or_bfcref_p (drhs))
3233 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3234 if (drhs
3235 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3236 TREE_TYPE (drhs)))
3237 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3238 TREE_TYPE (dlhs), drhs);
3240 gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3241 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3244 /* From this point on, the function deals with assignments in between
3245 aggregates when at least one has scalar reductions of some of its
3246 components. There are three possible scenarios: 1) both the LHS and RHS have
3247 to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
3249 In the first case, we would like to load the LHS components from RHS
3250 components whenever possible. If that is not possible, we would like to
3251 read it directly from the RHS (after updating it by storing in it its own
3252 components). If there are some necessary unscalarized data in the LHS,
3253 those will be loaded by the original assignment too. If neither of these
3254 cases happen, the original statement can be removed. Most of this is done
3255 by load_assign_lhs_subreplacements.
3257 In the second case, we would like to store all RHS scalarized components
3258 directly into LHS and if they cover the aggregate completely, remove the
3259 statement too. In the third case, we want the LHS components to be loaded
3260 directly from the RHS (DSE will remove the original statement if it
3261 becomes redundant).
3263 This is a bit complex but manageable when types match and when unions do
3264 not cause confusion in a way that we cannot really load a component of LHS
3265 from the RHS or vice versa (the access representing this level can have
3266 subaccesses that are accessible only through a different union field at a
3267 higher level - different from the one used in the examined expression).
3268 Unions are fun.
3270 Therefore, I specially handle a fourth case, happening when there is a
3271 specific type cast or it is impossible to locate a scalarized subaccess on
3272 the other side of the expression. If that happens, I simply "refresh" the
3273 RHS by storing in it its scalarized components, leave the original statement
3274 there to do the copying and then load the scalar replacements of the LHS.
3275 This is what the first branch does. */
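/* An editorial illustration of the third scenario (hypothetical testcase):

     struct S { int i; int j; };
     extern void use (int);

     void
     foo (struct S *p)
     {
       struct S s = *p;
       use (s.i);
       use (s.j);
     }

   Only the LHS s has to-be-scalarized components, so they are loaded
   directly from the RHS, roughly as SR.1 = p->i and SR.2 = p->j, and DSE
   can remove the aggregate copy s = *p once it becomes redundant.  */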
3277 if (modify_this_stmt
3278 || gimple_has_volatile_ops (*stmt)
3279 || contains_vce_or_bfcref_p (rhs)
3280 || contains_vce_or_bfcref_p (lhs)
3281 || stmt_ends_bb_p (*stmt))
3283 if (access_has_children_p (racc))
3284 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3285 gsi, false, false, loc);
3286 if (access_has_children_p (lacc))
3288 gimple_stmt_iterator alt_gsi = gsi_none ();
3289 if (stmt_ends_bb_p (*stmt))
3291 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3292 gsi = &alt_gsi;
3294 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3295 gsi, true, true, loc);
3297 sra_stats.separate_lhs_rhs_handling++;
3299 /* This gimplification must be done after generate_subtree_copies,
3300 lest we insert the subtree copies in the middle of the gimplified
3301 sequence. */
3302 if (force_gimple_rhs)
3303 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3304 true, GSI_SAME_STMT);
3305 if (gimple_assign_rhs1 (*stmt) != rhs)
3307 modify_this_stmt = true;
3308 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3309 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3312 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3314 else
3316 if (access_has_children_p (lacc)
3317 && access_has_children_p (racc)
3318 /* When an access represents an unscalarizable region, it usually
3319 represents accesses with variable offset and thus must not be used
3320 to generate new memory accesses. */
3321 && !lacc->grp_unscalarizable_region
3322 && !racc->grp_unscalarizable_region)
3324 struct subreplacement_assignment_data sad;
3326 sad.left_offset = lacc->offset;
3327 sad.assignment_lhs = lhs;
3328 sad.assignment_rhs = rhs;
3329 sad.top_racc = racc;
3330 sad.old_gsi = *gsi;
3331 sad.new_gsi = gsi;
3332 sad.loc = gimple_location (*stmt);
3333 sad.refreshed = SRA_UDH_NONE;
3335 if (lacc->grp_read && !lacc->grp_covered)
3336 handle_unscalarized_data_in_subtree (&sad);
3338 load_assign_lhs_subreplacements (lacc, &sad);
3339 if (sad.refreshed != SRA_UDH_RIGHT)
3341 gsi_next (gsi);
3342 unlink_stmt_vdef (*stmt);
3343 gsi_remove (&sad.old_gsi, true);
3344 release_defs (*stmt);
3345 sra_stats.deleted++;
3346 return SRA_AM_REMOVED;
3349 else
3351 if (access_has_children_p (racc)
3352 && !racc->grp_unscalarized_data)
3354 if (dump_file)
3356 fprintf (dump_file, "Removing load: ");
3357 print_gimple_stmt (dump_file, *stmt, 0, 0);
3359 generate_subtree_copies (racc->first_child, lhs,
3360 racc->offset, 0, 0, gsi,
3361 false, false, loc);
3362 gcc_assert (*stmt == gsi_stmt (*gsi));
3363 unlink_stmt_vdef (*stmt);
3364 gsi_remove (gsi, true);
3365 release_defs (*stmt);
3366 sra_stats.deleted++;
3367 return SRA_AM_REMOVED;
3369 /* Restore the aggregate RHS from its components so the
3370 prevailing aggregate copy does the right thing. */
3371 if (access_has_children_p (racc))
3372 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3373 gsi, false, false, loc);
3374 /* Re-load the components of the aggregate copy destination.
3375 But use the RHS aggregate to load from to expose more
3376 optimization opportunities. */
3377 if (access_has_children_p (lacc))
3378 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3379 0, 0, gsi, true, true, loc);
3382 return SRA_AM_NONE;
3386 /* Traverse the function body, making all modifications as decided in
3387 analyze_all_variable_accesses. Return true iff the CFG has been
3388 changed. */
3390 static bool
3391 sra_modify_function_body (void)
3393 bool cfg_changed = false;
3394 basic_block bb;
3396 FOR_EACH_BB_FN (bb, cfun)
3398 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3399 while (!gsi_end_p (gsi))
3401 gimple stmt = gsi_stmt (gsi);
3402 enum assignment_mod_result assign_result;
3403 bool modified = false, deleted = false;
3404 tree *t;
3405 unsigned i;
3407 switch (gimple_code (stmt))
3409 case GIMPLE_RETURN:
3410 t = gimple_return_retval_ptr (stmt);
3411 if (*t != NULL_TREE)
3412 modified |= sra_modify_expr (t, &gsi, false);
3413 break;
3415 case GIMPLE_ASSIGN:
3416 assign_result = sra_modify_assign (&stmt, &gsi);
3417 modified |= assign_result == SRA_AM_MODIFIED;
3418 deleted = assign_result == SRA_AM_REMOVED;
3419 break;
3421 case GIMPLE_CALL:
3422 /* Operands must be processed before the lhs. */
3423 for (i = 0; i < gimple_call_num_args (stmt); i++)
3425 t = gimple_call_arg_ptr (stmt, i);
3426 modified |= sra_modify_expr (t, &gsi, false);
3429 if (gimple_call_lhs (stmt))
3431 t = gimple_call_lhs_ptr (stmt);
3432 modified |= sra_modify_expr (t, &gsi, true);
3434 break;
3436 case GIMPLE_ASM:
3437 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3439 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3440 modified |= sra_modify_expr (t, &gsi, false);
3442 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3444 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3445 modified |= sra_modify_expr (t, &gsi, true);
3447 break;
3449 default:
3450 break;
3453 if (modified)
3455 update_stmt (stmt);
3456 if (maybe_clean_eh_stmt (stmt)
3457 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3458 cfg_changed = true;
3460 if (!deleted)
3461 gsi_next (&gsi);
3465 gsi_commit_edge_inserts ();
3466 return cfg_changed;
3469 /* Generate statements initializing scalar replacements of parts of function
3470 parameters. */
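/* An editorial sketch (hypothetical struct and replacement names): for

     struct S { int i; int j; };

     int
     foo (struct S s)
     {
       return s.i + s.j;
     }

   where s.i and s.j have the replacements SR.1 and SR.2, the statements
   SR.1 = s.i and SR.2 = s.j are inserted on the single edge leaving the
   entry block, so the replacements start out holding the incoming
   parameter values.  */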
3472 static void
3473 initialize_parameter_reductions (void)
3475 gimple_stmt_iterator gsi;
3476 gimple_seq seq = NULL;
3477 tree parm;
3479 gsi = gsi_start (seq);
3480 for (parm = DECL_ARGUMENTS (current_function_decl);
3481 parm;
3482 parm = DECL_CHAIN (parm))
3484 vec<access_p> *access_vec;
3485 struct access *access;
3487 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3488 continue;
3489 access_vec = get_base_access_vector (parm);
3490 if (!access_vec)
3491 continue;
3493 for (access = (*access_vec)[0];
3494 access;
3495 access = access->next_grp)
3496 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3497 EXPR_LOCATION (parm));
3500 seq = gsi_seq (gsi);
3501 if (seq)
3502 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3505 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3506 it reveals there are components of some aggregates to be scalarized, it runs
3507 the required transformations. */
3508 static unsigned int
3509 perform_intra_sra (void)
3511 int ret = 0;
3512 sra_initialize ();
3514 if (!find_var_candidates ())
3515 goto out;
3517 if (!scan_function ())
3518 goto out;
3520 if (!analyze_all_variable_accesses ())
3521 goto out;
3523 if (sra_modify_function_body ())
3524 ret = TODO_update_ssa | TODO_cleanup_cfg;
3525 else
3526 ret = TODO_update_ssa;
3527 initialize_parameter_reductions ();
3529 statistics_counter_event (cfun, "Scalar replacements created",
3530 sra_stats.replacements);
3531 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3532 statistics_counter_event (cfun, "Subtree copy stmts",
3533 sra_stats.subtree_copies);
3534 statistics_counter_event (cfun, "Subreplacement stmts",
3535 sra_stats.subreplacements);
3536 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3537 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3538 sra_stats.separate_lhs_rhs_handling);
3540 out:
3541 sra_deinitialize ();
3542 return ret;
3545 /* Perform early intraprocedural SRA. */
3546 static unsigned int
3547 early_intra_sra (void)
3549 sra_mode = SRA_MODE_EARLY_INTRA;
3550 return perform_intra_sra ();
3553 /* Perform "late" intraprocedural SRA. */
3554 static unsigned int
3555 late_intra_sra (void)
3557 sra_mode = SRA_MODE_INTRA;
3558 return perform_intra_sra ();
3562 static bool
3563 gate_intra_sra (void)
3565 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3569 namespace {
3571 const pass_data pass_data_sra_early =
3573 GIMPLE_PASS, /* type */
3574 "esra", /* name */
3575 OPTGROUP_NONE, /* optinfo_flags */
3576 true, /* has_execute */
3577 TV_TREE_SRA, /* tv_id */
3578 ( PROP_cfg | PROP_ssa ), /* properties_required */
3579 0, /* properties_provided */
3580 0, /* properties_destroyed */
3581 0, /* todo_flags_start */
3582 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3585 class pass_sra_early : public gimple_opt_pass
3587 public:
3588 pass_sra_early (gcc::context *ctxt)
3589 : gimple_opt_pass (pass_data_sra_early, ctxt)
3592 /* opt_pass methods: */
3593 virtual bool gate (function *) { return gate_intra_sra (); }
3594 virtual unsigned int execute (function *) { return early_intra_sra (); }
3596 }; // class pass_sra_early
3598 } // anon namespace
3600 gimple_opt_pass *
3601 make_pass_sra_early (gcc::context *ctxt)
3603 return new pass_sra_early (ctxt);
3606 namespace {
3608 const pass_data pass_data_sra =
3610 GIMPLE_PASS, /* type */
3611 "sra", /* name */
3612 OPTGROUP_NONE, /* optinfo_flags */
3613 true, /* has_execute */
3614 TV_TREE_SRA, /* tv_id */
3615 ( PROP_cfg | PROP_ssa ), /* properties_required */
3616 0, /* properties_provided */
3617 0, /* properties_destroyed */
3618 TODO_update_address_taken, /* todo_flags_start */
3619 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3622 class pass_sra : public gimple_opt_pass
3624 public:
3625 pass_sra (gcc::context *ctxt)
3626 : gimple_opt_pass (pass_data_sra, ctxt)
3629 /* opt_pass methods: */
3630 virtual bool gate (function *) { return gate_intra_sra (); }
3631 virtual unsigned int execute (function *) { return late_intra_sra (); }
3633 }; // class pass_sra
3635 } // anon namespace
3637 gimple_opt_pass *
3638 make_pass_sra (gcc::context *ctxt)
3640 return new pass_sra (ctxt);
3644 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3645 parameter. */
3647 static bool
3648 is_unused_scalar_param (tree parm)
3650 tree name;
3651 return (is_gimple_reg (parm)
3652 && (!(name = ssa_default_def (cfun, parm))
3653 || has_zero_uses (name)));
3656 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3657 examine whether there are any direct or otherwise infeasible ones. If so,
3658 return true, otherwise return false. PARM must be a gimple register with a
3659 non-NULL default definition. */
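/* An editorial illustration (hypothetical testcases): in

     int f (int *p) { return *p; }

   the only use of p's default definition is a dereference of the expected
   type, so this function returns false and p stays an IPA-SRA candidate.
   By contrast, in

     int *g (int *p) { return p; }

   p itself escapes through the return value; that use is not matched by
   any of the patterns below, so the function returns true.  */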
3661 static bool
3662 ptr_parm_has_direct_uses (tree parm)
3664 imm_use_iterator ui;
3665 gimple stmt;
3666 tree name = ssa_default_def (cfun, parm);
3667 bool ret = false;
3669 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3671 int uses_ok = 0;
3672 use_operand_p use_p;
3674 if (is_gimple_debug (stmt))
3675 continue;
3677 /* Valid uses include dereferences on the lhs and the rhs. */
3678 if (gimple_has_lhs (stmt))
3680 tree lhs = gimple_get_lhs (stmt);
3681 while (handled_component_p (lhs))
3682 lhs = TREE_OPERAND (lhs, 0);
3683 if (TREE_CODE (lhs) == MEM_REF
3684 && TREE_OPERAND (lhs, 0) == name
3685 && integer_zerop (TREE_OPERAND (lhs, 1))
3686 && types_compatible_p (TREE_TYPE (lhs),
3687 TREE_TYPE (TREE_TYPE (name)))
3688 && !TREE_THIS_VOLATILE (lhs))
3689 uses_ok++;
3691 if (gimple_assign_single_p (stmt))
3693 tree rhs = gimple_assign_rhs1 (stmt);
3694 while (handled_component_p (rhs))
3695 rhs = TREE_OPERAND (rhs, 0);
3696 if (TREE_CODE (rhs) == MEM_REF
3697 && TREE_OPERAND (rhs, 0) == name
3698 && integer_zerop (TREE_OPERAND (rhs, 1))
3699 && types_compatible_p (TREE_TYPE (rhs),
3700 TREE_TYPE (TREE_TYPE (name)))
3701 && !TREE_THIS_VOLATILE (rhs))
3702 uses_ok++;
3704 else if (is_gimple_call (stmt))
3706 unsigned i;
3707 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3709 tree arg = gimple_call_arg (stmt, i);
3710 while (handled_component_p (arg))
3711 arg = TREE_OPERAND (arg, 0);
3712 if (TREE_CODE (arg) == MEM_REF
3713 && TREE_OPERAND (arg, 0) == name
3714 && integer_zerop (TREE_OPERAND (arg, 1))
3715 && types_compatible_p (TREE_TYPE (arg),
3716 TREE_TYPE (TREE_TYPE (name)))
3717 && !TREE_THIS_VOLATILE (arg))
3718 uses_ok++;
3722 /* If the number of valid uses does not match the number of
3723 uses in this stmt there is an unhandled use. */
3724 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3725 --uses_ok;
3727 if (uses_ok != 0)
3728 ret = true;
3730 if (ret)
3731 BREAK_FROM_IMM_USE_STMT (ui);
3734 return ret;
3737 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3738 them in candidate_bitmap. Note that these do not necessarily include
3739 parameters which are unused and thus can be removed. Return true iff any
3740 such candidate has been found. */
3742 static bool
3743 find_param_candidates (void)
3745 tree parm;
3746 int count = 0;
3747 bool ret = false;
3748 const char *msg;
3750 for (parm = DECL_ARGUMENTS (current_function_decl);
3751 parm;
3752 parm = DECL_CHAIN (parm))
3754 tree type = TREE_TYPE (parm);
3755 tree_node **slot;
3757 count++;
3759 if (TREE_THIS_VOLATILE (parm)
3760 || TREE_ADDRESSABLE (parm)
3761 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3762 continue;
3764 if (is_unused_scalar_param (parm))
3766 ret = true;
3767 continue;
3770 if (POINTER_TYPE_P (type))
3772 type = TREE_TYPE (type);
3774 if (TREE_CODE (type) == FUNCTION_TYPE
3775 || TYPE_VOLATILE (type)
3776 || (TREE_CODE (type) == ARRAY_TYPE
3777 && TYPE_NONALIASED_COMPONENT (type))
3778 || !is_gimple_reg (parm)
3779 || is_va_list_type (type)
3780 || ptr_parm_has_direct_uses (parm))
3781 continue;
3783 else if (!AGGREGATE_TYPE_P (type))
3784 continue;
3786 if (!COMPLETE_TYPE_P (type)
3787 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3788 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3789 || (AGGREGATE_TYPE_P (type)
3790 && type_internals_preclude_sra_p (type, &msg)))
3791 continue;
3793 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3794 slot = candidates.find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3795 *slot = parm;
3797 ret = true;
3798 if (dump_file && (dump_flags & TDF_DETAILS))
3800 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3801 print_generic_expr (dump_file, parm, 0);
3802 fprintf (dump_file, "\n");
3806 func_param_count = count;
3807 return ret;
3810 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3811 maybe_modified. */
3813 static bool
3814 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3815 void *data)
3817 struct access *repr = (struct access *) data;
3819 repr->grp_maybe_modified = 1;
3820 return true;
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified
		 would be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
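
/* Illustration (an assumed example): in

     static int f (int *p) { g (); return *p; }

   the load of *p is a read access, but if the alias oracle cannot prove that
   g () never stores through some alias of *p, walk_aliased_vdefs reaches
   that vdef, mark_maybe_modified fires and the representative is flagged,
   which later prevents turning P into a by-value parameter.  */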
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the
   table stabilizes.  Note that BBs which might terminate the function
   (according to the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  basic_block bb;

  auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }
}
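
/* Worked example (assumed, for illustration only): with one pointer
   parameter and a diamond CFG where the then-branch dereferences bits
   [0, 64) and the else-branch dereferences nothing, the branch point
   inherits min (64, 0) = 0 from its successors, so distance 0 reaches the
   ENTRY block and the dereference cannot be hoisted into callers.  If both
   branches dereferenced [0, 64), min (64, 64) = 64 would reach ENTRY
   instead.  */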
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (f, "%s", str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (f, "\n");
}
/* Determine which (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function, and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the
   accessed part, is calculated rather than a simple boolean for each
   pointer parameter in order to handle cases when only a fraction of the
   whole aggregate is allocated (see
   testsuite/gcc.c-torture/execute/ipa-sra-2.c for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
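
/* Illustration (an assumed example): in

     static int f (int *p, int c) { return c ? *p : 0; }

   *p is not dereferenced on every path from ENTRY, so the distance reaching
   the ENTRY block is smaller than offset + size of the access and the
   representative is marked grp_not_necessarilly_dereferenced; loading *p
   unconditionally in the caller could introduce a fault that the original
   program would never have executed.  */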
/* Return the representative access for the parameter declaration PARM if it
   is a scalar passed by reference which is not written to and the pointer
   value is not used directly.  Thus, if it is legal to dereference it in the
   caller and we can rule out modifications through aliases, such a parameter
   should be turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
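
/* Illustration (an assumed example): this identifies the case that turns

     static int f (int *p) { return *p + *p; }

   into the equivalent of

     static int f (int p_val) { return p_val + p_val; }

   provided analyze_modified_params and analyze_caller_dereference_legality
   later confirm that the pointee is neither modified through aliases nor
   only conditionally dereferenced.  */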
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The
     problem is an incompatible assignment in a call statement (and possibly
     even in asm statements).  This can be relaxed by using a new temporary
     but only for non-TREE_ADDRESSABLE types and is probably not worth the
     complexity.  (In intraprocedural SRA we deal with this by keeping the
     old aggregate around, something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special pointer value
   representing no accesses if there are none, and return the first
   representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
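
/* Illustration (an assumed example): for a parameter of type
   struct { int a; int b; }, accesses to bits [0, 32) and [32, 64) form two
   clean groups, whereas an additional access covering bits [0, 64) (e.g.
   reading the whole struct at once) would overlap the first group without
   matching its offset and size, so the function above returns NULL and the
   parameter is left intact.  */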
/* Decide whether parameters with representative accesses given by REPR
   should be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
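
/* Arithmetic example (assumed values, for illustration): on a 64-bit target
   a by-reference parameter occupies cur_parm_size = 64 bits.  Assuming the
   default --param ipa-sra-ptr-growth-factor=2, parm_size_limit is 128 bits,
   so replacing the pointer with two 32-bit loaded components
   (total_size = 64 <= 128, and 64 < agg_size) is accepted, while four
   64-bit components (total_size = 256 > 128) would be rejected.  */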
/* The order of the following enums is important, we need to do extra work
   for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  representatives.quick_push (&no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  representatives.quick_push (repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  representatives.quick_push (repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to the first
   representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
   expected final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
				       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment adj;

	  memset (&adj, 0, sizeof (adj));
	  adj.base_index = get_param_index (parm, parms);
	  adj.base = parm;
	  if (!repr)
	    adj.op = IPA_PARM_OP_COPY;
	  else
	    adj.op = IPA_PARM_OP_REMOVE;
	  adj.arg_prefix = "ISRA";
	  adjustments.quick_push (adj);
	}
      else
	{
	  struct ipa_parm_adjustment adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      memset (&adj, 0, sizeof (adj));
	      gcc_assert (repr->base == parm);
	      adj.base_index = index;
	      adj.base = repr->base;
	      adj.type = repr->type;
	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj.offset = repr->offset;
	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			    && (repr->grp_maybe_modified
				|| repr->grp_not_necessarilly_dereferenced));
	      adj.arg_prefix = "ISRA";
	      adjustments.quick_push (adj);
	    }
	}
    }
  parms.release ();
  return adjustments;
}
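
/* Illustration (an assumed example): for

     static long f (struct S *p, int unused)

   where only p->a and p->b are read, the resulting vector would hold two
   entries with base_index 0 (one per component, carrying the appropriate
   offset and type) and one IPA_PARM_OP_REMOVE entry with base_index 1 for
   the unused scalar.  */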
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, an empty vector meaning that
   nothing is to be done.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		representatives[i] = NULL;
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  representatives[i] = NULL;
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the
   form of a declaration, create it and record it, otherwise return the
   previously created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy (IPA_PARM_OP_COPY).  Return NULL if there
   is no such adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
	return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL together with all of its uses and return
   true.  ADJUSTMENTS is a pointer to an adjustments vector.  */

static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
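
/* Illustration (an assumed example): if parameter P is being removed but
   the body still contains "p_1 = PHI <p_2, p_3>", the PHI result is rewired
   to a fresh SSA name of a new "ISR" temporary so that the definition no
   longer mentions the removed PARM_DECL; the value is dead anyway, only the
   SSA web has to stay consistent.  */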
/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
					    NULL);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
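
/* Illustration (an assumed example): when one side of an assignment is
   rewritten to refer to a new scalar replacement, the other side's type may
   no longer match; the code above then wraps the RHS in
   VIEW_CONVERT_EXPR <lhs_type> (rhs) and gimplifies it in place rather than
   emitting an invalid direct assignment between incompatible types.  */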
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (stmt);
	      if (*t != NULL_TREE)
		modified |= ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
		  modified |= ipa_modify_expr (t, true, adjustments);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
		  modified |= ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt, def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
	continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    if (gimple_clobber_p (stmt))
	      {
		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		gsi_remove (&cgsi, true);
		release_defs (stmt);
		continue;
	      }
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		DECL_MODE (vexpr) = DECL_MODE (adj->base);
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy) =
	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
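
/* Illustration (an assumed example): for a removed parameter P whose
   default-def SSA name is still referenced from debug statements, the code
   above emits a single source bind of a new DEBUG_EXPR_DECL to P at the
   start of the function and redirects the debug uses to it, so debuggers
   can still show the value the caller would have passed.  */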
/* Return false if all callers have at least as many actual arguments as
   there are formal parameters in the current function and their types
   match; return true otherwise.  */

static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
					  void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}
/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
		 xstrdup (cs->caller->name ()),
		 cs->caller->order,
		 xstrdup (cs->callee->name ()),
		 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  cgraph_for_node_and_aliases (node, convert_callers_for_node,
			       &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree call_fndecl;
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }

  return;
}
/* Perform all the modification required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;
  vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);

  rebuild_cgraph_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  new_node = cgraph_function_versioning (node, redirect_callers,
					 NULL,
					 NULL, false, NULL, NULL, "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}
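
/* Design note (for illustration): the function is not modified in place;
   cgraph_function_versioning creates a local clone with the new signature
   (its name typically carries an ".isra" suffix, e.g. "foo.isra.0"), all
   collected callers are redirected to the clone, and only the clone's body
   is then rewritten.  */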
/* If NODE has a caller, return true.  */

static bool
has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->callers)
    return true;
  return false;
}
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
	fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg. \n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Always inline function will be inlined "
		 "anyway. \n");
      return false;
    }

  return true;
}
/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (cgraph_for_node_and_aliases (node,
				   some_callers_have_mismatched_arguments_p,
				   NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
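
/* Usage note (illustrative, not part of this file): the transformations of
   this pass can be inspected by compiling with something like

     gcc -O2 -fipa-sra -fdump-tree-eipa_sra-details test.c

   which writes the candidate, dereference-table and adjustment dumps
   produced above into the "eipa_sra" dump file.  */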
namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}