/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2014 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */

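/* An illustrative sketch (not part of the pass sources); assuming a target
   with 32-bit int, for

     struct S { int i; float f; };

     int foo (struct S s)
     {
       s.i = 1;          access: base s, offset 0, size 32
       return s.i;       access: base s, offset 0, size 32
     }

   both statements produce accesses to the same (offset, size) region, a
   single representative is chosen for that region, and the pass rewrites the
   function to use one scalar replacement (named along the lines of "s$i" by
   make_fancy_name below) instead of the aggregate member.  */
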
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-map.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "statistics.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */

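/* A hypothetical example of such a tree (not from the sources): given

     struct A { struct B { int x; int y; } b; int z; } a;

   and accesses to a.b, a.b.x and a.z, the representatives, assuming 32-bit
   ints, would form

     a.b      offset 0,  size 64
       a.b.x  offset 0,  size 32
     a.z      offset 64, size 32

   where a.b.x is the first_child of a.b, while a.z heads its own tree; the
   roots of the trees for one variable are chained through next_grp.  */
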
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */

static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
                (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}

/* Create total_scalarization accesses for all scalar type fields in DECL,
   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must be
   the offset of DECL within BASE.  REF must be the memory reference expression
   for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_to_uhwi (DECL_SIZE (fld));
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}

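/* Illustration only (assumes a 32-bit int target): for

     struct P { int a; struct Q { int b; } q; } p;

   completely_scalarize_var creates accesses for p as a whole (offset 0,
   size 64), p.a (offset 0, size 32) and p.q.b (offset 32, size 32), all with
   grp_total_scalarization set.  No access is created for p.q itself because
   only gimple register type fields get their own artificial accesses.  */
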
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (as_a <greturn *> (stmt));
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
                                               asm_visit_addr);
                if (final_bbs)
                  bitmap_set_bit (final_bbs, bb->index);

                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, true);
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}

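/* For instance (illustration only), accesses with (offset, size) pairs
   (0, 64), (0, 32) and (32, 32) already appear in exactly this order after
   sorting: equal offsets put the bigger access first, which guarantees that
   an enclosing access always precedes the accesses it contains.  */
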
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}

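/* A hypothetical example (not part of the sources): with BASE a.b where field
   b starts at byte 4 of a, OFFSET 32 and EXP_TYPE int, the reference folds to
   something along the lines of MEM[(int *) &a + 8B], i.e. the constant byte
   offset of the base and OFFSET / BITS_PER_UNIT are combined into a single
   MEM_REF.  */
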
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}

/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed; it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to the user (e.g. in warnings).
   In all other situations, build_ref_for_model or build_ref_for_offset should
   be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
                continue;
              pos = tree_to_uhwi (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !tree_fits_uhwi_p (tr_size))
                continue;
              size = tree_to_uhwi (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !tree_fits_uhwi_p (tr_size))
            return false;
          el_size = tree_to_uhwi (tr_size);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}

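/* Illustration only: with *RES being a variable v of TYPE struct { int x;
   int y; }, OFFSET 32 and EXP_TYPE int, the function would rewrite *RES to
   v.y and return true, assuming 32-bit int.  */
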
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}

/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
         we also want to schedule it rather late.  Thus we ignore it in
         the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}

1867 /* Sort all accesses for the given variable, check for partial overlaps and
1868 return NULL if there are any. If there are none, pick a representative for
1869 each combination of offset and size and create a linked list out of them.
1870 Return the pointer to the first representative and make sure it is the first
1871 one in the vector of accesses. */
1873 static struct access *
1874 sort_and_splice_var_accesses (tree var)
1876 int i, j, access_count;
1877 struct access *res, **prev_acc_ptr = &res;
1878 vec<access_p> *access_vec;
1879 bool first = true;
1880 HOST_WIDE_INT low = -1, high = 0;
1882 access_vec = get_base_access_vector (var);
1883 if (!access_vec)
1884 return NULL;
1885 access_count = access_vec->length ();
1887 /* Sort by <OFFSET, SIZE>. */
1888 access_vec->qsort (compare_access_positions);
1890 i = 0;
1891 while (i < access_count)
1893 struct access *access = (*access_vec)[i];
1894 bool grp_write = access->write;
1895 bool grp_read = !access->write;
1896 bool grp_scalar_write = access->write
1897 && is_gimple_reg_type (access->type);
1898 bool grp_scalar_read = !access->write
1899 && is_gimple_reg_type (access->type);
1900 bool grp_assignment_read = access->grp_assignment_read;
1901 bool grp_assignment_write = access->grp_assignment_write;
1902 bool multiple_scalar_reads = false;
1903 bool total_scalarization = access->grp_total_scalarization;
1904 bool grp_partial_lhs = access->grp_partial_lhs;
1905 bool first_scalar = is_gimple_reg_type (access->type);
1906 bool unscalarizable_region = access->grp_unscalarizable_region;
1908 if (first || access->offset >= high)
1910 first = false;
1911 low = access->offset;
1912 high = access->offset + access->size;
1914 else if (access->offset > low && access->offset + access->size > high)
1915 return NULL;
1916 else
1917 gcc_assert (access->offset >= low
1918 && access->offset + access->size <= high);
1920 j = i + 1;
1921 while (j < access_count)
1923 struct access *ac2 = (*access_vec)[j];
1924 if (ac2->offset != access->offset || ac2->size != access->size)
1925 break;
1926 if (ac2->write)
1928 grp_write = true;
1929 grp_scalar_write = (grp_scalar_write
1930 || is_gimple_reg_type (ac2->type));
1932 else
1934 grp_read = true;
1935 if (is_gimple_reg_type (ac2->type))
1937 if (grp_scalar_read)
1938 multiple_scalar_reads = true;
1939 else
1940 grp_scalar_read = true;
1943 grp_assignment_read |= ac2->grp_assignment_read;
1944 grp_assignment_write |= ac2->grp_assignment_write;
1945 grp_partial_lhs |= ac2->grp_partial_lhs;
1946 unscalarizable_region |= ac2->grp_unscalarizable_region;
1947 total_scalarization |= ac2->grp_total_scalarization;
1948 relink_to_new_repr (access, ac2);
1950 /* If there are both aggregate-type and scalar-type accesses with
1951 this combination of size and offset, the comparison function
1952 should have put the scalars first. */
1953 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1954 ac2->group_representative = access;
1955 j++;
1958 i = j;
1960 access->group_representative = access;
1961 access->grp_write = grp_write;
1962 access->grp_read = grp_read;
1963 access->grp_scalar_read = grp_scalar_read;
1964 access->grp_scalar_write = grp_scalar_write;
1965 access->grp_assignment_read = grp_assignment_read;
1966 access->grp_assignment_write = grp_assignment_write;
1967 access->grp_hint = multiple_scalar_reads || total_scalarization;
1968 access->grp_total_scalarization = total_scalarization;
1969 access->grp_partial_lhs = grp_partial_lhs;
1970 access->grp_unscalarizable_region = unscalarizable_region;
1971 if (access->first_link)
1972 add_access_to_work_queue (access);
1974 *prev_acc_ptr = access;
1975 prev_acc_ptr = &access->next_grp;
1978 gcc_assert (res == (*access_vec)[0]);
1979 return res;
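/* A sketch of the grouping above (hypothetical example): if a function
   reads p.a twice and writes p.b once, the sorted access vector holds three
   entries, <0, 32> read, <0, 32> read and <32, 32> write (offsets and sizes
   in bits, assuming a 32-bit int).  The two reads are spliced into a single
   group whose representative gets grp_hint set because of the repeated
   scalar reads, the write forms a second group, and the returned list links
   the two representatives through next_grp.  */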
1982 /* Create a variable for the given ACCESS which determines the type, name and a
1983 few other properties. Return the variable declaration and store it also to
1984 ACCESS->replacement. */
1986 static tree
1987 create_access_replacement (struct access *access)
1989 tree repl;
1991 if (access->grp_to_be_debug_replaced)
1993 repl = create_tmp_var_raw (access->type);
1994 DECL_CONTEXT (repl) = current_function_decl;
1996 else
1997 repl = create_tmp_var (access->type, "SR");
1998 if (TREE_CODE (access->type) == COMPLEX_TYPE
1999 || TREE_CODE (access->type) == VECTOR_TYPE)
2001 if (!access->grp_partial_lhs)
2002 DECL_GIMPLE_REG_P (repl) = 1;
2004 else if (access->grp_partial_lhs
2005 && is_gimple_reg_type (access->type))
2006 TREE_ADDRESSABLE (repl) = 1;
2008 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2009 DECL_ARTIFICIAL (repl) = 1;
2010 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2012 if (DECL_NAME (access->base)
2013 && !DECL_IGNORED_P (access->base)
2014 && !DECL_ARTIFICIAL (access->base))
2016 char *pretty_name = make_fancy_name (access->expr);
2017 tree debug_expr = unshare_expr_without_location (access->expr), d;
2018 bool fail = false;
2020 DECL_NAME (repl) = get_identifier (pretty_name);
2021 obstack_free (&name_obstack, pretty_name);
2023 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2024 as DECL_DEBUG_EXPR isn't considered when looking for still
2025 used SSA_NAMEs and thus they could be freed. All debug info
2026 generation cares about is whether something is constant or variable
2027 and that get_ref_base_and_extent works properly on the
2028 expression. It cannot handle accesses at a non-constant offset
2029 though, so just give up in those cases. */
2030 for (d = debug_expr;
2031 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2032 d = TREE_OPERAND (d, 0))
2033 switch (TREE_CODE (d))
2035 case ARRAY_REF:
2036 case ARRAY_RANGE_REF:
2037 if (TREE_OPERAND (d, 1)
2038 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2039 fail = true;
2040 if (TREE_OPERAND (d, 3)
2041 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2042 fail = true;
2043 /* FALLTHRU */
2044 case COMPONENT_REF:
2045 if (TREE_OPERAND (d, 2)
2046 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2047 fail = true;
2048 break;
2049 case MEM_REF:
2050 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2051 fail = true;
2052 else
2053 d = TREE_OPERAND (d, 0);
2054 break;
2055 default:
2056 break;
2058 if (!fail)
2060 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2061 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2063 if (access->grp_no_warning)
2064 TREE_NO_WARNING (repl) = 1;
2065 else
2066 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2068 else
2069 TREE_NO_WARNING (repl) = 1;
2071 if (dump_file)
2073 if (access->grp_to_be_debug_replaced)
2075 fprintf (dump_file, "Created a debug-only replacement for ");
2076 print_generic_expr (dump_file, access->base, 0);
2077 fprintf (dump_file, " offset: %u, size: %u\n",
2078 (unsigned) access->offset, (unsigned) access->size);
2080 else
2082 fprintf (dump_file, "Created a replacement for ");
2083 print_generic_expr (dump_file, access->base, 0);
2084 fprintf (dump_file, " offset: %u, size: %u: ",
2085 (unsigned) access->offset, (unsigned) access->size);
2086 print_generic_expr (dump_file, repl, 0);
2087 fprintf (dump_file, "\n");
2090 sra_stats.replacements++;
2092 return repl;
2095 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
2097 static inline tree
2098 get_access_replacement (struct access *access)
2100 gcc_checking_assert (access->replacement_decl);
2101 return access->replacement_decl;
2105 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2106 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2107 to by it is not "within" the root. Return false iff some accesses partially
2108 overlap. */
2110 static bool
2111 build_access_subtree (struct access **access)
2113 struct access *root = *access, *last_child = NULL;
2114 HOST_WIDE_INT limit = root->offset + root->size;
2116 *access = (*access)->next_grp;
2117 while (*access && (*access)->offset + (*access)->size <= limit)
2119 if (!last_child)
2120 root->first_child = *access;
2121 else
2122 last_child->next_sibling = *access;
2123 last_child = *access;
2125 if (!build_access_subtree (access))
2126 return false;
2129 if (*access && (*access)->offset < limit)
2130 return false;
2132 return true;
2135 /* Build a tree of access representatives, ACCESS is the pointer to the first
2136 one, others are linked in a list by the next_grp field. Return false iff
2137 some accesses partially overlap. */
2139 static bool
2140 build_access_trees (struct access *access)
2142 while (access)
2144 struct access *root = access;
2146 if (!build_access_subtree (&access))
2147 return false;
2148 root->next_grp = access;
2150 return true;
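/* For instance (hypothetical example), for

     struct in { int x; int y; };
     struct out { struct in i; int z; };

   representatives <0, 64>, <0, 32>, <32, 32> and <64, 32> are linked into
   a forest where <0, 64> (the whole member i) has the two int accesses as
   children, and <64, 32> (member z) is the next root reachable through
   next_grp.  */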
2153 /* Return true if EXPR contains some ARRAY_REFs into a variably bounded
2154 array. */
2156 static bool
2157 expr_with_var_bounded_array_refs_p (tree expr)
2159 while (handled_component_p (expr))
2161 if (TREE_CODE (expr) == ARRAY_REF
2162 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2163 return true;
2164 expr = TREE_OPERAND (expr, 0);
2166 return false;
2169 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2170 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2171 sorts of access flags appropriately along the way, notably propagating
2172 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2173 grp_total_scalarization from the parent to the root.
2175 Creating a replacement for a scalar access is considered beneficial if its
2176 grp_hint is set (this means we are either attempting total scalarization or
2177 there is more than one direct read access) or according to the following
2178 table:
2180 Access written to through a scalar type (once or more times)
2182 | Written to in an assignment statement
2184 | | Access read as scalar _once_
2185 | | |
2186 | | | Read in an assignment statement
2187 | | | |
2188 | | | | Scalarize Comment
2189 -----------------------------------------------------------------------------
2190 0 0 0 0   -     No access for the scalar
2191 0 0 0 1   -     No access for the scalar
2192 0 0 1 0   No    Single read - won't help
2193 0 0 1 1   No    The same case
2194 0 1 0 0   -     No access for the scalar
2195 0 1 0 1   -     No access for the scalar
2196 0 1 1 0   Yes   s = *g; return s.i;
2197 0 1 1 1   Yes   The same case as above
2198 1 0 0 0   No    Won't help
2199 1 0 0 1   Yes   s.i = 1; *g = s;
2200 1 0 1 0   Yes   s.i = 5; g = s.i;
2201 1 0 1 1   Yes   The same case as above
2202 1 1 0 0   No    Won't help.
2203 1 1 0 1   Yes   s.i = 1; *g = s;
2204 1 1 1 0   Yes   s = *g; return s.i;
2205 1 1 1 1   Yes   Any of the above yeses */
2207 static bool
2208 analyze_access_subtree (struct access *root, struct access *parent,
2209 bool allow_replacements)
2211 struct access *child;
2212 HOST_WIDE_INT limit = root->offset + root->size;
2213 HOST_WIDE_INT covered_to = root->offset;
2214 bool scalar = is_gimple_reg_type (root->type);
2215 bool hole = false, sth_created = false;
2217 if (parent)
2219 if (parent->grp_read)
2220 root->grp_read = 1;
2221 if (parent->grp_assignment_read)
2222 root->grp_assignment_read = 1;
2223 if (parent->grp_write)
2224 root->grp_write = 1;
2225 if (parent->grp_assignment_write)
2226 root->grp_assignment_write = 1;
2227 if (parent->grp_total_scalarization)
2228 root->grp_total_scalarization = 1;
2231 if (root->grp_unscalarizable_region)
2232 allow_replacements = false;
2234 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2235 allow_replacements = false;
2237 for (child = root->first_child; child; child = child->next_sibling)
2239 hole |= covered_to < child->offset;
2240 sth_created |= analyze_access_subtree (child, root,
2241 allow_replacements && !scalar);
2243 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2244 root->grp_total_scalarization &= child->grp_total_scalarization;
2245 if (child->grp_covered)
2246 covered_to += child->size;
2247 else
2248 hole = true;
2251 if (allow_replacements && scalar && !root->first_child
2252 && (root->grp_hint
2253 || ((root->grp_scalar_read || root->grp_assignment_read)
2254 && (root->grp_scalar_write || root->grp_assignment_write))))
2256 /* Always create access replacements that cover the whole access.
2257 For integral types this means the precision has to match.
2258 Avoid assumptions based on the integral type kind, too. */
2259 if (INTEGRAL_TYPE_P (root->type)
2260 && (TREE_CODE (root->type) != INTEGER_TYPE
2261 || TYPE_PRECISION (root->type) != root->size)
2262 /* But leave bitfield accesses alone. */
2263 && (TREE_CODE (root->expr) != COMPONENT_REF
2264 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2266 tree rt = root->type;
2267 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2268 && (root->size % BITS_PER_UNIT) == 0);
2269 root->type = build_nonstandard_integer_type (root->size,
2270 TYPE_UNSIGNED (rt));
2271 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2272 root->base, root->offset,
2273 root->type, NULL, false);
2275 if (dump_file && (dump_flags & TDF_DETAILS))
2277 fprintf (dump_file, "Changing the type of a replacement for ");
2278 print_generic_expr (dump_file, root->base, 0);
2279 fprintf (dump_file, " offset: %u, size: %u ",
2280 (unsigned) root->offset, (unsigned) root->size);
2281 fprintf (dump_file, " to an integer.\n");
2285 root->grp_to_be_replaced = 1;
2286 root->replacement_decl = create_access_replacement (root);
2287 sth_created = true;
2288 hole = false;
2290 else
2292 if (allow_replacements
2293 && scalar && !root->first_child
2294 && (root->grp_scalar_write || root->grp_assignment_write)
2295 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2296 DECL_UID (root->base)))
2298 gcc_checking_assert (!root->grp_scalar_read
2299 && !root->grp_assignment_read);
2300 sth_created = true;
2301 if (MAY_HAVE_DEBUG_STMTS)
2303 root->grp_to_be_debug_replaced = 1;
2304 root->replacement_decl = create_access_replacement (root);
2308 if (covered_to < limit)
2309 hole = true;
2310 if (scalar)
2311 root->grp_total_scalarization = 0;
2314 if (!hole || root->grp_total_scalarization)
2315 root->grp_covered = 1;
2316 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2317 root->grp_unscalarized_data = 1; /* not covered and written to */
2318 return sth_created;
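/* To make row "1 0 0 1 Yes" of the table above concrete (a made-up
   example):

     struct s { int i; };
     void f (struct s *g) { struct s s; s.i = 1; *g = s; }

   s.i is written through a scalar type and s is read by an aggregate
   assignment, so a replacement SR.1 (hypothetical name) is created and the
   body becomes roughly "SR.1 = 1; s.i = SR.1; *g = s;", which later copy
   propagation and dead store elimination can clean up.  */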
2321 /* Analyze all access trees linked by next_grp by the means of
2322 analyze_access_subtree. */
2323 static bool
2324 analyze_access_trees (struct access *access)
2326 bool ret = false;
2328 while (access)
2330 if (analyze_access_subtree (access, NULL, true))
2331 ret = true;
2332 access = access->next_grp;
2335 return ret;
2338 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2339 with size SIZE would conflict with an already existing one. If exactly such
2340 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2342 static bool
2343 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2344 HOST_WIDE_INT size, struct access **exact_match)
2346 struct access *child;
2348 for (child = lacc->first_child; child; child = child->next_sibling)
2350 if (child->offset == norm_offset && child->size == size)
2352 *exact_match = child;
2353 return true;
2356 if (child->offset < norm_offset + size
2357 && child->offset + child->size > norm_offset)
2358 return true;
2361 return false;
2364 /* Create a new child access of PARENT, with all properties just like MODEL
2365 except for its offset and with its grp_write false and grp_read true.
2366 Return the new access or NULL if it cannot be created. Note that this access
2367 is created long after all splicing and sorting; it is not located in any
2368 access vector and is automatically a representative of its group. */
2370 static struct access *
2371 create_artificial_child_access (struct access *parent, struct access *model,
2372 HOST_WIDE_INT new_offset)
2374 struct access *access;
2375 struct access **child;
2376 tree expr = parent->base;
2378 gcc_assert (!model->grp_unscalarizable_region);
2380 access = (struct access *) pool_alloc (access_pool);
2381 memset (access, 0, sizeof (struct access));
2382 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2383 model->type))
2385 access->grp_no_warning = true;
2386 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2387 new_offset, model, NULL, false);
2390 access->base = parent->base;
2391 access->expr = expr;
2392 access->offset = new_offset;
2393 access->size = model->size;
2394 access->type = model->type;
2395 access->grp_write = true;
2396 access->grp_read = false;
2398 child = &parent->first_child;
2399 while (*child && (*child)->offset < new_offset)
2400 child = &(*child)->next_sibling;
2402 access->next_sibling = *child;
2403 *child = access;
2405 return access;
2409 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2410 true if any new subaccess was created. Additionally, if RACC is a scalar
2411 access but LACC is not, change the type of the latter, if possible. */
2413 static bool
2414 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2416 struct access *rchild;
2417 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2418 bool ret = false;
2420 if (is_gimple_reg_type (lacc->type)
2421 || lacc->grp_unscalarizable_region
2422 || racc->grp_unscalarizable_region)
2423 return false;
2425 if (is_gimple_reg_type (racc->type))
2427 if (!lacc->first_child && !racc->first_child)
2429 tree t = lacc->base;
2431 lacc->type = racc->type;
2432 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2433 lacc->offset, racc->type))
2434 lacc->expr = t;
2435 else
2437 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2438 lacc->base, lacc->offset,
2439 racc, NULL, false);
2440 lacc->grp_no_warning = true;
2443 return false;
2446 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2448 struct access *new_acc = NULL;
2449 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2451 if (rchild->grp_unscalarizable_region)
2452 continue;
2454 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2455 &new_acc))
2457 if (new_acc)
2459 rchild->grp_hint = 1;
2460 new_acc->grp_hint |= new_acc->grp_read;
2461 if (rchild->first_child)
2462 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2464 continue;
2467 rchild->grp_hint = 1;
2468 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2469 if (new_acc)
2471 ret = true;
2472 if (racc->first_child)
2473 propagate_subaccesses_across_link (new_acc, rchild);
2477 return ret;
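/* A made-up example of the propagation above: given "l = r;" where only
   r.f has a known scalar subaccess, an artificial child access for l.f is
   created so that the assignment can later be rewritten as a copy between
   two scalar replacements instead of an aggregate copy.  */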
2480 /* Propagate all subaccesses across assignment links. */
2482 static void
2483 propagate_all_subaccesses (void)
2485 while (work_queue_head)
2487 struct access *racc = pop_access_from_work_queue ();
2488 struct assign_link *link;
2490 gcc_assert (racc->first_link);
2492 for (link = racc->first_link; link; link = link->next)
2494 struct access *lacc = link->lacc;
2496 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2497 continue;
2498 lacc = lacc->group_representative;
2499 if (propagate_subaccesses_across_link (lacc, racc)
2500 && lacc->first_link)
2501 add_access_to_work_queue (lacc);
2506 /* Go through all accesses collected throughout the (intraprocedural) analysis
2507 stage, exclude overlapping ones, identify representatives and build trees
2508 out of them, making decisions about scalarization on the way. Return true
2509 iff there are any to-be-scalarized variables after this stage. */
2511 static bool
2512 analyze_all_variable_accesses (void)
2514 int res = 0;
2515 bitmap tmp = BITMAP_ALLOC (NULL);
2516 bitmap_iterator bi;
2517 unsigned i;
2518 unsigned max_scalarization_size
2519 = (optimize_function_for_size_p (cfun)
2520 ? PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE)
2521 : PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED))
2522 * BITS_PER_UNIT;
2524 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2525 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2526 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2528 tree var = candidate (i);
2530 if (TREE_CODE (var) == VAR_DECL
2531 && type_consists_of_records_p (TREE_TYPE (var)))
2533 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2534 <= max_scalarization_size)
2536 completely_scalarize_var (var);
2537 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "Will attempt to totally scalarize ");
2540 print_generic_expr (dump_file, var, 0);
2541 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2544 else if (dump_file && (dump_flags & TDF_DETAILS))
2546 fprintf (dump_file, "Too big to totally scalarize: ");
2547 print_generic_expr (dump_file, var, 0);
2548 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2553 bitmap_copy (tmp, candidate_bitmap);
2554 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2556 tree var = candidate (i);
2557 struct access *access;
2559 access = sort_and_splice_var_accesses (var);
2560 if (!access || !build_access_trees (access))
2561 disqualify_candidate (var,
2562 "No or inhibitingly overlapping accesses.");
2565 propagate_all_subaccesses ();
2567 bitmap_copy (tmp, candidate_bitmap);
2568 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2570 tree var = candidate (i);
2571 struct access *access = get_first_repr_for_decl (var);
2573 if (analyze_access_trees (access))
2575 res++;
2576 if (dump_file && (dump_flags & TDF_DETAILS))
2578 fprintf (dump_file, "\nAccess trees for ");
2579 print_generic_expr (dump_file, var, 0);
2580 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2581 dump_access_tree (dump_file, access);
2582 fprintf (dump_file, "\n");
2585 else
2586 disqualify_candidate (var, "No scalar replacements to be created.");
2589 BITMAP_FREE (tmp);
2591 if (res)
2593 statistics_counter_event (cfun, "Scalarized aggregates", res);
2594 return true;
2596 else
2597 return false;
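/* The total-scalarization limit used above can be tuned from the command
   line; an illustrative invocation (file name made up):

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 test.c

   caps the size in bytes (converted to bits above) of aggregates that the
   pass will try to scalarize completely when optimizing for speed.  */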
2600 /* Generate statements copying scalar replacements of accesses within a subtree
2601 into or out of AGG. ACCESS, all its children, siblings and their children
2602 are to be processed. AGG is an aggregate type expression (can be a
2603 declaration but does not have to be, it can for example also be a mem_ref or
2604 a series of handled components). TOP_OFFSET is the offset of the processed
2605 subtree which has to be subtracted from offsets of individual accesses to
2606 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2607 replacements in the interval <start_offset, start_offset + chunk_size>,
2608 otherwise copy all. GSI is a statement iterator used to place the new
2609 statements. WRITE should be true when the statements should write from AGG
2610 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2611 statements will be added after the current statement in GSI, they will be
2612 added before the statement otherwise. */
2614 static void
2615 generate_subtree_copies (struct access *access, tree agg,
2616 HOST_WIDE_INT top_offset,
2617 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2618 gimple_stmt_iterator *gsi, bool write,
2619 bool insert_after, location_t loc)
2623 if (chunk_size && access->offset >= start_offset + chunk_size)
2624 return;
2626 if (access->grp_to_be_replaced
2627 && (chunk_size == 0
2628 || access->offset + access->size > start_offset))
2630 tree expr, repl = get_access_replacement (access);
2631 gassign *stmt;
2633 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2634 access, gsi, insert_after);
2636 if (write)
2638 if (access->grp_partial_lhs)
2639 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2640 !insert_after,
2641 insert_after ? GSI_NEW_STMT
2642 : GSI_SAME_STMT);
2643 stmt = gimple_build_assign (repl, expr);
2645 else
2647 TREE_NO_WARNING (repl) = 1;
2648 if (access->grp_partial_lhs)
2649 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2650 !insert_after,
2651 insert_after ? GSI_NEW_STMT
2652 : GSI_SAME_STMT);
2653 stmt = gimple_build_assign (expr, repl);
2655 gimple_set_location (stmt, loc);
2657 if (insert_after)
2658 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2659 else
2660 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2661 update_stmt (stmt);
2662 sra_stats.subtree_copies++;
2664 else if (write
2665 && access->grp_to_be_debug_replaced
2666 && (chunk_size == 0
2667 || access->offset + access->size > start_offset))
2669 gdebug *ds;
2670 tree drhs = build_debug_ref_for_model (loc, agg,
2671 access->offset - top_offset,
2672 access);
2673 ds = gimple_build_debug_bind (get_access_replacement (access),
2674 drhs, gsi_stmt (*gsi));
2675 if (insert_after)
2676 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2677 else
2678 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2681 if (access->first_child)
2682 generate_subtree_copies (access->first_child, agg, top_offset,
2683 start_offset, chunk_size, gsi,
2684 write, insert_after, loc);
2686 access = access->next_sibling;
2688 while (access);
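/* For example (hypothetical names), flushing the subtree of a fully
   scalarized two-field struct back into its base aggregate with WRITE
   false emits "p.a = SR.1;" and "p.b = SR.2;", placed before or after the
   statement at GSI depending on INSERT_AFTER.  */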
2691 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2692 the root of the subtree to be processed. GSI is the statement iterator used
2693 for inserting statements which are added after the current statement if
2694 INSERT_AFTER is true or before it otherwise. */
2696 static void
2697 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2698 bool insert_after, location_t loc)
2701 struct access *child;
2703 if (access->grp_to_be_replaced)
2705 gassign *stmt;
2707 stmt = gimple_build_assign (get_access_replacement (access),
2708 build_zero_cst (access->type));
2709 if (insert_after)
2710 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2711 else
2712 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2713 update_stmt (stmt);
2714 gimple_set_location (stmt, loc);
2716 else if (access->grp_to_be_debug_replaced)
2718 gdebug *ds
2719 = gimple_build_debug_bind (get_access_replacement (access),
2720 build_zero_cst (access->type),
2721 gsi_stmt (*gsi));
2722 if (insert_after)
2723 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2724 else
2725 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2728 for (child = access->first_child; child; child = child->next_sibling)
2729 init_subtree_with_zero (child, gsi, insert_after, loc);
2732 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2733 root of the subtree to be processed. GSI is the statement iterator used
2734 for inserting statements which are added after the current statement if
2735 INSERT_AFTER is true or before it otherwise. */
2737 static void
2738 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2739 bool insert_after, location_t loc)
2742 struct access *child;
2744 if (access->grp_to_be_replaced)
2746 tree rep = get_access_replacement (access);
2747 tree clobber = build_constructor (access->type, NULL);
2748 TREE_THIS_VOLATILE (clobber) = 1;
2749 gimple stmt = gimple_build_assign (rep, clobber);
2751 if (insert_after)
2752 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2753 else
2754 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2755 update_stmt (stmt);
2756 gimple_set_location (stmt, loc);
2759 for (child = access->first_child; child; child = child->next_sibling)
2760 clobber_subtree (child, gsi, insert_after, loc);
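/* In -fdump-tree-sra dumps each replacement clobbered above shows up as a
   GIMPLE clobber of the form "SR.1 ={v} {CLOBBER};" (hypothetical name),
   marking the point where its value becomes dead.  */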
2763 /* Search for an access representative for the given expression EXPR and
2764 return it or NULL if it cannot be found. */
2766 static struct access *
2767 get_access_for_expr (tree expr)
2769 HOST_WIDE_INT offset, size, max_size;
2770 tree base;
2772 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2773 a different size than the size of its argument and we need the latter
2774 one. */
2775 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2776 expr = TREE_OPERAND (expr, 0);
2778 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2779 if (max_size == -1 || !DECL_P (base))
2780 return NULL;
2782 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2783 return NULL;
2785 return get_var_base_offset_size_access (base, offset, max_size);
2788 /* Replace the expression EXPR with a scalar replacement if there is one and
2789 generate other statements to do type conversion or subtree copying if
2790 necessary. GSI is used to place newly created statements, WRITE is true if
2791 the expression is being written to (it is on a LHS of a statement or output
2792 in an assembly statement). */
2794 static bool
2795 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2797 location_t loc;
2798 struct access *access;
2799 tree type, bfr, orig_expr;
2801 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2803 bfr = *expr;
2804 expr = &TREE_OPERAND (*expr, 0);
2806 else
2807 bfr = NULL_TREE;
2809 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2810 expr = &TREE_OPERAND (*expr, 0);
2811 access = get_access_for_expr (*expr);
2812 if (!access)
2813 return false;
2814 type = TREE_TYPE (*expr);
2815 orig_expr = *expr;
2817 loc = gimple_location (gsi_stmt (*gsi));
2818 gimple_stmt_iterator alt_gsi = gsi_none ();
2819 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2821 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2822 gsi = &alt_gsi;
2825 if (access->grp_to_be_replaced)
2827 tree repl = get_access_replacement (access);
2828 /* If we replace a non-register typed access simply use the original
2829 access expression to extract the scalar component afterwards.
2830 This happens if scalarizing a function return value or parameter
2831 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2832 gcc.c-torture/compile/20011217-1.c.
2834 We also want to use this when accessing a complex or vector which can
2835 be accessed as a different type too, potentially creating a need for
2836 type conversion (see PR42196) and when scalarized unions are involved
2837 in assembler statements (see PR42398). */
2838 if (!useless_type_conversion_p (type, access->type))
2840 tree ref;
2842 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2844 if (write)
2846 gassign *stmt;
2848 if (access->grp_partial_lhs)
2849 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2850 false, GSI_NEW_STMT);
2851 stmt = gimple_build_assign (repl, ref);
2852 gimple_set_location (stmt, loc);
2853 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2855 else
2857 gassign *stmt;
2859 if (access->grp_partial_lhs)
2860 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2861 true, GSI_SAME_STMT);
2862 stmt = gimple_build_assign (ref, repl);
2863 gimple_set_location (stmt, loc);
2864 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2867 else
2868 *expr = repl;
2869 sra_stats.exprs++;
2871 else if (write && access->grp_to_be_debug_replaced)
2873 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2874 NULL_TREE,
2875 gsi_stmt (*gsi));
2876 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2879 if (access->first_child)
2881 HOST_WIDE_INT start_offset, chunk_size;
2882 if (bfr
2883 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2884 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2886 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2887 start_offset = access->offset
2888 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2890 else
2891 start_offset = chunk_size = 0;
2893 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2894 start_offset, chunk_size, gsi, write, write,
2895 loc);
2897 return true;
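/* A small before/after sketch (made-up names): a read "tmp_1 = x.i;" whose
   access has a scalar replacement of a compatible type is rewritten in
   place to "tmp_1 = SR.1;".  On a type mismatch the original reference is
   kept and a copy between it and the replacement, built with
   build_ref_for_model, is inserted next to the statement instead.  */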
2900 /* Where scalar replacements of the RHS have been written to when a replacement
2901 of the LHS of an assignment cannot be directly loaded from a replacement of
2902 the RHS. */
2903 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2904 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2905 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2907 struct subreplacement_assignment_data
2909 /* Offset of the access representing the lhs of the assignment. */
2910 HOST_WIDE_INT left_offset;
2912 /* LHS and RHS of the original assignment. */
2913 tree assignment_lhs, assignment_rhs;
2915 /* Access representing the rhs of the whole assignment. */
2916 struct access *top_racc;
2918 /* Stmt iterator used for statement insertions after the original assignment.
2919 It points to the main GSI used to traverse a BB during function body
2920 modification. */
2921 gimple_stmt_iterator *new_gsi;
2923 /* Stmt iterator used for statement insertions before the original
2924 assignment. Keeps on pointing to the original statement. */
2925 gimple_stmt_iterator old_gsi;
2927 /* Location of the assignment. */
2928 location_t loc;
2930 /* Keeps the information whether we have needed to refresh replacements of
2931 the LHS and from which side of the assignment this takes place. */
2932 enum unscalarized_data_handling refreshed;
2935 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2936 their base aggregate if there are unscalarized data or directly to the LHS
2937 of the original assignment statement otherwise. */
2939 static void
2940 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2942 tree src;
2943 if (sad->top_racc->grp_unscalarized_data)
2945 src = sad->assignment_rhs;
2946 sad->refreshed = SRA_UDH_RIGHT;
2948 else
2950 src = sad->assignment_lhs;
2951 sad->refreshed = SRA_UDH_LEFT;
2953 generate_subtree_copies (sad->top_racc->first_child, src,
2954 sad->top_racc->offset, 0, 0,
2955 &sad->old_gsi, false, false, sad->loc);
2958 /* Try to generate statements to load all sub-replacements in an access subtree
2959 formed by children of LACC from scalar replacements in the SAD->top_racc
2960 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2961 and load the accesses from it. */
2963 static void
2964 load_assign_lhs_subreplacements (struct access *lacc,
2965 struct subreplacement_assignment_data *sad)
2967 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2969 HOST_WIDE_INT offset;
2970 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2972 if (lacc->grp_to_be_replaced)
2974 struct access *racc;
2975 gassign *stmt;
2976 tree rhs;
2978 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2979 if (racc && racc->grp_to_be_replaced)
2981 rhs = get_access_replacement (racc);
2982 if (!useless_type_conversion_p (lacc->type, racc->type))
2983 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2984 lacc->type, rhs);
2986 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2987 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2988 NULL_TREE, true, GSI_SAME_STMT);
2990 else
2992 /* No suitable access on the right hand side, need to load from
2993 the aggregate. See if we have to update it first... */
2994 if (sad->refreshed == SRA_UDH_NONE)
2995 handle_unscalarized_data_in_subtree (sad);
2997 if (sad->refreshed == SRA_UDH_LEFT)
2998 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2999 lacc->offset - sad->left_offset,
3000 lacc, sad->new_gsi, true);
3001 else
3002 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3003 lacc->offset - sad->left_offset,
3004 lacc, sad->new_gsi, true);
3005 if (lacc->grp_partial_lhs)
3006 rhs = force_gimple_operand_gsi (sad->new_gsi,
3007 rhs, true, NULL_TREE,
3008 false, GSI_NEW_STMT);
3011 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3012 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3013 gimple_set_location (stmt, sad->loc);
3014 update_stmt (stmt);
3015 sra_stats.subreplacements++;
3017 else
3019 if (sad->refreshed == SRA_UDH_NONE
3020 && lacc->grp_read && !lacc->grp_covered)
3021 handle_unscalarized_data_in_subtree (sad);
3023 if (lacc && lacc->grp_to_be_debug_replaced)
3025 gdebug *ds;
3026 tree drhs;
3027 struct access *racc = find_access_in_subtree (sad->top_racc,
3028 offset,
3029 lacc->size);
3031 if (racc && racc->grp_to_be_replaced)
3033 if (racc->grp_write)
3034 drhs = get_access_replacement (racc);
3035 else
3036 drhs = NULL;
3038 else if (sad->refreshed == SRA_UDH_LEFT)
3039 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3040 lacc->offset, lacc);
3041 else if (sad->refreshed == SRA_UDH_RIGHT)
3042 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3043 offset, lacc);
3044 else
3045 drhs = NULL_TREE;
3046 if (drhs
3047 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3048 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3049 lacc->type, drhs);
3050 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3051 drhs, gsi_stmt (sad->old_gsi));
3052 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3056 if (lacc->first_child)
3057 load_assign_lhs_subreplacements (lacc, sad);
3061 /* Result code for SRA assignment modification. */
3062 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3063 SRA_AM_MODIFIED, /* stmt changed but not
3064 removed */
3065 SRA_AM_REMOVED }; /* stmt eliminated */
3067 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3068 to the assignment and GSI is the statement iterator pointing at it. Returns
3069 the same values as sra_modify_assign. */
3071 static enum assignment_mod_result
3072 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3074 tree lhs = gimple_assign_lhs (stmt);
3075 struct access *acc = get_access_for_expr (lhs);
3076 if (!acc)
3077 return SRA_AM_NONE;
3078 location_t loc = gimple_location (stmt);
3080 if (gimple_clobber_p (stmt))
3082 /* Clobber the replacement variable. */
3083 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3084 /* Remove clobbers of fully scalarized variables, they are dead. */
3085 if (acc->grp_covered)
3087 unlink_stmt_vdef (stmt);
3088 gsi_remove (gsi, true);
3089 release_defs (stmt);
3090 return SRA_AM_REMOVED;
3092 else
3093 return SRA_AM_MODIFIED;
3096 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3098 /* I have never seen this code path trigger but if it does happen, the
3099 following should handle it gracefully. */
3100 if (access_has_children_p (acc))
3101 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3102 true, true, loc);
3103 return SRA_AM_MODIFIED;
3106 if (acc->grp_covered)
3108 init_subtree_with_zero (acc, gsi, false, loc);
3109 unlink_stmt_vdef (stmt);
3110 gsi_remove (gsi, true);
3111 release_defs (stmt);
3112 return SRA_AM_REMOVED;
3114 else
3116 init_subtree_with_zero (acc, gsi, true, loc);
3117 return SRA_AM_MODIFIED;
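/* For instance (hypothetical example), "x = (struct s){ 0 };" over a fully
   covered access tree is deleted and replaced by direct assignments
   "SR.1 = 0; SR.2 = 0;" to the scalar replacements, whereas for a
   partially covered x the original statement is kept and the zeroing is
   inserted after it.  */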
3121 /* Create and return a new suitable default definition SSA_NAME for RACC which
3122 is an access describing an uninitialized part of an aggregate that is being
3123 loaded. */
3125 static tree
3126 get_repl_default_def_ssa_name (struct access *racc)
3128 gcc_checking_assert (!racc->grp_to_be_replaced
3129 && !racc->grp_to_be_debug_replaced);
3130 if (!racc->replacement_decl)
3131 racc->replacement_decl = create_access_replacement (racc);
3132 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3135 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3136 bit-field field declaration somewhere in it. */
3138 static inline bool
3139 contains_vce_or_bfcref_p (const_tree ref)
3141 while (handled_component_p (ref))
3143 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3144 || (TREE_CODE (ref) == COMPONENT_REF
3145 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3146 return true;
3147 ref = TREE_OPERAND (ref, 0);
3150 return false;
3153 /* Examine both sides of the assignment statement pointed to by STMT, replace
3154 them with a scalar replacement if there is one and generate copying of
3155 replacements if scalarized aggregates have been used in the assignment. GSI
3156 is used to hold generated statements for type conversions and subtree
3157 copying. */
3159 static enum assignment_mod_result
3160 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3162 struct access *lacc, *racc;
3163 tree lhs, rhs;
3164 bool modify_this_stmt = false;
3165 bool force_gimple_rhs = false;
3166 location_t loc;
3167 gimple_stmt_iterator orig_gsi = *gsi;
3169 if (!gimple_assign_single_p (stmt))
3170 return SRA_AM_NONE;
3171 lhs = gimple_assign_lhs (stmt);
3172 rhs = gimple_assign_rhs1 (stmt);
3174 if (TREE_CODE (rhs) == CONSTRUCTOR)
3175 return sra_modify_constructor_assign (stmt, gsi);
3177 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3178 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3179 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3181 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3182 gsi, false);
3183 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3184 gsi, true);
3185 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3188 lacc = get_access_for_expr (lhs);
3189 racc = get_access_for_expr (rhs);
3190 if (!lacc && !racc)
3191 return SRA_AM_NONE;
3193 loc = gimple_location (stmt);
3194 if (lacc && lacc->grp_to_be_replaced)
3196 lhs = get_access_replacement (lacc);
3197 gimple_assign_set_lhs (stmt, lhs);
3198 modify_this_stmt = true;
3199 if (lacc->grp_partial_lhs)
3200 force_gimple_rhs = true;
3201 sra_stats.exprs++;
3204 if (racc && racc->grp_to_be_replaced)
3206 rhs = get_access_replacement (racc);
3207 modify_this_stmt = true;
3208 if (racc->grp_partial_lhs)
3209 force_gimple_rhs = true;
3210 sra_stats.exprs++;
3212 else if (racc
3213 && !racc->grp_unscalarized_data
3214 && TREE_CODE (lhs) == SSA_NAME
3215 && !access_has_replacements_p (racc))
3217 rhs = get_repl_default_def_ssa_name (racc);
3218 modify_this_stmt = true;
3219 sra_stats.exprs++;
3222 if (modify_this_stmt)
3224 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3226 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3227 ??? This should move to fold_stmt which we simply should
3228 call after building a VIEW_CONVERT_EXPR here. */
3229 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3230 && !contains_bitfld_component_ref_p (lhs))
3232 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3233 gimple_assign_set_lhs (stmt, lhs);
3235 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3236 && !contains_vce_or_bfcref_p (rhs))
3237 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3239 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3241 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3242 rhs);
3243 if (is_gimple_reg_type (TREE_TYPE (lhs))
3244 && TREE_CODE (lhs) != SSA_NAME)
3245 force_gimple_rhs = true;
3250 if (lacc && lacc->grp_to_be_debug_replaced)
3252 tree dlhs = get_access_replacement (lacc);
3253 tree drhs = unshare_expr (rhs);
3254 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3256 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3257 && !contains_vce_or_bfcref_p (drhs))
3258 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3259 if (drhs
3260 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3261 TREE_TYPE (drhs)))
3262 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3263 TREE_TYPE (dlhs), drhs);
3265 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3266 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3269 /* From this point on, the function deals with assignments between
3270 aggregates when at least one has scalar reductions of some of its
3271 components. There are three possible scenarios: 1) both the LHS and the RHS
3272 have to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
3274 In the first case, we would like to load the LHS components from RHS
3275 components whenever possible. If that is not possible, we would like to
3276 read it directly from the RHS (after updating it by storing in it its own
3277 components). If there are some necessary unscalarized data in the LHS,
3278 those will be loaded by the original assignment too. If neither of these
3279 cases happen, the original statement can be removed. Most of this is done
3280 by load_assign_lhs_subreplacements.
3282 In the second case, we would like to store all RHS scalarized components
3283 directly into LHS and if they cover the aggregate completely, remove the
3284 statement too. In the third case, we want the LHS components to be loaded
3285 directly from the RHS (DSE will remove the original statement if it
3286 becomes redundant).
3288 This is a bit complex but manageable when types match and when unions do
3289 not cause confusion in a way that we cannot really load a component of LHS
3290 from the RHS or vice versa (the access representing this level can have
3291 subaccesses that are accessible only through a different union field at a
3292 higher level - different from the one used in the examined expression).
3293 Unions are fun.
3295 Therefore, I specially handle a fourth case, happening when there is a
3296 specific type cast or it is impossible to locate a scalarized subaccess on
3297 the other side of the expression. If that happens, I simply "refresh" the
3298 RHS by storing its scalarized components in it, leave the original statement
3299 there to do the copying and then load the scalar replacements of the LHS.
3300 This is what the first branch does. */
3302 if (modify_this_stmt
3303 || gimple_has_volatile_ops (stmt)
3304 || contains_vce_or_bfcref_p (rhs)
3305 || contains_vce_or_bfcref_p (lhs)
3306 || stmt_ends_bb_p (stmt))
3308 if (access_has_children_p (racc))
3309 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3310 gsi, false, false, loc);
3311 if (access_has_children_p (lacc))
3313 gimple_stmt_iterator alt_gsi = gsi_none ();
3314 if (stmt_ends_bb_p (stmt))
3316 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3317 gsi = &alt_gsi;
3319 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3320 gsi, true, true, loc);
3322 sra_stats.separate_lhs_rhs_handling++;
3324 /* This gimplification must be done after generate_subtree_copies,
3325 lest we insert the subtree copies in the middle of the gimplified
3326 sequence. */
3327 if (force_gimple_rhs)
3328 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3329 true, GSI_SAME_STMT);
3330 if (gimple_assign_rhs1 (stmt) != rhs)
3332 modify_this_stmt = true;
3333 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3334 gcc_assert (stmt == gsi_stmt (orig_gsi));
3337 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3339 else
3341 if (access_has_children_p (lacc)
3342 && access_has_children_p (racc)
3343 /* When an access represents an unscalarizable region, it usually
3344 represents accesses with variable offset and thus must not be used
3345 to generate new memory accesses. */
3346 && !lacc->grp_unscalarizable_region
3347 && !racc->grp_unscalarizable_region)
3349 struct subreplacement_assignment_data sad;
3351 sad.left_offset = lacc->offset;
3352 sad.assignment_lhs = lhs;
3353 sad.assignment_rhs = rhs;
3354 sad.top_racc = racc;
3355 sad.old_gsi = *gsi;
3356 sad.new_gsi = gsi;
3357 sad.loc = gimple_location (stmt);
3358 sad.refreshed = SRA_UDH_NONE;
3360 if (lacc->grp_read && !lacc->grp_covered)
3361 handle_unscalarized_data_in_subtree (&sad);
3363 load_assign_lhs_subreplacements (lacc, &sad);
3364 if (sad.refreshed != SRA_UDH_RIGHT)
3366 gsi_next (gsi);
3367 unlink_stmt_vdef (stmt);
3368 gsi_remove (&sad.old_gsi, true);
3369 release_defs (stmt);
3370 sra_stats.deleted++;
3371 return SRA_AM_REMOVED;
3374 else
3376 if (access_has_children_p (racc)
3377 && !racc->grp_unscalarized_data)
3379 if (dump_file)
3381 fprintf (dump_file, "Removing load: ");
3382 print_gimple_stmt (dump_file, stmt, 0, 0);
3384 generate_subtree_copies (racc->first_child, lhs,
3385 racc->offset, 0, 0, gsi,
3386 false, false, loc);
3387 gcc_assert (stmt == gsi_stmt (*gsi));
3388 unlink_stmt_vdef (stmt);
3389 gsi_remove (gsi, true);
3390 release_defs (stmt);
3391 sra_stats.deleted++;
3392 return SRA_AM_REMOVED;
3394 /* Restore the aggregate RHS from its components so the
3395 prevailing aggregate copy does the right thing. */
3396 if (access_has_children_p (racc))
3397 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3398 gsi, false, false, loc);
3399 /* Re-load the components of the aggregate copy destination.
3400 But use the RHS aggregate to load from to expose more
3401 optimization opportunities. */
3402 if (access_has_children_p (lacc))
3403 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3404 0, 0, gsi, true, true, loc);
3407 return SRA_AM_NONE;
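/* Putting the cases above together for a made-up aggregate copy "a = b;"
   where both sides are scalarized: the replacements of a are loaded
   directly from those of b and the statement is removed; if b also
   contains unscalarized data, the copy is kept and only the missing pieces
   are refreshed around it.  */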
3411 /* Traverse the function body and perform all modifications as decided in
3412 analyze_all_variable_accesses. Return true iff the CFG has been
3413 changed. */
3415 static bool
3416 sra_modify_function_body (void)
3418 bool cfg_changed = false;
3419 basic_block bb;
3421 FOR_EACH_BB_FN (bb, cfun)
3423 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3424 while (!gsi_end_p (gsi))
3426 gimple stmt = gsi_stmt (gsi);
3427 enum assignment_mod_result assign_result;
3428 bool modified = false, deleted = false;
3429 tree *t;
3430 unsigned i;
3432 switch (gimple_code (stmt))
3434 case GIMPLE_RETURN:
3435 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3436 if (*t != NULL_TREE)
3437 modified |= sra_modify_expr (t, &gsi, false);
3438 break;
3440 case GIMPLE_ASSIGN:
3441 assign_result = sra_modify_assign (stmt, &gsi);
3442 modified |= assign_result == SRA_AM_MODIFIED;
3443 deleted = assign_result == SRA_AM_REMOVED;
3444 break;
3446 case GIMPLE_CALL:
3447 /* Operands must be processed before the lhs. */
3448 for (i = 0; i < gimple_call_num_args (stmt); i++)
3450 t = gimple_call_arg_ptr (stmt, i);
3451 modified |= sra_modify_expr (t, &gsi, false);
3454 if (gimple_call_lhs (stmt))
3456 t = gimple_call_lhs_ptr (stmt);
3457 modified |= sra_modify_expr (t, &gsi, true);
3459 break;
3461 case GIMPLE_ASM:
3463 gasm *asm_stmt = as_a <gasm *> (stmt);
3464 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3466 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3467 modified |= sra_modify_expr (t, &gsi, false);
3469 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3471 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3472 modified |= sra_modify_expr (t, &gsi, true);
3475 break;
3477 default:
3478 break;
3481 if (modified)
3483 update_stmt (stmt);
3484 if (maybe_clean_eh_stmt (stmt)
3485 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3486 cfg_changed = true;
3488 if (!deleted)
3489 gsi_next (&gsi);
3493 gsi_commit_edge_inserts ();
3494 return cfg_changed;
3497 /* Generate statements initializing scalar replacements of parts of function
3498 parameters. */
3500 static void
3501 initialize_parameter_reductions (void)
3503 gimple_stmt_iterator gsi;
3504 gimple_seq seq = NULL;
3505 tree parm;
3507 gsi = gsi_start (seq);
3508 for (parm = DECL_ARGUMENTS (current_function_decl);
3509 parm;
3510 parm = DECL_CHAIN (parm))
3512 vec<access_p> *access_vec;
3513 struct access *access;
3515 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3516 continue;
3517 access_vec = get_base_access_vector (parm);
3518 if (!access_vec)
3519 continue;
3521 for (access = (*access_vec)[0];
3522 access;
3523 access = access->next_grp)
3524 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3525 EXPR_LOCATION (parm));
3528 seq = gsi_seq (gsi);
3529 if (seq)
3530 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
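/* Because the sequence is inserted on the single successor edge of the
   entry block, a function with a scalarized parameter p (hypothetical
   example) now starts with copies such as "SR.1 = p.a; SR.2 = p.b;" before
   its first original statement.  */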
3533 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3534 it reveals there are components of some aggregates to be scalarized, it runs
3535 the required transformations. */
3536 static unsigned int
3537 perform_intra_sra (void)
3539 int ret = 0;
3540 sra_initialize ();
3542 if (!find_var_candidates ())
3543 goto out;
3545 if (!scan_function ())
3546 goto out;
3548 if (!analyze_all_variable_accesses ())
3549 goto out;
3551 if (sra_modify_function_body ())
3552 ret = TODO_update_ssa | TODO_cleanup_cfg;
3553 else
3554 ret = TODO_update_ssa;
3555 initialize_parameter_reductions ();
3557 statistics_counter_event (cfun, "Scalar replacements created",
3558 sra_stats.replacements);
3559 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3560 statistics_counter_event (cfun, "Subtree copy stmts",
3561 sra_stats.subtree_copies);
3562 statistics_counter_event (cfun, "Subreplacement stmts",
3563 sra_stats.subreplacements);
3564 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3565 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3566 sra_stats.separate_lhs_rhs_handling);
3568 out:
3569 sra_deinitialize ();
3570 return ret;
3573 /* Perform early intraprocedural SRA. */
3574 static unsigned int
3575 early_intra_sra (void)
3577 sra_mode = SRA_MODE_EARLY_INTRA;
3578 return perform_intra_sra ();
3581 /* Perform "late" intraprocedural SRA. */
3582 static unsigned int
3583 late_intra_sra (void)
3585 sra_mode = SRA_MODE_INTRA;
3586 return perform_intra_sra ();
3590 static bool
3591 gate_intra_sra (void)
3593 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3597 namespace {
3599 const pass_data pass_data_sra_early =
3601 GIMPLE_PASS, /* type */
3602 "esra", /* name */
3603 OPTGROUP_NONE, /* optinfo_flags */
3604 TV_TREE_SRA, /* tv_id */
3605 ( PROP_cfg | PROP_ssa ), /* properties_required */
3606 0, /* properties_provided */
3607 0, /* properties_destroyed */
3608 0, /* todo_flags_start */
3609 TODO_update_ssa, /* todo_flags_finish */
3612 class pass_sra_early : public gimple_opt_pass
3614 public:
3615 pass_sra_early (gcc::context *ctxt)
3616 : gimple_opt_pass (pass_data_sra_early, ctxt)
3619 /* opt_pass methods: */
3620 virtual bool gate (function *) { return gate_intra_sra (); }
3621 virtual unsigned int execute (function *) { return early_intra_sra (); }
3623 }; // class pass_sra_early
3625 } // anon namespace
3627 gimple_opt_pass *
3628 make_pass_sra_early (gcc::context *ctxt)
3630 return new pass_sra_early (ctxt);
3633 namespace {
3635 const pass_data pass_data_sra =
3637 GIMPLE_PASS, /* type */
3638 "sra", /* name */
3639 OPTGROUP_NONE, /* optinfo_flags */
3640 TV_TREE_SRA, /* tv_id */
3641 ( PROP_cfg | PROP_ssa ), /* properties_required */
3642 0, /* properties_provided */
3643 0, /* properties_destroyed */
3644 TODO_update_address_taken, /* todo_flags_start */
3645 TODO_update_ssa, /* todo_flags_finish */
3648 class pass_sra : public gimple_opt_pass
3650 public:
3651 pass_sra (gcc::context *ctxt)
3652 : gimple_opt_pass (pass_data_sra, ctxt)
3655 /* opt_pass methods: */
3656 virtual bool gate (function *) { return gate_intra_sra (); }
3657 virtual unsigned int execute (function *) { return late_intra_sra (); }
3659 }; // class pass_sra
3661 } // anon namespace
3663 gimple_opt_pass *
3664 make_pass_sra (gcc::context *ctxt)
3666 return new pass_sra (ctxt);
3670 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3671 parameter. */
3673 static bool
3674 is_unused_scalar_param (tree parm)
3676 tree name;
3677 return (is_gimple_reg (parm)
3678 && (!(name = ssa_default_def (cfun, parm))
3679 || has_zero_uses (name)));
3682 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3683 examine whether there are any direct or otherwise infeasible ones. If so,
3684 return true, otherwise return false. PARM must be a gimple register with a
3685 non-NULL default definition. */
3687 static bool
3688 ptr_parm_has_direct_uses (tree parm)
3690 imm_use_iterator ui;
3691 gimple stmt;
3692 tree name = ssa_default_def (cfun, parm);
3693 bool ret = false;
3695 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3697 int uses_ok = 0;
3698 use_operand_p use_p;
3700 if (is_gimple_debug (stmt))
3701 continue;
3703 /* Valid uses include dereferences on the lhs and the rhs. */
3704 if (gimple_has_lhs (stmt))
3706 tree lhs = gimple_get_lhs (stmt);
3707 while (handled_component_p (lhs))
3708 lhs = TREE_OPERAND (lhs, 0);
3709 if (TREE_CODE (lhs) == MEM_REF
3710 && TREE_OPERAND (lhs, 0) == name
3711 && integer_zerop (TREE_OPERAND (lhs, 1))
3712 && types_compatible_p (TREE_TYPE (lhs),
3713 TREE_TYPE (TREE_TYPE (name)))
3714 && !TREE_THIS_VOLATILE (lhs))
3715 uses_ok++;
3717 if (gimple_assign_single_p (stmt))
3719 tree rhs = gimple_assign_rhs1 (stmt);
3720 while (handled_component_p (rhs))
3721 rhs = TREE_OPERAND (rhs, 0);
3722 if (TREE_CODE (rhs) == MEM_REF
3723 && TREE_OPERAND (rhs, 0) == name
3724 && integer_zerop (TREE_OPERAND (rhs, 1))
3725 && types_compatible_p (TREE_TYPE (rhs),
3726 TREE_TYPE (TREE_TYPE (name)))
3727 && !TREE_THIS_VOLATILE (rhs))
3728 uses_ok++;
3730 else if (is_gimple_call (stmt))
3732 unsigned i;
3733 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3735 tree arg = gimple_call_arg (stmt, i);
3736 while (handled_component_p (arg))
3737 arg = TREE_OPERAND (arg, 0);
3738 if (TREE_CODE (arg) == MEM_REF
3739 && TREE_OPERAND (arg, 0) == name
3740 && integer_zerop (TREE_OPERAND (arg, 1))
3741 && types_compatible_p (TREE_TYPE (arg),
3742 TREE_TYPE (TREE_TYPE (name)))
3743 && !TREE_THIS_VOLATILE (arg))
3744 uses_ok++;
3748 /* If the number of valid uses does not match the number of
3749 uses in this stmt there is an unhandled use. */
3750 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3751 --uses_ok;
3753 if (uses_ok != 0)
3754 ret = true;
3756 if (ret)
3757 BREAK_FROM_IMM_USE_STMT (ui);
3760 return ret;
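/* To illustrate (made-up example): in "void f (struct s *p)", a
   dereference like "p->a" is counted as a valid use, while passing the
   pointer itself along, as in "g (p);", is a direct use that makes this
   function return true and disqualifies p from IPA-SRA reduction.  */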
3763 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3764 them in candidate_bitmap. Note that these do not necessarily include
3765 parameters which are unused and thus can be removed. Return true iff any
3766 such candidate has been found. */
3768 static bool
3769 find_param_candidates (void)
3771 tree parm;
3772 int count = 0;
3773 bool ret = false;
3774 const char *msg;
3776 for (parm = DECL_ARGUMENTS (current_function_decl);
3777 parm;
3778 parm = DECL_CHAIN (parm))
3780 tree type = TREE_TYPE (parm);
3781 tree_node **slot;
3783 count++;
3785 if (TREE_THIS_VOLATILE (parm)
3786 || TREE_ADDRESSABLE (parm)
3787 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3788 continue;
3790 if (is_unused_scalar_param (parm))
3792 ret = true;
3793 continue;
3796 if (POINTER_TYPE_P (type))
3798 type = TREE_TYPE (type);
3800 if (TREE_CODE (type) == FUNCTION_TYPE
3801 || TYPE_VOLATILE (type)
3802 || (TREE_CODE (type) == ARRAY_TYPE
3803 && TYPE_NONALIASED_COMPONENT (type))
3804 || !is_gimple_reg (parm)
3805 || is_va_list_type (type)
3806 || ptr_parm_has_direct_uses (parm))
3807 continue;
3809 else if (!AGGREGATE_TYPE_P (type))
3810 continue;
3812 if (!COMPLETE_TYPE_P (type)
3813 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3814 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3815 || (AGGREGATE_TYPE_P (type)
3816 && type_internals_preclude_sra_p (type, &msg)))
3817 continue;
3819 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3820 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3821 *slot = parm;
3823 ret = true;
3824 if (dump_file && (dump_flags & TDF_DETAILS))
3826 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3827 print_generic_expr (dump_file, parm, 0);
3828 fprintf (dump_file, "\n");
3832 func_param_count = count;
3833 return ret;
3836 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3837 maybe_modified. */
3839 static bool
3840 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3841 void *data)
3843 struct access *repr = (struct access *) data;
3845 repr->grp_maybe_modified = 1;
3846 return true;
3849 /* Analyze what representatives (in linked lists accessible from
3850 REPRESENTATIVES) can be modified by side effects of statements in the
3851 current function. */
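/* For example, in the illustrative function

     int foo (int *p) { bar (); return *p; }

   the call to bar () may store to *p through some alias, so the walk
   over the virtual definitions reaching the load invokes
   mark_maybe_modified and the parameter cannot be converted to
   pass-by-value.  */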
3853 static void
3854 analyze_modified_params (vec<access_p> representatives)
3856 int i;
3858 for (i = 0; i < func_param_count; i++)
3860 struct access *repr;
3862 for (repr = representatives[i];
3863 repr;
3864 repr = repr->next_grp)
3866 struct access *access;
3867 bitmap visited;
3868 ao_ref ar;
3870 if (no_accesses_p (repr))
3871 continue;
3872 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3873 || repr->grp_maybe_modified)
3874 continue;
3876 ao_ref_init (&ar, repr->expr);
3877 visited = BITMAP_ALLOC (NULL);
3878 for (access = repr; access; access = access->next_sibling)
3880 /* All accesses are read ones, otherwise grp_maybe_modified would
3881 be trivially set. */
3882 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3883 mark_maybe_modified, repr, &visited);
3884 if (repr->grp_maybe_modified)
3885 break;
3887 BITMAP_FREE (visited);
3892 /* Propagate distances in bb_dereferences in the opposite direction than the
3893 control flow edges, in each step storing the maximum of the current value
3894 and the minimum of all successors. These steps are repeated until the table
3895 stabilizes. Note that BBs which might terminate the function (according to
3896 the final_bbs bitmap) are never updated in this way. */
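/* In dataflow terms, for every parameter I this iterates

     deref (BB, I) = MAX (deref (BB, I),
                          MIN over successors S of BB of deref (S, I))

   to a fixed point, i.e. it computes the dereference distance that is
   guaranteed to be reached on every path leaving BB.  */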
3898 static void
3899 propagate_dereference_distances (void)
3901 basic_block bb;
3903 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3904 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3905 FOR_EACH_BB_FN (bb, cfun)
3907 queue.quick_push (bb);
3908 bb->aux = bb;
3911 while (!queue.is_empty ())
3913 edge_iterator ei;
3914 edge e;
3915 bool change = false;
3916 int i;
3918 bb = queue.pop ();
3919 bb->aux = NULL;
3921 if (bitmap_bit_p (final_bbs, bb->index))
3922 continue;
3924 for (i = 0; i < func_param_count; i++)
3926 int idx = bb->index * func_param_count + i;
3927 bool first = true;
3928 HOST_WIDE_INT inh = 0;
3930 FOR_EACH_EDGE (e, ei, bb->succs)
3932 int succ_idx = e->dest->index * func_param_count + i;
3934 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
3935 continue;
3937 if (first)
3939 first = false;
3940 inh = bb_dereferences [succ_idx];
3942 else if (bb_dereferences [succ_idx] < inh)
3943 inh = bb_dereferences [succ_idx];
3946 if (!first && bb_dereferences[idx] < inh)
3948 bb_dereferences[idx] = inh;
3949 change = true;
3953 if (change && !bitmap_bit_p (final_bbs, bb->index))
3954 FOR_EACH_EDGE (e, ei, bb->preds)
3956 if (e->src->aux)
3957 continue;
3959 e->src->aux = e->src;
3960 queue.quick_push (e->src);
3965 /* Dump a dereferences TABLE with heading STR to file F. */
3967 static void
3968 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3970 basic_block bb;
3972 fprintf (f, "%s", str);
3973 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3974 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3976 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3977 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3979 int i;
3980 for (i = 0; i < func_param_count; i++)
3982 int idx = bb->index * func_param_count + i;
3983 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3986 fprintf (f, "\n");
3988 fprintf (f, "\n");
3991 /* Determine which (parts of) parameters passed by reference and not assigned
3992 to are not certainly dereferenced in this function, meaning that the
3993 dereferencing cannot be safely moved to the caller without potentially
3994 introducing a segfault. Mark such REPRESENTATIVES as
3995 grp_not_necessarilly_dereferenced.
3997 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3998 part, is calculated for each pointer parameter rather than a simple
3999 boolean, in order to handle cases when only a fraction of the whole
4000 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4001 an example).
4003 The maximum dereference distances for each pointer parameter and BB are
4004 already stored in bb_dereferences. This routine simply propagates these
4005 values upwards by propagate_dereference_distances and then compares the
4006 distances of individual parameters in the ENTRY BB to the equivalent
4007 distances of each representative of a (fraction of a) parameter. */
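/* For example, in the illustrative function

     int foo (int *p, int flag) { return flag ? *p : 0; }

   *p is not dereferenced on every path, so the distance recorded for the
   ENTRY BB stays below repr->offset + repr->size, the representative is
   marked grp_not_necessarilly_dereferenced, and unconditionally loading
   *p in callers could introduce a fault the original program never
   had.  */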
4009 static void
4010 analyze_caller_dereference_legality (vec<access_p> representatives)
4012 int i;
4014 if (dump_file && (dump_flags & TDF_DETAILS))
4015 dump_dereferences_table (dump_file,
4016 "Dereference table before propagation:\n",
4017 bb_dereferences);
4019 propagate_dereference_distances ();
4021 if (dump_file && (dump_flags & TDF_DETAILS))
4022 dump_dereferences_table (dump_file,
4023 "Dereference table after propagation:\n",
4024 bb_dereferences);
4026 for (i = 0; i < func_param_count; i++)
4028 struct access *repr = representatives[i];
4029 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4031 if (!repr || no_accesses_p (repr))
4032 continue;
4034 do
4036 if ((repr->offset + repr->size) > bb_dereferences[idx])
4037 repr->grp_not_necessarilly_dereferenced = 1;
4038 repr = repr->next_grp;
4040 while (repr);
4044 /* Return the representative access for the parameter declaration PARM if it is
4045 a scalar passed by reference which is not written to and the pointer value
4046 is not used directly. Thus, if it is legal to dereference it in the caller
4047 and we can rule out modifications through aliases, such parameter should be
4048 turned into one passed by value. Return NULL otherwise. */
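/* The typical beneficiary is something like

     int foo (int *p) { return *p + 1; }

   which, provided the subsequent analyses of aliasing and caller-side
   dereference legality also succeed, can later be rewritten as

     int foo (int p) { return p + 1; }  */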
4050 static struct access *
4051 unmodified_by_ref_scalar_representative (tree parm)
4053 int i, access_count;
4054 struct access *repr;
4055 vec<access_p> *access_vec;
4057 access_vec = get_base_access_vector (parm);
4058 gcc_assert (access_vec);
4059 repr = (*access_vec)[0];
4060 if (repr->write)
4061 return NULL;
4062 repr->group_representative = repr;
4064 access_count = access_vec->length ();
4065 for (i = 1; i < access_count; i++)
4067 struct access *access = (*access_vec)[i];
4068 if (access->write)
4069 return NULL;
4070 access->group_representative = repr;
4071 access->next_sibling = repr->next_sibling;
4072 repr->next_sibling = access;
4075 repr->grp_read = 1;
4076 repr->grp_scalar_ptr = 1;
4077 return repr;
4080 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4081 associated with. REQ_ALIGN is the minimum required alignment. */
4083 static bool
4084 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4086 unsigned int exp_align;
4087 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4088 is an incompatible assignment in a call statement (and possibly even in asm
4089 statements). This can be relaxed by using a new temporary but only for
4090 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4091 intraprocedural SRA we deal with this by keeping the old aggregate around,
4092 something we cannot do in IPA-SRA.) */
4093 if (access->write
4094 && (is_gimple_call (access->stmt)
4095 || gimple_code (access->stmt) == GIMPLE_ASM))
4096 return true;
4098 exp_align = get_object_alignment (access->expr);
4099 if (exp_align < req_align)
4100 return true;
4102 return false;
4106 /* Sort collected accesses for parameter PARM, identify representatives for
4107 each accessed region and link them together. Return NULL if there are
4108 incompatible overlapping accesses, return the special pointer value
4109 &no_accesses_representant if there are no accesses for this parameter at
4110 all, and return the first representative otherwise. Set *RO_GRP if there
4111 is a group of accesses with only read (i.e. no write) accesses. */
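/* The all-or-nothing rule below means that, say, accesses to bits
   [0, 64) and [32, 96) of the same parameter overlap without one being
   contained in the other and make the whole parameter unsplittable
   (NULL is returned), while repeated accesses to [0, 32) with the same
   size and compatible types are merged under a single representative.  */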
4113 static struct access *
4114 splice_param_accesses (tree parm, bool *ro_grp)
4116 int i, j, access_count, group_count;
4117 int agg_size, total_size = 0;
4118 struct access *access, *res, **prev_acc_ptr = &res;
4119 vec<access_p> *access_vec;
4121 access_vec = get_base_access_vector (parm);
4122 if (!access_vec)
4123 return &no_accesses_representant;
4124 access_count = access_vec->length ();
4126 access_vec->qsort (compare_access_positions);
4128 i = 0;
4129 total_size = 0;
4130 group_count = 0;
4131 while (i < access_count)
4133 bool modification;
4134 tree a1_alias_type;
4135 access = (*access_vec)[i];
4136 modification = access->write;
4137 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4138 return NULL;
4139 a1_alias_type = reference_alias_ptr_type (access->expr);
4141 /* Access is about to become group representative unless we find some
4142 nasty overlap which would preclude us from breaking this parameter
4143 apart. */
4145 j = i + 1;
4146 while (j < access_count)
4148 struct access *ac2 = (*access_vec)[j];
4149 if (ac2->offset != access->offset)
4151 /* All or nothing law for parameters. */
4152 if (access->offset + access->size > ac2->offset)
4153 return NULL;
4154 else
4155 break;
4157 else if (ac2->size != access->size)
4158 return NULL;
4160 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4161 || (ac2->type != access->type
4162 && (TREE_ADDRESSABLE (ac2->type)
4163 || TREE_ADDRESSABLE (access->type)))
4164 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4165 return NULL;
4167 modification |= ac2->write;
4168 ac2->group_representative = access;
4169 ac2->next_sibling = access->next_sibling;
4170 access->next_sibling = ac2;
4171 j++;
4174 group_count++;
4175 access->grp_maybe_modified = modification;
4176 if (!modification)
4177 *ro_grp = true;
4178 *prev_acc_ptr = access;
4179 prev_acc_ptr = &access->next_grp;
4180 total_size += access->size;
4181 i = j;
4184 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4185 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4186 else
4187 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4188 if (total_size >= agg_size)
4189 return NULL;
4191 gcc_assert (group_count > 0);
4192 return res;
4195 /* Decide whether parameters with representative accesses given by REPR should
4196 be reduced into components. */
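/* A worked example, assuming the default ipa-sra-ptr-growth-factor of 2:
   for a 64-bit pointer parameter to a 256-bit structure of which only
   two 32-bit fields are read, with both reads certainly performed and
   never aliased, total_size is 64, agg_size is 256 and parm_size_limit
   is 128, so the checks below pass and the parameter is split into two
   scalar components.  */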
4198 static int
4199 decide_one_param_reduction (struct access *repr)
4201 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4202 bool by_ref;
4203 tree parm;
4205 parm = repr->base;
4206 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4207 gcc_assert (cur_parm_size > 0);
4209 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4211 by_ref = true;
4212 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4214 else
4216 by_ref = false;
4217 agg_size = cur_parm_size;
4220 if (dump_file)
4222 struct access *acc;
4223 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4224 print_generic_expr (dump_file, parm, 0);
4225 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4226 for (acc = repr; acc; acc = acc->next_grp)
4227 dump_access (dump_file, acc, true);
4230 total_size = 0;
4231 new_param_count = 0;
4233 for (; repr; repr = repr->next_grp)
4235 gcc_assert (parm == repr->base);
4237 /* Taking the address of a non-addressable field is verboten. */
4238 if (by_ref && repr->non_addressable)
4239 return 0;
4241 /* Do not decompose a non-BLKmode param in a way that would
4242 create BLKmode params. Especially for by-reference passing
4243 (thus, pointer-type param) this is hardly worthwhile. */
4244 if (DECL_MODE (parm) != BLKmode
4245 && TYPE_MODE (repr->type) == BLKmode)
4246 return 0;
4248 if (!by_ref || (!repr->grp_maybe_modified
4249 && !repr->grp_not_necessarilly_dereferenced))
4250 total_size += repr->size;
4251 else
4252 total_size += cur_parm_size;
4254 new_param_count++;
4257 gcc_assert (new_param_count > 0);
4259 if (optimize_function_for_size_p (cfun))
4260 parm_size_limit = cur_parm_size;
4261 else
4262 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4263 * cur_parm_size);
4265 if (total_size < agg_size
4266 && total_size <= parm_size_limit)
4268 if (dump_file)
4269 fprintf (dump_file, " ....will be split into %i components\n",
4270 new_param_count);
4271 return new_param_count;
4273 else
4274 return 0;
4277 /* The order of the following enums is important; we need to do extra work for
4278 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4279 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4280 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
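/* The ordering matters because splice_all_param_accesses below upgrades
   its result with comparisons such as result < MODIF_BY_REF_ACCESSES,
   so a stronger verdict reached for one parameter is never downgraded
   by a weaker one found for a later parameter.  */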
4282 /* Identify representatives of all accesses to all candidate parameters for
4283 IPA-SRA. Return result based on what representatives have been found. */
4285 static enum ipa_splicing_result
4286 splice_all_param_accesses (vec<access_p> &representatives)
4288 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4289 tree parm;
4290 struct access *repr;
4292 representatives.create (func_param_count);
4294 for (parm = DECL_ARGUMENTS (current_function_decl);
4295 parm;
4296 parm = DECL_CHAIN (parm))
4298 if (is_unused_scalar_param (parm))
4300 representatives.quick_push (&no_accesses_representant);
4301 if (result == NO_GOOD_ACCESS)
4302 result = UNUSED_PARAMS;
4304 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4305 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4306 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4308 repr = unmodified_by_ref_scalar_representative (parm);
4309 representatives.quick_push (repr);
4310 if (repr)
4311 result = UNMODIF_BY_REF_ACCESSES;
4313 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4315 bool ro_grp = false;
4316 repr = splice_param_accesses (parm, &ro_grp);
4317 representatives.quick_push (repr);
4319 if (repr && !no_accesses_p (repr))
4321 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4323 if (ro_grp)
4324 result = UNMODIF_BY_REF_ACCESSES;
4325 else if (result < MODIF_BY_REF_ACCESSES)
4326 result = MODIF_BY_REF_ACCESSES;
4328 else if (result < BY_VAL_ACCESSES)
4329 result = BY_VAL_ACCESSES;
4331 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4332 result = UNUSED_PARAMS;
4334 else
4335 representatives.quick_push (NULL);
4338 if (result == NO_GOOD_ACCESS)
4340 representatives.release ();
4341 return NO_GOOD_ACCESS;
4344 return result;
4347 /* Return the index of BASE in PARMS. Abort if it is not found. */
4349 static inline int
4350 get_param_index (tree base, vec<tree> parms)
4352 int i, len;
4354 len = parms.length ();
4355 for (i = 0; i < len; i++)
4356 if (parms[i] == base)
4357 return i;
4358 gcc_unreachable ();
4361 /* Convert the decisions made at the representative level into compact
4362 parameter adjustments. REPRESENTATIVES are pointers to the first
4363 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4364 final number of adjustments. */
4366 static ipa_parm_adjustment_vec
4367 turn_representatives_into_adjustments (vec<access_p> representatives,
4368 int adjustments_count)
4370 vec<tree> parms;
4371 ipa_parm_adjustment_vec adjustments;
4372 tree parm;
4373 int i;
4375 gcc_assert (adjustments_count > 0);
4376 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4377 adjustments.create (adjustments_count);
4378 parm = DECL_ARGUMENTS (current_function_decl);
4379 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4381 struct access *repr = representatives[i];
4383 if (!repr || no_accesses_p (repr))
4385 struct ipa_parm_adjustment adj;
4387 memset (&adj, 0, sizeof (adj));
4388 adj.base_index = get_param_index (parm, parms);
4389 adj.base = parm;
4390 if (!repr)
4391 adj.op = IPA_PARM_OP_COPY;
4392 else
4393 adj.op = IPA_PARM_OP_REMOVE;
4394 adj.arg_prefix = "ISRA";
4395 adjustments.quick_push (adj);
4397 else
4399 struct ipa_parm_adjustment adj;
4400 int index = get_param_index (parm, parms);
4402 for (; repr; repr = repr->next_grp)
4404 memset (&adj, 0, sizeof (adj));
4405 gcc_assert (repr->base == parm);
4406 adj.base_index = index;
4407 adj.base = repr->base;
4408 adj.type = repr->type;
4409 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4410 adj.offset = repr->offset;
4411 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4412 && (repr->grp_maybe_modified
4413 || repr->grp_not_necessarilly_dereferenced));
4414 adj.arg_prefix = "ISRA";
4415 adjustments.quick_push (adj);
4419 parms.release ();
4420 return adjustments;
4423 /* Analyze the collected accesses and produce a plan for what to do with the
4424 parameters in the form of adjustments; an empty vector means nothing. */
4426 static ipa_parm_adjustment_vec
4427 analyze_all_param_acesses (void)
4429 enum ipa_splicing_result repr_state;
4430 bool proceed = false;
4431 int i, adjustments_count = 0;
4432 vec<access_p> representatives;
4433 ipa_parm_adjustment_vec adjustments;
4435 repr_state = splice_all_param_accesses (representatives);
4436 if (repr_state == NO_GOOD_ACCESS)
4437 return ipa_parm_adjustment_vec ();
4439 /* If there are any parameters passed by reference which are not modified
4440 directly, we need to check whether they can be modified indirectly. */
4441 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4443 analyze_caller_dereference_legality (representatives);
4444 analyze_modified_params (representatives);
4447 for (i = 0; i < func_param_count; i++)
4449 struct access *repr = representatives[i];
4451 if (repr && !no_accesses_p (repr))
4453 if (repr->grp_scalar_ptr)
4455 adjustments_count++;
4456 if (repr->grp_not_necessarilly_dereferenced
4457 || repr->grp_maybe_modified)
4458 representatives[i] = NULL;
4459 else
4461 proceed = true;
4462 sra_stats.scalar_by_ref_to_by_val++;
4465 else
4467 int new_components = decide_one_param_reduction (repr);
4469 if (new_components == 0)
4471 representatives[i] = NULL;
4472 adjustments_count++;
4474 else
4476 adjustments_count += new_components;
4477 sra_stats.aggregate_params_reduced++;
4478 sra_stats.param_reductions_created += new_components;
4479 proceed = true;
4483 else
4485 if (no_accesses_p (repr))
4487 proceed = true;
4488 sra_stats.deleted_unused_parameters++;
4490 adjustments_count++;
4494 if (!proceed && dump_file)
4495 fprintf (dump_file, "NOT proceeding to change params.\n");
4497 if (proceed)
4498 adjustments = turn_representatives_into_adjustments (representatives,
4499 adjustments_count);
4500 else
4501 adjustments = ipa_parm_adjustment_vec ();
4503 representatives.release ();
4504 return adjustments;
4507 /* If a parameter replacement identified by ADJ does not yet exist in the form
4508 of a declaration, create it and record it; otherwise return the previously
4509 created one. */
4511 static tree
4512 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4514 tree repl;
4515 if (!adj->new_ssa_base)
4517 char *pretty_name = make_fancy_name (adj->base);
4519 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4520 DECL_NAME (repl) = get_identifier (pretty_name);
4521 obstack_free (&name_obstack, pretty_name);
4523 adj->new_ssa_base = repl;
4525 else
4526 repl = adj->new_ssa_base;
4527 return repl;
4530 /* Find the first adjustment for a particular parameter BASE in a vector of
4531 ADJUSTMENTS which is not a copy (IPA_PARM_OP_COPY). Return NULL if there is no such
4532 adjustment. */
4534 static struct ipa_parm_adjustment *
4535 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4537 int i, len;
4539 len = adjustments.length ();
4540 for (i = 0; i < len; i++)
4542 struct ipa_parm_adjustment *adj;
4544 adj = &adjustments[i];
4545 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4546 return adj;
4549 return NULL;
4552 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4553 removed because its value is not used, replace the SSA_NAME and all of its
4554 uses with one relating to a newly created VAR_DECL and return true.
4555 ADJUSTMENTS is a pointer to an adjustments vector. */
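/* For example, if parameter P is being removed, a statement defining an
   SSA name whose underlying variable is P cannot simply keep doing so;
   the lhs and all its uses are re-pointed at an SSA name of a fresh
   "ISR"-prefixed temporary obtained from
   get_replaced_param_substitute.  */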
4557 static bool
4558 replace_removed_params_ssa_names (gimple stmt,
4559 ipa_parm_adjustment_vec adjustments)
4561 struct ipa_parm_adjustment *adj;
4562 tree lhs, decl, repl, name;
4564 if (gimple_code (stmt) == GIMPLE_PHI)
4565 lhs = gimple_phi_result (stmt);
4566 else if (is_gimple_assign (stmt))
4567 lhs = gimple_assign_lhs (stmt);
4568 else if (is_gimple_call (stmt))
4569 lhs = gimple_call_lhs (stmt);
4570 else
4571 gcc_unreachable ();
4573 if (TREE_CODE (lhs) != SSA_NAME)
4574 return false;
4576 decl = SSA_NAME_VAR (lhs);
4577 if (decl == NULL_TREE
4578 || TREE_CODE (decl) != PARM_DECL)
4579 return false;
4581 adj = get_adjustment_for_base (adjustments, decl);
4582 if (!adj)
4583 return false;
4585 repl = get_replaced_param_substitute (adj);
4586 name = make_ssa_name (repl, stmt);
4588 if (dump_file)
4590 fprintf (dump_file, "replacing an SSA name of a removed param ");
4591 print_generic_expr (dump_file, lhs, 0);
4592 fprintf (dump_file, " with ");
4593 print_generic_expr (dump_file, name, 0);
4594 fprintf (dump_file, "\n");
4597 if (is_gimple_assign (stmt))
4598 gimple_assign_set_lhs (stmt, name);
4599 else if (is_gimple_call (stmt))
4600 gimple_call_set_lhs (stmt, name);
4601 else
4602 gimple_phi_set_result (as_a <gphi *> (stmt), name);
4604 replace_uses_by (lhs, name);
4605 release_ssa_name (lhs);
4606 return true;
4609 /* If the statement STMT contains any expressions that need to be replaced with a
4610 different one as noted by ADJUSTMENTS, do so. Handle any potential type
4611 incompatibilities (GSI is used to accommodate conversion statements and must
4612 point to the statement). Return true iff the statement was modified. */
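/* For example, once a single-field 'struct wrap { float f; }' parameter
   (an illustrative type) has been replaced by a plain float, an
   assignment that used to copy the aggregate may now mix a float and a
   struct; the code below repairs such mismatches with a
   VIEW_CONVERT_EXPR, or by rebuilding a CONSTRUCTOR rhs (the PR 42714
   situation).  */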
4614 static bool
4615 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4616 ipa_parm_adjustment_vec adjustments)
4618 tree *lhs_p, *rhs_p;
4619 bool any;
4621 if (!gimple_assign_single_p (stmt))
4622 return false;
4624 rhs_p = gimple_assign_rhs1_ptr (stmt);
4625 lhs_p = gimple_assign_lhs_ptr (stmt);
4627 any = ipa_modify_expr (rhs_p, false, adjustments);
4628 any |= ipa_modify_expr (lhs_p, false, adjustments);
4629 if (any)
4631 tree new_rhs = NULL_TREE;
4633 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4635 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4637 /* V_C_Es of constructors can cause trouble (PR 42714). */
4638 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4639 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4640 else
4641 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4642 NULL);
4644 else
4645 new_rhs = fold_build1_loc (gimple_location (stmt),
4646 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4647 *rhs_p);
4649 else if (REFERENCE_CLASS_P (*rhs_p)
4650 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4651 && !is_gimple_reg (*lhs_p))
4652 /* This can happen when an assignment in between two single field
4653 structures is turned into an assignment in between two pointers to
4654 scalars (PR 42237). */
4655 new_rhs = *rhs_p;
4657 if (new_rhs)
4659 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4660 true, GSI_SAME_STMT);
4662 gimple_assign_set_rhs_from_tree (gsi, tmp);
4665 return true;
4668 return false;
4671 /* Traverse the function body and perform all modifications as described in
4672 ADJUSTMENTS. Return true iff the CFG has been changed. */
4674 bool
4675 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4677 bool cfg_changed = false;
4678 basic_block bb;
4680 FOR_EACH_BB_FN (bb, cfun)
4682 gimple_stmt_iterator gsi;
4684 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4685 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4687 gsi = gsi_start_bb (bb);
4688 while (!gsi_end_p (gsi))
4690 gimple stmt = gsi_stmt (gsi);
4691 bool modified = false;
4692 tree *t;
4693 unsigned i;
4695 switch (gimple_code (stmt))
4697 case GIMPLE_RETURN:
4698 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4699 if (*t != NULL_TREE)
4700 modified |= ipa_modify_expr (t, true, adjustments);
4701 break;
4703 case GIMPLE_ASSIGN:
4704 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4705 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4706 break;
4708 case GIMPLE_CALL:
4709 /* Operands must be processed before the lhs. */
4710 for (i = 0; i < gimple_call_num_args (stmt); i++)
4712 t = gimple_call_arg_ptr (stmt, i);
4713 modified |= ipa_modify_expr (t, true, adjustments);
4716 if (gimple_call_lhs (stmt))
4718 t = gimple_call_lhs_ptr (stmt);
4719 modified |= ipa_modify_expr (t, false, adjustments);
4720 modified |= replace_removed_params_ssa_names (stmt,
4721 adjustments);
4723 break;
4725 case GIMPLE_ASM:
4727 gasm *asm_stmt = as_a <gasm *> (stmt);
4728 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4730 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4731 modified |= ipa_modify_expr (t, true, adjustments);
4733 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4735 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4736 modified |= ipa_modify_expr (t, false, adjustments);
4739 break;
4741 default:
4742 break;
4745 if (modified)
4747 update_stmt (stmt);
4748 if (maybe_clean_eh_stmt (stmt)
4749 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4750 cfg_changed = true;
4752 gsi_next (&gsi);
4756 return cfg_changed;
4759 /* Call gimple_debug_bind_reset_value on all debug statements describing
4760 gimple register parameters that are being removed or replaced. */
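/* In essence: for each removed or replaced gimple register parameter, a
   DEBUG_EXPR_DECL bound to the old parameter value (and, for parameters
   that are not DECL_IGNORED_P, a companion VAR_DECL) is created so that
   debug statements can keep describing the value; debug uses that
   cannot be rescued this way are reset.  */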
4762 static void
4763 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4765 int i, len;
4766 gimple_stmt_iterator *gsip = NULL, gsi;
4768 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4770 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4771 gsip = &gsi;
4773 len = adjustments.length ();
4774 for (i = 0; i < len; i++)
4776 struct ipa_parm_adjustment *adj;
4777 imm_use_iterator ui;
4778 gimple stmt;
4779 gdebug *def_temp;
4780 tree name, vexpr, copy = NULL_TREE;
4781 use_operand_p use_p;
4783 adj = &adjustments[i];
4784 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4785 continue;
4786 name = ssa_default_def (cfun, adj->base);
4787 vexpr = NULL;
4788 if (name)
4789 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4791 if (gimple_clobber_p (stmt))
4793 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4794 unlink_stmt_vdef (stmt);
4795 gsi_remove (&cgsi, true);
4796 release_defs (stmt);
4797 continue;
4799 /* All other users must have been removed by
4800 ipa_sra_modify_function_body. */
4801 gcc_assert (is_gimple_debug (stmt));
4802 if (vexpr == NULL && gsip != NULL)
4804 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4805 vexpr = make_node (DEBUG_EXPR_DECL);
4806 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4807 NULL);
4808 DECL_ARTIFICIAL (vexpr) = 1;
4809 TREE_TYPE (vexpr) = TREE_TYPE (name);
4810 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4811 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4813 if (vexpr)
4815 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4816 SET_USE (use_p, vexpr);
4818 else
4819 gimple_debug_bind_reset_value (stmt);
4820 update_stmt (stmt);
4822 /* Create a VAR_DECL for debug info purposes. */
4823 if (!DECL_IGNORED_P (adj->base))
4825 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4826 VAR_DECL, DECL_NAME (adj->base),
4827 TREE_TYPE (adj->base));
4828 if (DECL_PT_UID_SET_P (adj->base))
4829 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4830 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4831 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4832 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4833 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4834 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4835 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4836 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4837 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4838 SET_DECL_RTL (copy, 0);
4839 TREE_USED (copy) = 1;
4840 DECL_CONTEXT (copy) = current_function_decl;
4841 add_local_decl (cfun, copy);
4842 DECL_CHAIN (copy) =
4843 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4844 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4846 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4848 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4849 if (vexpr)
4850 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4851 else
4852 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4853 NULL);
4854 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4859 /* Return false if all callers have at least as many actual arguments as there
4860 are formal parameters in the current function and their types match;
4861 return true otherwise. */
4863 static bool
4864 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4865 void *data ATTRIBUTE_UNUSED)
4867 struct cgraph_edge *cs;
4868 for (cs = node->callers; cs; cs = cs->next_caller)
4869 if (!callsite_arguments_match_p (cs->call_stmt))
4870 return true;
4872 return false;
4875 /* Convert all callers of NODE. */
4877 static bool
4878 convert_callers_for_node (struct cgraph_node *node,
4879 void *data)
4881 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4882 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4883 struct cgraph_edge *cs;
4885 for (cs = node->callers; cs; cs = cs->next_caller)
4887 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4889 if (dump_file)
4890 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4891 xstrdup (cs->caller->name ()),
4892 cs->caller->order,
4893 xstrdup (cs->callee->name ()),
4894 cs->callee->order);
4896 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4898 pop_cfun ();
4901 for (cs = node->callers; cs; cs = cs->next_caller)
4902 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4903 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4904 compute_inline_parameters (cs->caller, true);
4905 BITMAP_FREE (recomputed_callers);
4907 return true;
4910 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4912 static void
4913 convert_callers (struct cgraph_node *node, tree old_decl,
4914 ipa_parm_adjustment_vec adjustments)
4916 basic_block this_block;
4918 node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
4919 &adjustments, false);
4921 if (!encountered_recursive_call)
4922 return;
4924 FOR_EACH_BB_FN (this_block, cfun)
4926 gimple_stmt_iterator gsi;
4928 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4930 gcall *stmt;
4931 tree call_fndecl;
4932 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4933 if (!stmt)
4934 continue;
4935 call_fndecl = gimple_call_fndecl (stmt);
4936 if (call_fndecl == old_decl)
4938 if (dump_file)
4939 fprintf (dump_file, "Adjusting recursive call");
4940 gimple_call_set_fndecl (stmt, node->decl);
4941 ipa_modify_call_arguments (NULL, stmt, adjustments);
4946 return;
4949 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4950 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
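/* Note the order of operations below: callers are collected and the
   function is cloned before anything is rewritten, so that recursive
   edges stay consistent, and only the clone's signature and body are
   then modified while the original NODE is left intact.  */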
4952 static bool
4953 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4955 struct cgraph_node *new_node;
4956 bool cfg_changed;
4958 cgraph_edge::rebuild_edges ();
4959 free_dominance_info (CDI_DOMINATORS);
4960 pop_cfun ();
4962 /* This must be done after rebuilding cgraph edges for node above.
4963 Otherwise any recursive calls to node that are recorded in
4964 redirect_callers will be corrupted. */
4965 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4966 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4967 NULL, false, NULL, NULL,
4968 "isra");
4969 redirect_callers.release ();
4971 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4972 ipa_modify_formal_parameters (current_function_decl, adjustments);
4973 cfg_changed = ipa_sra_modify_function_body (adjustments);
4974 sra_ipa_reset_debug_stmts (adjustments);
4975 convert_callers (new_node, node->decl, adjustments);
4976 new_node->make_local ();
4977 return cfg_changed;
4980 /* If NODE has a caller, return true. */
4982 static bool
4983 has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4985 if (node->callers)
4986 return true;
4987 return false;
4990 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4991 its attributes, return true otherwise. NODE is the cgraph node of the
4992 current function. */
4994 static bool
4995 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4997 if (!node->can_be_local_p ())
4999 if (dump_file)
5000 fprintf (dump_file, "Function not local to this compilation unit.\n");
5001 return false;
5004 if (!node->local.can_change_signature)
5006 if (dump_file)
5007 fprintf (dump_file, "Function can not change signature.\n");
5008 return false;
5011 if (!tree_versionable_function_p (node->decl))
5013 if (dump_file)
5014 fprintf (dump_file, "Function is not versionable.\n");
5015 return false;
5018 if (!opt_for_fn (node->decl, optimize)
5019 || !opt_for_fn (node->decl, flag_ipa_sra))
5021 if (dump_file)
5022 fprintf (dump_file, "Function not optimized.\n");
5023 return false;
5026 if (DECL_VIRTUAL_P (current_function_decl))
5028 if (dump_file)
5029 fprintf (dump_file, "Function is a virtual method.\n");
5030 return false;
5033 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
5034 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
5036 if (dump_file)
5037 fprintf (dump_file, "Function too big to be made truly local.\n");
5038 return false;
5041 if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
5043 if (dump_file)
5044 fprintf (dump_file,
5045 "Function has no callers in this compilation unit.\n");
5046 return false;
5049 if (cfun->stdarg)
5051 if (dump_file)
5052 fprintf (dump_file, "Function uses stdarg. \n");
5053 return false;
5056 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5057 return false;
5059 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5061 if (dump_file)
5062 fprintf (dump_file, "Always inline function will be inlined "
5063 "anyway. \n");
5064 return false;
5067 return true;
5070 /* Perform early interprocedural SRA. */
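/* The driver ties the pieces together: after the preliminary checks it
   finds parameter candidates, verifies that every call site can be
   adjusted, scans the body collecting accesses and dereference
   distances, lets analyze_all_param_acesses () produce the adjustment
   plan, and finally clones and rewrites the function and its callers
   through modify_function ().  */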
5072 static unsigned int
5073 ipa_early_sra (void)
5075 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5076 ipa_parm_adjustment_vec adjustments;
5077 int ret = 0;
5079 if (!ipa_sra_preliminary_function_checks (node))
5080 return 0;
5082 sra_initialize ();
5083 sra_mode = SRA_MODE_EARLY_IPA;
5085 if (!find_param_candidates ())
5087 if (dump_file)
5088 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5089 goto simple_out;
5092 if (node->call_for_symbol_thunks_and_aliases
5093 (some_callers_have_mismatched_arguments_p, NULL, true))
5095 if (dump_file)
5096 fprintf (dump_file, "There are callers with insufficient number of "
5097 "arguments or arguments with type mismatches.\n");
5098 goto simple_out;
5101 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5102 func_param_count
5103 * last_basic_block_for_fn (cfun));
5104 final_bbs = BITMAP_ALLOC (NULL);
5106 scan_function ();
5107 if (encountered_apply_args)
5109 if (dump_file)
5110 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5111 goto out;
5114 if (encountered_unchangable_recursive_call)
5116 if (dump_file)
5117 fprintf (dump_file, "Function calls itself with insufficient "
5118 "number of arguments.\n");
5119 goto out;
5122 adjustments = analyze_all_param_acesses ();
5123 if (!adjustments.exists ())
5124 goto out;
5125 if (dump_file)
5126 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5128 if (modify_function (node, adjustments))
5129 ret = TODO_update_ssa | TODO_cleanup_cfg;
5130 else
5131 ret = TODO_update_ssa;
5132 adjustments.release ();
5134 statistics_counter_event (cfun, "Unused parameters deleted",
5135 sra_stats.deleted_unused_parameters);
5136 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5137 sra_stats.scalar_by_ref_to_by_val);
5138 statistics_counter_event (cfun, "Aggregate parameters broken up",
5139 sra_stats.aggregate_params_reduced);
5140 statistics_counter_event (cfun, "Aggregate parameter components created",
5141 sra_stats.param_reductions_created);
5143 out:
5144 BITMAP_FREE (final_bbs);
5145 free (bb_dereferences);
5146 simple_out:
5147 sra_deinitialize ();
5148 return ret;
5151 namespace {
5153 const pass_data pass_data_early_ipa_sra =
5155 GIMPLE_PASS, /* type */
5156 "eipa_sra", /* name */
5157 OPTGROUP_NONE, /* optinfo_flags */
5158 TV_IPA_SRA, /* tv_id */
5159 0, /* properties_required */
5160 0, /* properties_provided */
5161 0, /* properties_destroyed */
5162 0, /* todo_flags_start */
5163 TODO_dump_symtab, /* todo_flags_finish */
5166 class pass_early_ipa_sra : public gimple_opt_pass
5168 public:
5169 pass_early_ipa_sra (gcc::context *ctxt)
5170 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5173 /* opt_pass methods: */
5174 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5175 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5177 }; // class pass_early_ipa_sra
5179 } // anon namespace
5181 gimple_opt_pass *
5182 make_pass_early_ipa_sra (gcc::context *ctxt)
5184 return new pass_early_ipa_sra (ctxt);