/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2017 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Reduction of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
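/* As a rough illustration only (the replacement names below are
   hypothetical, of the kind make_fancy_name produces), SRA turns

     struct point { int x; int y; } p;
     p.x = a;
     p.y = b;
     return p.x + p.y;

   into code that uses two independent scalars

     p$x = a;
     p$y = b;
     return p$x + p$y;

   after which the aggregate itself can often be removed entirely.  */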
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
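/* A sketch of the access tree built in stage 3 for a hypothetical

     struct S { struct T { int a; int b; } t; int c; } s;

   whose parts are all accessed might look like (assuming 32-bit int):

     s.t   (offset 0,  size 64)
       +- s.t.a  (offset 0,  size 32)
       +- s.t.b  (offset 32, size 32)
     s.c   (offset 64, size 32)

   with roots chained through next_grp and subtrees linked through
   first_child and next_sibling.  */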
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* After access tree has been constructed, this points to the parent of the
     current access, if there is one.  NULL for roots.  */
  struct access *parent;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order?  */
  unsigned reverse : 1;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are used and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", non_addressable = %d, reverse = %d",
	   access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (access->first_link && !access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

static bool maybe_add_sra_candidate (tree);

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  Also scan for uses of constant pool as we go along and add
   to candidates.  */
static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
	  && !is_gimple_reg_type (TREE_TYPE (expr))
	  && dump_file && (dump_flags & TDF_DETAILS))
	{
	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
	     and elements of multidimensional arrays (which are
	     multi-element arrays in their own right).  */
	  fprintf (dump_file, "Allowing non-reg-type load of part"
			      " of constant-pool entry: ");
	  print_generic_expr (dump_file, expr);
	}
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
   we are considering a decl from constant pool.  If it is false, char arrays
   will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!is_gimple_reg_type (ft)
		&& !scalarizable_type_p (ft, const_decl))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	HOST_WIDE_INT min_elem_size;
	if (const_decl)
	  min_elem_size = 0;
	else
	  min_elem_size = BITS_PER_UNIT;

	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  /* Variable-length array, do not allow scalarization.  */
	  return false;

	tree elem = TREE_TYPE (type);
	if (!is_gimple_reg_type (elem)
	    && !scalarizable_type_p (elem, const_decl))
	  return false;
	return true;
      }
    default:
      return false;
    }
}
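/* Informally, under the checks above a type such as

     struct pix { float r, g, b, a; };

   is scalarizable, whereas

     struct flag { unsigned ready : 1; };

   is not (bit-field), and neither is a type containing a variable-length
   array; this is only an illustration, the function above is
   authoritative.  */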
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression for
   the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
	    tree ft = TREE_TYPE (fld);
	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
			    nref, ft);
	  }
      break;
    case ARRAY_TYPE:
      {
	tree elemtype = TREE_TYPE (decl_type);
	tree elem_size = TYPE_SIZE (elemtype);
	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
	gcc_assert (el_size > 0);

	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
	if (maxidx)
	  {
	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
	    tree domain = TYPE_DOMAIN (decl_type);
	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
	    offset_int idx = wi::to_offset (minidx);
	    offset_int max = wi::to_offset (maxidx);
	    if (!TYPE_UNSIGNED (domain))
	      {
		idx = wi::sext (idx, TYPE_PRECISION (domain));
		max = wi::sext (max, TYPE_PRECISION (domain));
	      }
	    for (int el_off = offset; idx <= max; ++idx)
	      {
		tree nref = build4 (ARRAY_REF, elemtype,
				    ref,
				    wide_int_to_tree (domain, idx),
				    NULL_TREE, NULL_TREE);
		scalarize_elem (base, el_off, el_size,
				TYPE_REVERSE_STORAGE_ORDER (decl_type),
				nref, elemtype);
		el_off += el_size;
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member, REVERSE gives its storage order, and REF must be the reference
   expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
		tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}
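/* For illustration, total scalarization of a hypothetical

     struct { int n; short s[2]; } v;

   creates accesses for v.n, v.s[0] and v.s[1]: completely_scalarize walks
   the record and array levels while scalarize_elem terminates the
   recursion at gimple register types.  */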
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  if (storage_order_barrier_p (expr))
    {
      disqualify_base_of_expr (expr, "storage order barrier.");
      return NULL;
    }

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Return true if the nature of BASE is such that it contains data even if
   there is no write to it in the function.  */

static bool
comes_initialized_p (tree base)
{
  return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
	lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (storage_order_barrier_p (lhs))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;
      add_link_to_rhs (racc, link);
      add_access_to_work_queue (racc);

      /* Let's delay marking the areas as written until propagation of accesses
	 across link, unless the nature of rhs tells us that its data comes
	 from elsewhere.  */
      if (!comes_initialized_p (racc->base))
	lacc->write = false;
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
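/* For example, accesses at (offset, size) pairs (0, 32), (0, 64) and
   (32, 32) are ordered (0, 64), (0, 32), (32, 32) by the comparator
   above: ascending offsets, with the larger (enclosing) access first
   among equal offsets.  */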
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable (); 	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
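/* E.g. for an expression like a.data[3].x, the functions above would
   produce a replacement name of the form "a$data$3$x" (a hypothetical
   example; '$' is the separator grown into the obstack).  */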
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, gsi must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  /* Preserve address-space information.  */
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  if (as != TYPE_ADDR_SPACE (exp_type))
    exp_type = build_qualified_type (exp_type,
				     TYPE_QUALS (exp_type)
				     | ENCODE_QUAL_ADDR_SPACE (as));

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = least_bit_hwi (misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
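/* As a sketch, for BASE being a VAR_DECL a of some aggregate type, OFFSET
   32 (bits) and EXP_TYPE int, the function above folds to something like

     MEM[(int *)&a + 4B]

   in GIMPLE dump syntax: a MEM_REF of the requested type at a byte
   offset, with alignment and address-space information carried over from
   the original base (illustrative example, not an exact dump).  */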
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
				gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return
      build_ref_for_offset (loc, base, offset, model->reverse, model->type,
			    gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
1806 /* Construct a memory reference consisting of component_refs and array_refs to
1807 a part of an aggregate *RES (which is of type TYPE). The requested part
1808 should have type EXP_TYPE at be the given OFFSET. This function might not
1809 succeed, it returns true when it does and only then *RES points to something
1810 meaningful. This function should be used only to build expressions that we
1811 might need to present to user (e.g. in warnings). In all other situations,
1812 build_ref_for_model or build_ref_for_offset should be used instead. */
1814 static bool
1815 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1816 tree exp_type)
1818 while (1)
1820 tree fld;
1821 tree tr_size, index, minidx;
1822 HOST_WIDE_INT el_size;
1824 if (offset == 0 && exp_type
1825 && types_compatible_p (exp_type, type))
1826 return true;
1828 switch (TREE_CODE (type))
1830 case UNION_TYPE:
1831 case QUAL_UNION_TYPE:
1832 case RECORD_TYPE:
1833 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1835 HOST_WIDE_INT pos, size;
1836 tree tr_pos, expr, *expr_ptr;
1838 if (TREE_CODE (fld) != FIELD_DECL)
1839 continue;
1841 tr_pos = bit_position (fld);
1842 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1843 continue;
1844 pos = tree_to_uhwi (tr_pos);
1845 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1846 tr_size = DECL_SIZE (fld);
1847 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1848 continue;
1849 size = tree_to_uhwi (tr_size);
1850 if (size == 0)
1852 if (pos != offset)
1853 continue;
1855 else if (pos > offset || (pos + size) <= offset)
1856 continue;
1858 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1859 NULL_TREE);
1860 expr_ptr = &expr;
1861 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1862 offset - pos, exp_type))
1864 *res = expr;
1865 return true;
1868 return false;
1870 case ARRAY_TYPE:
1871 tr_size = TYPE_SIZE (TREE_TYPE (type));
1872 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1873 return false;
1874 el_size = tree_to_uhwi (tr_size);
1876 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1877 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1878 return false;
1879 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1880 if (!integer_zerop (minidx))
1881 index = int_const_binop (PLUS_EXPR, index, minidx);
1882 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1883 NULL_TREE, NULL_TREE);
1884 offset = offset % el_size;
1885 type = TREE_TYPE (type);
1886 break;
1888 default:
1889 if (offset != 0)
1890 return false;
1892 if (exp_type)
1893 return false;
1894 else
1895 return true;
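/* A minimal usage sketch (hypothetical, not part of the pass): build a
   user-friendly reference for a 48-bit offset into
   `struct S { int i; struct { short a; short b; } in; }', which should
   yield `var.in.b'.  The function example_build_field_ref and its use of
   short_integer_type_node are assumptions for illustration only.  */

static tree
example_build_field_ref (tree var)
{
  tree expr = var;

  /* 48 = 32 bits of `i' followed by the 16 bits of `a'.  */
  if (build_user_friendly_ref_for_offset (&expr, TREE_TYPE (var), 48,
					  short_integer_type_node))
    return expr;	/* Now a COMPONENT_REF equivalent to `var.in.b'.  */
  return NULL_TREE;	/* The offset could not be expressed nicely.  */
}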
1900 /* Return true iff TYPE is the stdarg va_list type. */
1902 static inline bool
1903 is_va_list_type (tree type)
1905 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1908 /* Print a message to the dump file explaining why a variable was rejected. */
1910 static void
1911 reject (tree var, const char *msg)
1913 if (dump_file && (dump_flags & TDF_DETAILS))
1915 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1916 print_generic_expr (dump_file, var);
1917 fprintf (dump_file, "\n");
1921 /* Return true if VAR is a candidate for SRA. */
1923 static bool
1924 maybe_add_sra_candidate (tree var)
1926 tree type = TREE_TYPE (var);
1927 const char *msg;
1928 tree_node **slot;
1930 if (!AGGREGATE_TYPE_P (type))
1932 reject (var, "not aggregate");
1933 return false;
1935 /* Allow constant-pool entries (that "need to live in memory")
1936 unless we are doing IPA SRA. */
1937 if (needs_to_live_in_memory (var)
1938 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1940 reject (var, "needs to live in memory");
1941 return false;
1943 if (TREE_THIS_VOLATILE (var))
1945 reject (var, "is volatile");
1946 return false;
1948 if (!COMPLETE_TYPE_P (type))
1950 reject (var, "has incomplete type");
1951 return false;
1953 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1955 reject (var, "type size not fixed");
1956 return false;
1958 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1960 reject (var, "type size is zero");
1961 return false;
1963 if (type_internals_preclude_sra_p (type, &msg))
1965 reject (var, msg);
1966 return false;
1968 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1969 we also want to schedule it rather late. Thus we ignore it in
1970 the early pass. */
1971 (sra_mode == SRA_MODE_EARLY_INTRA
1972 && is_va_list_type (type)))
1974 reject (var, "is va_list");
1975 return false;
1978 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1979 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1980 *slot = var;
1982 if (dump_file && (dump_flags & TDF_DETAILS))
1984 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1985 print_generic_expr (dump_file, var);
1986 fprintf (dump_file, "\n");
1989 return true;
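/* For example (illustration only, mirroring the checks above): a declaration
   such as `volatile struct S v;' is rejected as volatile, one whose address
   escapes typically fails needs_to_live_in_memory, and one with a
   variably-sized type fails the tree_fits_uhwi_p check on TYPE_SIZE.  */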
1992 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1993 those variables whose type is suitable for scalarization. */
1995 static bool
1996 find_var_candidates (void)
1998 tree var, parm;
1999 unsigned int i;
2000 bool ret = false;
2002 for (parm = DECL_ARGUMENTS (current_function_decl);
2003 parm;
2004 parm = DECL_CHAIN (parm))
2005 ret |= maybe_add_sra_candidate (parm);
2007 FOR_EACH_LOCAL_DECL (cfun, i, var)
2009 if (!VAR_P (var))
2010 continue;
2012 ret |= maybe_add_sra_candidate (var);
2015 return ret;
2018 /* Sort all accesses for the given variable, check for partial overlaps and
2019 return NULL if there are any. If there are none, pick a representative for
2020 each combination of offset and size and create a linked list out of them.
2021 Return the pointer to the first representative and make sure it is the first
2022 one in the vector of accesses. */
2024 static struct access *
2025 sort_and_splice_var_accesses (tree var)
2027 int i, j, access_count;
2028 struct access *res, **prev_acc_ptr = &res;
2029 vec<access_p> *access_vec;
2030 bool first = true;
2031 HOST_WIDE_INT low = -1, high = 0;
2033 access_vec = get_base_access_vector (var);
2034 if (!access_vec)
2035 return NULL;
2036 access_count = access_vec->length ();
2038 /* Sort by <OFFSET, SIZE>. */
2039 access_vec->qsort (compare_access_positions);
2041 i = 0;
2042 while (i < access_count)
2044 struct access *access = (*access_vec)[i];
2045 bool grp_write = access->write;
2046 bool grp_read = !access->write;
2047 bool grp_scalar_write = access->write
2048 && is_gimple_reg_type (access->type);
2049 bool grp_scalar_read = !access->write
2050 && is_gimple_reg_type (access->type);
2051 bool grp_assignment_read = access->grp_assignment_read;
2052 bool grp_assignment_write = access->grp_assignment_write;
2053 bool multiple_scalar_reads = false;
2054 bool total_scalarization = access->grp_total_scalarization;
2055 bool grp_partial_lhs = access->grp_partial_lhs;
2056 bool first_scalar = is_gimple_reg_type (access->type);
2057 bool unscalarizable_region = access->grp_unscalarizable_region;
2059 if (first || access->offset >= high)
2061 first = false;
2062 low = access->offset;
2063 high = access->offset + access->size;
2065 else if (access->offset > low && access->offset + access->size > high)
2066 return NULL;
2067 else
2068 gcc_assert (access->offset >= low
2069 && access->offset + access->size <= high);
2071 j = i + 1;
2072 while (j < access_count)
2074 struct access *ac2 = (*access_vec)[j];
2075 if (ac2->offset != access->offset || ac2->size != access->size)
2076 break;
2077 if (ac2->write)
2079 grp_write = true;
2080 grp_scalar_write = (grp_scalar_write
2081 || is_gimple_reg_type (ac2->type));
2083 else
2085 grp_read = true;
2086 if (is_gimple_reg_type (ac2->type))
2088 if (grp_scalar_read)
2089 multiple_scalar_reads = true;
2090 else
2091 grp_scalar_read = true;
2094 grp_assignment_read |= ac2->grp_assignment_read;
2095 grp_assignment_write |= ac2->grp_assignment_write;
2096 grp_partial_lhs |= ac2->grp_partial_lhs;
2097 unscalarizable_region |= ac2->grp_unscalarizable_region;
2098 total_scalarization |= ac2->grp_total_scalarization;
2099 relink_to_new_repr (access, ac2);
2101 /* If there are both aggregate-type and scalar-type accesses with
2102 this combination of size and offset, the comparison function
2103 should have put the scalars first. */
2104 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2105 ac2->group_representative = access;
2106 j++;
2109 i = j;
2111 access->group_representative = access;
2112 access->grp_write = grp_write;
2113 access->grp_read = grp_read;
2114 access->grp_scalar_read = grp_scalar_read;
2115 access->grp_scalar_write = grp_scalar_write;
2116 access->grp_assignment_read = grp_assignment_read;
2117 access->grp_assignment_write = grp_assignment_write;
2118 access->grp_hint = total_scalarization
2119 || (multiple_scalar_reads && !constant_decl_p (var));
2120 access->grp_total_scalarization = total_scalarization;
2121 access->grp_partial_lhs = grp_partial_lhs;
2122 access->grp_unscalarizable_region = unscalarizable_region;
2124 *prev_acc_ptr = access;
2125 prev_acc_ptr = &access->next_grp;
2128 gcc_assert (res == (*access_vec)[0]);
2129 return res;
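/* A worked example (illustration only): accesses with <offset, size> pairs
   <0,32>, <0,32> and <32,16> are spliced into two group representatives,
   <0,32> and <32,16>, linked through next_grp.  Pairs such as <0,32> and
   <16,32> overlap only partially, so the function returns NULL and the
   variable is disqualified.  */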
2132 /* Create a variable for the given ACCESS which determines the type, name and a
2133 few other properties. Return the variable declaration and store it also to
2134 ACCESS->replacement. */
2136 static tree
2137 create_access_replacement (struct access *access)
2139 tree repl;
2141 if (access->grp_to_be_debug_replaced)
2143 repl = create_tmp_var_raw (access->type);
2144 DECL_CONTEXT (repl) = current_function_decl;
2146 else
2147 /* Drop any special alignment on the type if it's not on the main
2148 variant. This avoids issues with weirdo ABIs like AAPCS. */
2149 repl = create_tmp_var (build_qualified_type
2150 (TYPE_MAIN_VARIANT (access->type),
2151 TYPE_QUALS (access->type)), "SR");
2152 if (TREE_CODE (access->type) == COMPLEX_TYPE
2153 || TREE_CODE (access->type) == VECTOR_TYPE)
2155 if (!access->grp_partial_lhs)
2156 DECL_GIMPLE_REG_P (repl) = 1;
2158 else if (access->grp_partial_lhs
2159 && is_gimple_reg_type (access->type))
2160 TREE_ADDRESSABLE (repl) = 1;
2162 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2163 DECL_ARTIFICIAL (repl) = 1;
2164 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2166 if (DECL_NAME (access->base)
2167 && !DECL_IGNORED_P (access->base)
2168 && !DECL_ARTIFICIAL (access->base))
2170 char *pretty_name = make_fancy_name (access->expr);
2171 tree debug_expr = unshare_expr_without_location (access->expr), d;
2172 bool fail = false;
2174 DECL_NAME (repl) = get_identifier (pretty_name);
2175 DECL_NAMELESS (repl) = 1;
2176 obstack_free (&name_obstack, pretty_name);
2178 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2179 as DECL_DEBUG_EXPR isn't considered when looking for still
2180 used SSA_NAMEs and thus they could be freed. All debug info
2181 generation cares about is whether something is constant or variable
2182 and that get_ref_base_and_extent works properly on the
2183 expression. It cannot handle accesses at a non-constant offset
2184 though, so just give up in those cases. */
2185 for (d = debug_expr;
2186 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2187 d = TREE_OPERAND (d, 0))
2188 switch (TREE_CODE (d))
2190 case ARRAY_REF:
2191 case ARRAY_RANGE_REF:
2192 if (TREE_OPERAND (d, 1)
2193 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2194 fail = true;
2195 if (TREE_OPERAND (d, 3)
2196 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2197 fail = true;
2198 /* FALLTHRU */
2199 case COMPONENT_REF:
2200 if (TREE_OPERAND (d, 2)
2201 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2202 fail = true;
2203 break;
2204 case MEM_REF:
2205 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2206 fail = true;
2207 else
2208 d = TREE_OPERAND (d, 0);
2209 break;
2210 default:
2211 break;
2213 if (!fail)
2215 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2216 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2218 if (access->grp_no_warning)
2219 TREE_NO_WARNING (repl) = 1;
2220 else
2221 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2223 else
2224 TREE_NO_WARNING (repl) = 1;
2226 if (dump_file)
2228 if (access->grp_to_be_debug_replaced)
2230 fprintf (dump_file, "Created a debug-only replacement for ");
2231 print_generic_expr (dump_file, access->base);
2232 fprintf (dump_file, " offset: %u, size: %u\n",
2233 (unsigned) access->offset, (unsigned) access->size);
2235 else
2237 fprintf (dump_file, "Created a replacement for ");
2238 print_generic_expr (dump_file, access->base);
2239 fprintf (dump_file, " offset: %u, size: %u: ",
2240 (unsigned) access->offset, (unsigned) access->size);
2241 print_generic_expr (dump_file, repl);
2242 fprintf (dump_file, "\n");
2245 sra_stats.replacements++;
2247 return repl;
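/* For illustration (assumed naming convention): the replacement for an
   access like `s.in.b' is a fresh temporary whose pretty name, built by
   make_fancy_name above, typically appears in dumps as `s$in$b'.  */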
2250 /* Return ACCESS's scalar replacement, which must exist. */
2252 static inline tree
2253 get_access_replacement (struct access *access)
2255 gcc_checking_assert (access->replacement_decl);
2256 return access->replacement_decl;
2260 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2261 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2262 to by it is not "within" the root. Return false iff some accesses partially
2263 overlap. */
2265 static bool
2266 build_access_subtree (struct access **access)
2268 struct access *root = *access, *last_child = NULL;
2269 HOST_WIDE_INT limit = root->offset + root->size;
2271 *access = (*access)->next_grp;
2272 while (*access && (*access)->offset + (*access)->size <= limit)
2274 if (!last_child)
2275 root->first_child = *access;
2276 else
2277 last_child->next_sibling = *access;
2278 last_child = *access;
2279 (*access)->parent = root;
2280 (*access)->grp_write |= root->grp_write;
2282 if (!build_access_subtree (access))
2283 return false;
2286 if (*access && (*access)->offset < limit)
2287 return false;
2289 return true;
2292 /* Build a tree of access representatives, ACCESS is the pointer to the first
2293 one, others are linked in a list by the next_grp field. Return false iff
2294 some accesses partially overlap. */
2296 static bool
2297 build_access_trees (struct access *access)
2299 while (access)
2301 struct access *root = access;
2303 if (!build_access_subtree (&access))
2304 return false;
2305 root->next_grp = access;
2307 return true;
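/* For illustration: representatives <0,64>, <0,32> and <32,32> form a single
   tree with <0,64> as the root and the two smaller accesses as its children,
   while a representative starting at offset 64 would begin a new tree linked
   through next_grp.  */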
2310 /* Return true if EXPR contains some ARRAY_REFs into an array with a
2311 non-constant bound. */
2313 static bool
2314 expr_with_var_bounded_array_refs_p (tree expr)
2316 while (handled_component_p (expr))
2318 if (TREE_CODE (expr) == ARRAY_REF
2319 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2320 return true;
2321 expr = TREE_OPERAND (expr, 0);
2323 return false;
2326 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2327 that seems both beneficial and when ALLOW_REPLACEMENTS allows it. Also set
2328 all sorts of access flags appropriately along the way, notably propagating
2329 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2330 grp_total_scalarization from PARENT to ROOT.
2332 Creating a replacement for a scalar access is considered beneficial if its
2333 grp_hint is set (this means we are either attempting total scalarization or
2334 there is more than one direct read access) or according to the following
2335 table:
2337 Access written to through a scalar type (once or more times)
2339 | Written to in an assignment statement
2341 | | Access read as scalar _once_
2342 | | |
2343 | | | Read in an assignment statement
2344 | | | |
2345 | | | | Scalarize Comment
2346 -----------------------------------------------------------------------------
2347 0 0 0 0 No access for the scalar
2348 0 0 0 1 No access for the scalar
2349 0 0 1 0 No Single read - won't help
2350 0 0 1 1 No The same case
2351 0 1 0 0 No access for the scalar
2352 0 1 0 1 No access for the scalar
2353 0 1 1 0 Yes s = *g; return s.i;
2354 0 1 1 1 Yes The same case as above
2355 1 0 0 0 No Won't help
2356 1 0 0 1 Yes s.i = 1; *g = s;
2357 1 0 1 0 Yes s.i = 5; g = s.i;
2358 1 0 1 1 Yes The same case as above
2359 1 1 0 0 No Won't help.
2360 1 1 0 1 Yes s.i = 1; *g = s;
2361 1 1 1 0 Yes s = *g; return s.i;
2362 1 1 1 1 Yes Any of the above yeses */
2364 static bool
2365 analyze_access_subtree (struct access *root, struct access *parent,
2366 bool allow_replacements)
2368 struct access *child;
2369 HOST_WIDE_INT limit = root->offset + root->size;
2370 HOST_WIDE_INT covered_to = root->offset;
2371 bool scalar = is_gimple_reg_type (root->type);
2372 bool hole = false, sth_created = false;
2374 if (parent)
2376 if (parent->grp_read)
2377 root->grp_read = 1;
2378 if (parent->grp_assignment_read)
2379 root->grp_assignment_read = 1;
2380 if (parent->grp_write)
2381 root->grp_write = 1;
2382 if (parent->grp_assignment_write)
2383 root->grp_assignment_write = 1;
2384 if (parent->grp_total_scalarization)
2385 root->grp_total_scalarization = 1;
2388 if (root->grp_unscalarizable_region)
2389 allow_replacements = false;
2391 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2392 allow_replacements = false;
2394 for (child = root->first_child; child; child = child->next_sibling)
2396 hole |= covered_to < child->offset;
2397 sth_created |= analyze_access_subtree (child, root,
2398 allow_replacements && !scalar);
2400 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2401 root->grp_total_scalarization &= child->grp_total_scalarization;
2402 if (child->grp_covered)
2403 covered_to += child->size;
2404 else
2405 hole = true;
2408 if (allow_replacements && scalar && !root->first_child
2409 && (root->grp_hint
2410 || ((root->grp_scalar_read || root->grp_assignment_read)
2411 && (root->grp_scalar_write || root->grp_assignment_write))))
2413 /* Always create access replacements that cover the whole access.
2414 For integral types this means the precision has to match.
2415 Avoid assumptions based on the integral type kind, too. */
2416 if (INTEGRAL_TYPE_P (root->type)
2417 && (TREE_CODE (root->type) != INTEGER_TYPE
2418 || TYPE_PRECISION (root->type) != root->size)
2419 /* But leave bitfield accesses alone. */
2420 && (TREE_CODE (root->expr) != COMPONENT_REF
2421 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2423 tree rt = root->type;
2424 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2425 && (root->size % BITS_PER_UNIT) == 0);
2426 root->type = build_nonstandard_integer_type (root->size,
2427 TYPE_UNSIGNED (rt));
2428 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2429 root->offset, root->reverse,
2430 root->type, NULL, false);
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2434 fprintf (dump_file, "Changing the type of a replacement for ");
2435 print_generic_expr (dump_file, root->base);
2436 fprintf (dump_file, " offset: %u, size: %u ",
2437 (unsigned) root->offset, (unsigned) root->size);
2438 fprintf (dump_file, " to an integer.\n");
2442 root->grp_to_be_replaced = 1;
2443 root->replacement_decl = create_access_replacement (root);
2444 sth_created = true;
2445 hole = false;
2447 else
2449 if (allow_replacements
2450 && scalar && !root->first_child
2451 && (root->grp_scalar_write || root->grp_assignment_write)
2452 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2453 DECL_UID (root->base)))
2455 gcc_checking_assert (!root->grp_scalar_read
2456 && !root->grp_assignment_read);
2457 sth_created = true;
2458 if (MAY_HAVE_DEBUG_STMTS)
2460 root->grp_to_be_debug_replaced = 1;
2461 root->replacement_decl = create_access_replacement (root);
2465 if (covered_to < limit)
2466 hole = true;
2467 if (scalar || !allow_replacements)
2468 root->grp_total_scalarization = 0;
2471 if (!hole || root->grp_total_scalarization)
2472 root->grp_covered = 1;
2473 else if (root->grp_write || comes_initialized_p (root->base))
2474 root->grp_unscalarized_data = 1; /* not covered and written to */
2475 return sth_created;
2478 /* Analyze all access trees linked by next_grp by the means of
2479 analyze_access_subtree. */
2480 static bool
2481 analyze_access_trees (struct access *access)
2483 bool ret = false;
2485 while (access)
2487 if (analyze_access_subtree (access, NULL, true))
2488 ret = true;
2489 access = access->next_grp;
2492 return ret;
2495 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2496 size SIZE would conflict with an already existing one. If exactly such a
2497 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2499 static bool
2500 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2501 HOST_WIDE_INT size, struct access **exact_match)
2503 struct access *child;
2505 for (child = lacc->first_child; child; child = child->next_sibling)
2507 if (child->offset == norm_offset && child->size == size)
2509 *exact_match = child;
2510 return true;
2513 if (child->offset < norm_offset + size
2514 && child->offset + child->size > norm_offset)
2515 return true;
2518 return false;
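/* Example (illustration only): with an existing child at <32,32>, a candidate
   at <32,32> is an exact match, a candidate at <48,32> conflicts because the
   two intervals overlap without coinciding, and a candidate at <64,32>
   neither matches nor conflicts.  */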
2521 /* Create a new child access of PARENT, with all properties just like MODEL
2522 except for its offset, its grp_write flag, which is set according to
2523 SET_GRP_WRITE, and its grp_read flag, which is set to false. Return the new
2524 access. Note that this access is created long after all splicing and
2525 sorting, it's not located in any access vector and is automatically a
2526 representative of its group. */
2528 static struct access *
2529 create_artificial_child_access (struct access *parent, struct access *model,
2530 HOST_WIDE_INT new_offset,
2531 bool set_grp_write)
2533 struct access **child;
2534 tree expr = parent->base;
2536 gcc_assert (!model->grp_unscalarizable_region);
2538 struct access *access = access_pool.allocate ();
2539 memset (access, 0, sizeof (struct access));
2540 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2541 model->type))
2543 access->grp_no_warning = true;
2544 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2545 new_offset, model, NULL, false);
2548 access->base = parent->base;
2549 access->expr = expr;
2550 access->offset = new_offset;
2551 access->size = model->size;
2552 access->type = model->type;
2553 access->grp_write = set_grp_write;
2554 access->grp_read = false;
2555 access->reverse = model->reverse;
2557 child = &parent->first_child;
2558 while (*child && (*child)->offset < new_offset)
2559 child = &(*child)->next_sibling;
2561 access->next_sibling = *child;
2562 *child = access;
2564 return access;
2568 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2569 sub-trees as written to. If any of them has not been marked so previously
2570 and has assignment links leading from it, re-enqueue it. */
2572 static void
2573 subtree_mark_written_and_enqueue (struct access *access)
2575 if (access->grp_write)
2576 return;
2577 access->grp_write = true;
2578 add_access_to_work_queue (access);
2580 struct access *child;
2581 for (child = access->first_child; child; child = child->next_sibling)
2582 subtree_mark_written_and_enqueue (child);
2585 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2586 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2587 propagated transitively. Return true if anything changed. Additionally, if
2588 RACC is a scalar access but LACC is not, change the type of the latter, if
2589 possible. */
2591 static bool
2592 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2594 struct access *rchild;
2595 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2596 bool ret = false;
2598 /* If the LHS is still not marked as being written to, we only need to do so
2599 if the RHS at this level actually was. */
2600 if (!lacc->grp_write)
2602 gcc_checking_assert (!comes_initialized_p (racc->base));
2603 if (racc->grp_write)
2605 subtree_mark_written_and_enqueue (lacc);
2606 ret = true;
2610 if (is_gimple_reg_type (lacc->type)
2611 || lacc->grp_unscalarizable_region
2612 || racc->grp_unscalarizable_region)
2614 if (!lacc->grp_write)
2616 ret = true;
2617 subtree_mark_written_and_enqueue (lacc);
2619 return ret;
2622 if (is_gimple_reg_type (racc->type))
2624 if (!lacc->grp_write)
2626 ret = true;
2627 subtree_mark_written_and_enqueue (lacc);
2629 if (!lacc->first_child && !racc->first_child)
2631 tree t = lacc->base;
2633 lacc->type = racc->type;
2634 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2635 lacc->offset, racc->type))
2636 lacc->expr = t;
2637 else
2639 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2640 lacc->base, lacc->offset,
2641 racc, NULL, false);
2642 lacc->grp_no_warning = true;
2645 return ret;
2648 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2650 struct access *new_acc = NULL;
2651 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2653 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2654 &new_acc))
2656 if (new_acc)
2658 if (!new_acc->grp_write && rchild->grp_write)
2660 gcc_assert (!lacc->grp_write);
2661 subtree_mark_written_and_enqueue (new_acc);
2662 ret = true;
2665 rchild->grp_hint = 1;
2666 new_acc->grp_hint |= new_acc->grp_read;
2667 if (rchild->first_child)
2668 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2670 else
2672 if (rchild->grp_write && !lacc->grp_write)
2674 ret = true;
2675 subtree_mark_written_and_enqueue (lacc);
2678 continue;
2681 if (rchild->grp_unscalarizable_region)
2683 if (rchild->grp_write && !lacc->grp_write)
2685 ret = true;
2686 subtree_mark_written_and_enqueue (lacc);
2688 continue;
2691 rchild->grp_hint = 1;
2692 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2693 lacc->grp_write
2694 || rchild->grp_write);
2695 gcc_checking_assert (new_acc);
2696 if (racc->first_child)
2697 propagate_subaccesses_across_link (new_acc, rchild);
2699 add_access_to_work_queue (lacc);
2700 ret = true;
2703 return ret;
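/* A sketch of what this propagation achieves (illustration only): for an
   assignment `l = r;' where only `r.f' is accessed directly elsewhere, an
   artificial child access corresponding to `l.f' is created here, so that
   the aggregate copy can later be rewritten as a copy between scalar
   replacements.  */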
2706 /* Propagate all subaccesses across assignment links. */
2708 static void
2709 propagate_all_subaccesses (void)
2711 while (work_queue_head)
2713 struct access *racc = pop_access_from_work_queue ();
2714 struct assign_link *link;
2716 if (racc->group_representative)
2717 racc = racc->group_representative;
2718 gcc_assert (racc->first_link);
2720 for (link = racc->first_link; link; link = link->next)
2722 struct access *lacc = link->lacc;
2724 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2725 continue;
2726 lacc = lacc->group_representative;
2728 bool requeue_parents = false;
2729 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2731 if (!lacc->grp_write)
2733 subtree_mark_written_and_enqueue (lacc);
2734 requeue_parents = true;
2737 else if (propagate_subaccesses_across_link (lacc, racc))
2738 requeue_parents = true;
2740 if (requeue_parents)
2743 add_access_to_work_queue (lacc);
2744 lacc = lacc->parent;
2746 while (lacc);
2751 /* Go through all accesses collected throughout the (intraprocedural) analysis
2752 stage, exclude overlapping ones, identify representatives and build trees
2753 out of them, making decisions about scalarization on the way. Return true
2754 iff there are any to-be-scalarized variables after this stage. */
2756 static bool
2757 analyze_all_variable_accesses (void)
2759 int res = 0;
2760 bitmap tmp = BITMAP_ALLOC (NULL);
2761 bitmap_iterator bi;
2762 unsigned i;
2763 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2765 enum compiler_param param = optimize_speed_p
2766 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2767 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2769 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2770 fall back to a target default. */
2771 unsigned HOST_WIDE_INT max_scalarization_size
2772 = global_options_set.x_param_values[param]
2773 ? PARAM_VALUE (param)
2774 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2776 max_scalarization_size *= BITS_PER_UNIT;
2778 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2779 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2780 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2782 tree var = candidate (i);
2784 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2785 constant_decl_p (var)))
2787 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2788 <= max_scalarization_size)
2790 create_total_scalarization_access (var);
2791 completely_scalarize (var, TREE_TYPE (var), 0, var);
2792 statistics_counter_event (cfun,
2793 "Totally-scalarized aggregates", 1);
2794 if (dump_file && (dump_flags & TDF_DETAILS))
2796 fprintf (dump_file, "Will attempt to totally scalarize ");
2797 print_generic_expr (dump_file, var);
2798 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2801 else if (dump_file && (dump_flags & TDF_DETAILS))
2803 fprintf (dump_file, "Too big to totally scalarize: ");
2804 print_generic_expr (dump_file, var);
2805 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2810 bitmap_copy (tmp, candidate_bitmap);
2811 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2813 tree var = candidate (i);
2814 struct access *access;
2816 access = sort_and_splice_var_accesses (var);
2817 if (!access || !build_access_trees (access))
2818 disqualify_candidate (var,
2819 "No or inhibitingly overlapping accesses.");
2822 propagate_all_subaccesses ();
2824 bitmap_copy (tmp, candidate_bitmap);
2825 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2827 tree var = candidate (i);
2828 struct access *access = get_first_repr_for_decl (var);
2830 if (analyze_access_trees (access))
2832 res++;
2833 if (dump_file && (dump_flags & TDF_DETAILS))
2835 fprintf (dump_file, "\nAccess trees for ");
2836 print_generic_expr (dump_file, var);
2837 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2838 dump_access_tree (dump_file, access);
2839 fprintf (dump_file, "\n");
2842 else
2843 disqualify_candidate (var, "No scalar replacements to be created.");
2846 BITMAP_FREE (tmp);
2848 if (res)
2850 statistics_counter_event (cfun, "Scalarized aggregates", res);
2851 return true;
2853 else
2854 return false;
2857 /* Generate statements copying scalar replacements of accesses within a subtree
2858 into or out of AGG. ACCESS, all its children, siblings and their children
2859 are to be processed. AGG is an aggregate type expression (can be a
2860 declaration but does not have to be, it can for example also be a mem_ref or
2861 a series of handled components). TOP_OFFSET is the offset of the processed
2862 subtree which has to be subtracted from offsets of individual accesses to
2863 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2864 replacements in the interval <start_offset, start_offset + chunk_size>,
2865 otherwise copy all. GSI is a statement iterator used to place the new
2866 statements. WRITE should be true when the statements should write from AGG
2867 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2868 statements will be added after the current statement in GSI, they will be
2869 added before the statement otherwise. */
2871 static void
2872 generate_subtree_copies (struct access *access, tree agg,
2873 HOST_WIDE_INT top_offset,
2874 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2875 gimple_stmt_iterator *gsi, bool write,
2876 bool insert_after, location_t loc)
2878 /* Never write anything into constant pool decls. See PR70602. */
2879 if (!write && constant_decl_p (agg))
2880 return;
2883 if (chunk_size && access->offset >= start_offset + chunk_size)
2884 return;
2886 if (access->grp_to_be_replaced
2887 && (chunk_size == 0
2888 || access->offset + access->size > start_offset))
2890 tree expr, repl = get_access_replacement (access);
2891 gassign *stmt;
2893 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2894 access, gsi, insert_after);
2896 if (write)
2898 if (access->grp_partial_lhs)
2899 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2900 !insert_after,
2901 insert_after ? GSI_NEW_STMT
2902 : GSI_SAME_STMT);
2903 stmt = gimple_build_assign (repl, expr);
2905 else
2907 TREE_NO_WARNING (repl) = 1;
2908 if (access->grp_partial_lhs)
2909 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2910 !insert_after,
2911 insert_after ? GSI_NEW_STMT
2912 : GSI_SAME_STMT);
2913 stmt = gimple_build_assign (expr, repl);
2915 gimple_set_location (stmt, loc);
2917 if (insert_after)
2918 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2919 else
2920 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2921 update_stmt (stmt);
2922 sra_stats.subtree_copies++;
2924 else if (write
2925 && access->grp_to_be_debug_replaced
2926 && (chunk_size == 0
2927 || access->offset + access->size > start_offset))
2929 gdebug *ds;
2930 tree drhs = build_debug_ref_for_model (loc, agg,
2931 access->offset - top_offset,
2932 access);
2933 ds = gimple_build_debug_bind (get_access_replacement (access),
2934 drhs, gsi_stmt (*gsi));
2935 if (insert_after)
2936 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2937 else
2938 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2941 if (access->first_child)
2942 generate_subtree_copies (access->first_child, agg, top_offset,
2943 start_offset, chunk_size, gsi,
2944 write, insert_after, loc);
2946 access = access->next_sibling;
2948 while (access);
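/* For illustration (assumed shapes of the generated statements): for a fully
   scalarized `struct S { int i; float f; } s' and WRITE being true, the
   function emits loads along the lines of

       SR_i = s.i;
       SR_f = s.f;

   and, with WRITE false, the corresponding stores `s.i = SR_i;' and
   `s.f = SR_f;', where SR_i and SR_f stand for the scalar replacements.  */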
2951 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2952 root of the subtree to be processed. GSI is the statement iterator used
2953 for inserting statements which are added after the current statement if
2954 INSERT_AFTER is true or before it otherwise. */
2956 static void
2957 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2958 bool insert_after, location_t loc)
2961 struct access *child;
2963 if (access->grp_to_be_replaced)
2965 gassign *stmt;
2967 stmt = gimple_build_assign (get_access_replacement (access),
2968 build_zero_cst (access->type));
2969 if (insert_after)
2970 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2971 else
2972 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2973 update_stmt (stmt);
2974 gimple_set_location (stmt, loc);
2976 else if (access->grp_to_be_debug_replaced)
2978 gdebug *ds
2979 = gimple_build_debug_bind (get_access_replacement (access),
2980 build_zero_cst (access->type),
2981 gsi_stmt (*gsi));
2982 if (insert_after)
2983 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2984 else
2985 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2988 for (child = access->first_child; child; child = child->next_sibling)
2989 init_subtree_with_zero (child, gsi, insert_after, loc);
2992 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2993 root of the subtree to be processed. GSI is the statement iterator used
2994 for inserting statements which are added after the current statement if
2995 INSERT_AFTER is true or before it otherwise. */
2997 static void
2998 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2999 bool insert_after, location_t loc)
3002 struct access *child;
3004 if (access->grp_to_be_replaced)
3006 tree rep = get_access_replacement (access);
3007 tree clobber = build_constructor (access->type, NULL);
3008 TREE_THIS_VOLATILE (clobber) = 1;
3009 gimple *stmt = gimple_build_assign (rep, clobber);
3011 if (insert_after)
3012 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3013 else
3014 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3015 update_stmt (stmt);
3016 gimple_set_location (stmt, loc);
3019 for (child = access->first_child; child; child = child->next_sibling)
3020 clobber_subtree (child, gsi, insert_after, loc);
3023 /* Search for an access representative for the given expression EXPR and
3024 return it or NULL if it cannot be found. */
3026 static struct access *
3027 get_access_for_expr (tree expr)
3029 HOST_WIDE_INT offset, size, max_size;
3030 tree base;
3031 bool reverse;
3033 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3034 a different size than the size of its argument and we need the latter
3035 one. */
3036 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3037 expr = TREE_OPERAND (expr, 0);
3039 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
3040 if (max_size == -1 || !DECL_P (base))
3041 return NULL;
3043 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3044 return NULL;
3046 return get_var_base_offset_size_access (base, offset, max_size);
3049 /* Replace the expression EXPR with a scalar replacement if there is one and
3050 generate other statements to do type conversion or subtree copying if
3051 necessary. GSI is used to place newly created statements, WRITE is true if
3052 the expression is being written to (it is on a LHS of a statement or output
3053 in an assembly statement). */
3055 static bool
3056 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3058 location_t loc;
3059 struct access *access;
3060 tree type, bfr, orig_expr;
3062 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3064 bfr = *expr;
3065 expr = &TREE_OPERAND (*expr, 0);
3067 else
3068 bfr = NULL_TREE;
3070 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3071 expr = &TREE_OPERAND (*expr, 0);
3072 access = get_access_for_expr (*expr);
3073 if (!access)
3074 return false;
3075 type = TREE_TYPE (*expr);
3076 orig_expr = *expr;
3078 loc = gimple_location (gsi_stmt (*gsi));
3079 gimple_stmt_iterator alt_gsi = gsi_none ();
3080 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3082 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3083 gsi = &alt_gsi;
3086 if (access->grp_to_be_replaced)
3088 tree repl = get_access_replacement (access);
3089 /* If we replace a non-register typed access simply use the original
3090 access expression to extract the scalar component afterwards.
3091 This happens if scalarizing a function return value or parameter
3092 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3093 gcc.c-torture/compile/20011217-1.c.
3095 We also want to use this when accessing a complex or vector which can
3096 be accessed as a different type too, potentially creating a need for
3097 type conversion (see PR42196) and when scalarized unions are involved
3098 in assembler statements (see PR42398). */
3099 if (!useless_type_conversion_p (type, access->type))
3101 tree ref;
3103 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3105 if (write)
3107 gassign *stmt;
3109 if (access->grp_partial_lhs)
3110 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3111 false, GSI_NEW_STMT);
3112 stmt = gimple_build_assign (repl, ref);
3113 gimple_set_location (stmt, loc);
3114 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3116 else
3118 gassign *stmt;
3120 if (access->grp_partial_lhs)
3121 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3122 true, GSI_SAME_STMT);
3123 stmt = gimple_build_assign (ref, repl);
3124 gimple_set_location (stmt, loc);
3125 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3128 else
3129 *expr = repl;
3130 sra_stats.exprs++;
3132 else if (write && access->grp_to_be_debug_replaced)
3134 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3135 NULL_TREE,
3136 gsi_stmt (*gsi));
3137 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3140 if (access->first_child)
3142 HOST_WIDE_INT start_offset, chunk_size;
3143 if (bfr
3144 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3145 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3147 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3148 start_offset = access->offset
3149 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3151 else
3152 start_offset = chunk_size = 0;
3154 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3155 start_offset, chunk_size, gsi, write, write,
3156 loc);
3158 return true;
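/* For illustration: if `s.i' has a scalar replacement SR_i of a compatible
   type, a use such as `x = s.i;' is rewritten above simply to `x = SR_i;';
   when the types are incompatible, SR_i is instead loaded from (or stored
   through) a reference built over the original expression.  */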
3161 /* Where scalar replacements of the RHS have been written to when a replacement
3162 of a LHS of an assignment cannot be directly loaded from a replacement of
3163 the RHS. */
3164 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3165 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3166 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3168 struct subreplacement_assignment_data
3170 /* Offset of the access representing the lhs of the assignment. */
3171 HOST_WIDE_INT left_offset;
3173 /* LHS and RHS of the original assignment. */
3174 tree assignment_lhs, assignment_rhs;
3176 /* Access representing the rhs of the whole assignment. */
3177 struct access *top_racc;
3179 /* Stmt iterator used for statement insertions after the original assignment.
3180 It points to the main GSI used to traverse a BB during function body
3181 modification. */
3182 gimple_stmt_iterator *new_gsi;
3184 /* Stmt iterator used for statement insertions before the original
3185 assignment. Keeps on pointing to the original statement. */
3186 gimple_stmt_iterator old_gsi;
3188 /* Location of the assignment. */
3189 location_t loc;
3191 /* Keeps the information on whether we have needed to refresh replacements of
3192 the LHS and from which side of the assignment this takes place. */
3193 enum unscalarized_data_handling refreshed;
3196 /* Store all replacements in the access tree rooted in TOP_RACC either to their
3197 base aggregate if there are unscalarized data or directly to the LHS of the
3198 statement that is pointed to by GSI otherwise. */
3200 static void
3201 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3203 tree src;
3204 if (sad->top_racc->grp_unscalarized_data)
3206 src = sad->assignment_rhs;
3207 sad->refreshed = SRA_UDH_RIGHT;
3209 else
3211 src = sad->assignment_lhs;
3212 sad->refreshed = SRA_UDH_LEFT;
3214 generate_subtree_copies (sad->top_racc->first_child, src,
3215 sad->top_racc->offset, 0, 0,
3216 &sad->old_gsi, false, false, sad->loc);
3219 /* Try to generate statements to load all sub-replacements in an access subtree
3220 formed by children of LACC from scalar replacements in the SAD->top_racc
3221 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3222 and load the accesses from it. */
3224 static void
3225 load_assign_lhs_subreplacements (struct access *lacc,
3226 struct subreplacement_assignment_data *sad)
3228 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3230 HOST_WIDE_INT offset;
3231 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3233 if (lacc->grp_to_be_replaced)
3235 struct access *racc;
3236 gassign *stmt;
3237 tree rhs;
3239 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3240 if (racc && racc->grp_to_be_replaced)
3242 rhs = get_access_replacement (racc);
3243 if (!useless_type_conversion_p (lacc->type, racc->type))
3244 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3245 lacc->type, rhs);
3247 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3248 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3249 NULL_TREE, true, GSI_SAME_STMT);
3251 else
3253 /* No suitable access on the right hand side, need to load from
3254 the aggregate. See if we have to update it first... */
3255 if (sad->refreshed == SRA_UDH_NONE)
3256 handle_unscalarized_data_in_subtree (sad);
3258 if (sad->refreshed == SRA_UDH_LEFT)
3259 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3260 lacc->offset - sad->left_offset,
3261 lacc, sad->new_gsi, true);
3262 else
3263 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3264 lacc->offset - sad->left_offset,
3265 lacc, sad->new_gsi, true);
3266 if (lacc->grp_partial_lhs)
3267 rhs = force_gimple_operand_gsi (sad->new_gsi,
3268 rhs, true, NULL_TREE,
3269 false, GSI_NEW_STMT);
3272 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3273 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3274 gimple_set_location (stmt, sad->loc);
3275 update_stmt (stmt);
3276 sra_stats.subreplacements++;
3278 else
3280 if (sad->refreshed == SRA_UDH_NONE
3281 && lacc->grp_read && !lacc->grp_covered)
3282 handle_unscalarized_data_in_subtree (sad);
3284 if (lacc && lacc->grp_to_be_debug_replaced)
3286 gdebug *ds;
3287 tree drhs;
3288 struct access *racc = find_access_in_subtree (sad->top_racc,
3289 offset,
3290 lacc->size);
3292 if (racc && racc->grp_to_be_replaced)
3294 if (racc->grp_write || constant_decl_p (racc->base))
3295 drhs = get_access_replacement (racc);
3296 else
3297 drhs = NULL;
3299 else if (sad->refreshed == SRA_UDH_LEFT)
3300 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3301 lacc->offset, lacc);
3302 else if (sad->refreshed == SRA_UDH_RIGHT)
3303 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3304 offset, lacc);
3305 else
3306 drhs = NULL_TREE;
3307 if (drhs
3308 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3309 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3310 lacc->type, drhs);
3311 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3312 drhs, gsi_stmt (sad->old_gsi));
3313 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3317 if (lacc->first_child)
3318 load_assign_lhs_subreplacements (lacc, sad);
3322 /* Result code for SRA assignment modification. */
3323 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3324 SRA_AM_MODIFIED, /* stmt changed but not
3325 removed */
3326 SRA_AM_REMOVED }; /* stmt eliminated */
3328 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3329 to the assignment and GSI is the statement iterator pointing at it. Returns
3330 the same values as sra_modify_assign. */
3332 static enum assignment_mod_result
3333 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3335 tree lhs = gimple_assign_lhs (stmt);
3336 struct access *acc = get_access_for_expr (lhs);
3337 if (!acc)
3338 return SRA_AM_NONE;
3339 location_t loc = gimple_location (stmt);
3341 if (gimple_clobber_p (stmt))
3343 /* Clobber the replacement variable. */
3344 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3345 /* Remove clobbers of fully scalarized variables, they are dead. */
3346 if (acc->grp_covered)
3348 unlink_stmt_vdef (stmt);
3349 gsi_remove (gsi, true);
3350 release_defs (stmt);
3351 return SRA_AM_REMOVED;
3353 else
3354 return SRA_AM_MODIFIED;
3357 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3359 /* I have never seen this code path trigger but if it can happen the
3360 following should handle it gracefully. */
3361 if (access_has_children_p (acc))
3362 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3363 true, true, loc);
3364 return SRA_AM_MODIFIED;
3367 if (acc->grp_covered)
3369 init_subtree_with_zero (acc, gsi, false, loc);
3370 unlink_stmt_vdef (stmt);
3371 gsi_remove (gsi, true);
3372 release_defs (stmt);
3373 return SRA_AM_REMOVED;
3375 else
3377 init_subtree_with_zero (acc, gsi, true, loc);
3378 return SRA_AM_MODIFIED;
3382 /* Create and return a new suitable default definition SSA_NAME for RACC which
3383 is an access describing an uninitialized part of an aggregate that is being
3384 loaded. */
3386 static tree
3387 get_repl_default_def_ssa_name (struct access *racc)
3389 gcc_checking_assert (!racc->grp_to_be_replaced
3390 && !racc->grp_to_be_debug_replaced);
3391 if (!racc->replacement_decl)
3392 racc->replacement_decl = create_access_replacement (racc);
3393 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3396 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3397 bit-field field declaration somewhere in it. */
3399 static inline bool
3400 contains_vce_or_bfcref_p (const_tree ref)
3402 while (handled_component_p (ref))
3404 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3405 || (TREE_CODE (ref) == COMPONENT_REF
3406 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3407 return true;
3408 ref = TREE_OPERAND (ref, 0);
3411 return false;
3414 /* Examine both sides of the assignment statement pointed to by STMT, replace
3415 them with a scalar replacement if there is one and generate copying of
3416 replacements if scalarized aggregates have been used in the assignment. GSI
3417 is used to hold generated statements for type conversions and subtree
3418 copying. */
3420 static enum assignment_mod_result
3421 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3423 struct access *lacc, *racc;
3424 tree lhs, rhs;
3425 bool modify_this_stmt = false;
3426 bool force_gimple_rhs = false;
3427 location_t loc;
3428 gimple_stmt_iterator orig_gsi = *gsi;
3430 if (!gimple_assign_single_p (stmt))
3431 return SRA_AM_NONE;
3432 lhs = gimple_assign_lhs (stmt);
3433 rhs = gimple_assign_rhs1 (stmt);
3435 if (TREE_CODE (rhs) == CONSTRUCTOR)
3436 return sra_modify_constructor_assign (stmt, gsi);
3438 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3439 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3440 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3442 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3443 gsi, false);
3444 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3445 gsi, true);
3446 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3449 lacc = get_access_for_expr (lhs);
3450 racc = get_access_for_expr (rhs);
3451 if (!lacc && !racc)
3452 return SRA_AM_NONE;
3453 /* Avoid modifying initializations of constant-pool replacements. */
3454 if (racc && (racc->replacement_decl == lhs))
3455 return SRA_AM_NONE;
3457 loc = gimple_location (stmt);
3458 if (lacc && lacc->grp_to_be_replaced)
3460 lhs = get_access_replacement (lacc);
3461 gimple_assign_set_lhs (stmt, lhs);
3462 modify_this_stmt = true;
3463 if (lacc->grp_partial_lhs)
3464 force_gimple_rhs = true;
3465 sra_stats.exprs++;
3468 if (racc && racc->grp_to_be_replaced)
3470 rhs = get_access_replacement (racc);
3471 modify_this_stmt = true;
3472 if (racc->grp_partial_lhs)
3473 force_gimple_rhs = true;
3474 sra_stats.exprs++;
3476 else if (racc
3477 && !racc->grp_unscalarized_data
3478 && !racc->grp_unscalarizable_region
3479 && TREE_CODE (lhs) == SSA_NAME
3480 && !access_has_replacements_p (racc))
3482 rhs = get_repl_default_def_ssa_name (racc);
3483 modify_this_stmt = true;
3484 sra_stats.exprs++;
3487 if (modify_this_stmt)
3489 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3491 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3492 ??? This should move to fold_stmt which we simply should
3493 call after building a VIEW_CONVERT_EXPR here. */
3494 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3495 && !contains_bitfld_component_ref_p (lhs))
3497 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3498 gimple_assign_set_lhs (stmt, lhs);
3500 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3501 && !contains_vce_or_bfcref_p (rhs))
3502 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3504 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3506 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3507 rhs);
3508 if (is_gimple_reg_type (TREE_TYPE (lhs))
3509 && TREE_CODE (lhs) != SSA_NAME)
3510 force_gimple_rhs = true;
3515 if (lacc && lacc->grp_to_be_debug_replaced)
3517 tree dlhs = get_access_replacement (lacc);
3518 tree drhs = unshare_expr (rhs);
3519 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3521 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3522 && !contains_vce_or_bfcref_p (drhs))
3523 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3524 if (drhs
3525 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3526 TREE_TYPE (drhs)))
3527 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3528 TREE_TYPE (dlhs), drhs);
3530 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3531 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3534 /* From this point on, the function deals with assignments in between
3535 aggregates when at least one has scalar reductions of some of its
3536 components. There are three possible scenarios: 1) both the LHS and the RHS
3537 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3539 In the first case, we would like to load the LHS components from RHS
3540 components whenever possible. If that is not possible, we would like to
3541 read it directly from the RHS (after updating it by storing in it its own
3542 components). If there are some necessary unscalarized data in the LHS,
3543 those will be loaded by the original assignment too. If neither of these
3544 cases happen, the original statement can be removed. Most of this is done
3545 by load_assign_lhs_subreplacements.
3547 In the second case, we would like to store all RHS scalarized components
3548 directly into LHS and if they cover the aggregate completely, remove the
3549 statement too. In the third case, we want the LHS components to be loaded
3550 directly from the RHS (DSE will remove the original statement if it
3551 becomes redundant).
3553 This is a bit complex but manageable when types match and when unions do
3554 not cause confusion in a way that we cannot really load a component of LHS
3555 from the RHS or vice versa (the access representing this level can have
3556 subaccesses that are accessible only through a different union field at a
3557 higher level - different from the one used in the examined expression).
3558 Unions are fun.
3560 Therefore, I specially handle a fourth case, happening when there is a
3561 specific type cast or it is impossible to locate a scalarized subaccess on
3562 the other side of the expression. If that happens, I simply "refresh" the
3563 RHS by storing its scalarized components into it, leave the original statement
3564 there to do the copying and then load the scalar replacements of the LHS.
3565 This is what the first branch does. */
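/* An example of that fourth case (illustration only): for a copy through a
   view-converting cast such as `u = VIEW_CONVERT_EXPR<union U>(a);', the
   scalar replacements of the RHS are first flushed into `a', the aggregate
   copy below is kept in place, and the replacements of the LHS are
   re-loaded after it.  */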
3567 if (modify_this_stmt
3568 || gimple_has_volatile_ops (stmt)
3569 || contains_vce_or_bfcref_p (rhs)
3570 || contains_vce_or_bfcref_p (lhs)
3571 || stmt_ends_bb_p (stmt))
3573 /* No need to copy into a constant-pool, it comes pre-initialized. */
3574 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3575 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3576 gsi, false, false, loc);
3577 if (access_has_children_p (lacc))
3579 gimple_stmt_iterator alt_gsi = gsi_none ();
3580 if (stmt_ends_bb_p (stmt))
3582 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3583 gsi = &alt_gsi;
3585 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3586 gsi, true, true, loc);
3588 sra_stats.separate_lhs_rhs_handling++;
3590 /* This gimplification must be done after generate_subtree_copies,
3591 lest we insert the subtree copies in the middle of the gimplified
3592 sequence. */
3593 if (force_gimple_rhs)
3594 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3595 true, GSI_SAME_STMT);
3596 if (gimple_assign_rhs1 (stmt) != rhs)
3598 modify_this_stmt = true;
3599 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3600 gcc_assert (stmt == gsi_stmt (orig_gsi));
3603 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3605 else
3607 if (access_has_children_p (lacc)
3608 && access_has_children_p (racc)
3609 /* When an access represents an unscalarizable region, it usually
3610 represents accesses with variable offset and thus must not be used
3611 to generate new memory accesses. */
3612 && !lacc->grp_unscalarizable_region
3613 && !racc->grp_unscalarizable_region)
3615 struct subreplacement_assignment_data sad;
3617 sad.left_offset = lacc->offset;
3618 sad.assignment_lhs = lhs;
3619 sad.assignment_rhs = rhs;
3620 sad.top_racc = racc;
3621 sad.old_gsi = *gsi;
3622 sad.new_gsi = gsi;
3623 sad.loc = gimple_location (stmt);
3624 sad.refreshed = SRA_UDH_NONE;
3626 if (lacc->grp_read && !lacc->grp_covered)
3627 handle_unscalarized_data_in_subtree (&sad);
3629 load_assign_lhs_subreplacements (lacc, &sad);
3630 if (sad.refreshed != SRA_UDH_RIGHT)
3632 gsi_next (gsi);
3633 unlink_stmt_vdef (stmt);
3634 gsi_remove (&sad.old_gsi, true);
3635 release_defs (stmt);
3636 sra_stats.deleted++;
3637 return SRA_AM_REMOVED;
3640 else
3642 if (access_has_children_p (racc)
3643 && !racc->grp_unscalarized_data
3644 && TREE_CODE (lhs) != SSA_NAME)
3646 if (dump_file)
3648 fprintf (dump_file, "Removing load: ");
3649 print_gimple_stmt (dump_file, stmt, 0);
3651 generate_subtree_copies (racc->first_child, lhs,
3652 racc->offset, 0, 0, gsi,
3653 false, false, loc);
3654 gcc_assert (stmt == gsi_stmt (*gsi));
3655 unlink_stmt_vdef (stmt);
3656 gsi_remove (gsi, true);
3657 release_defs (stmt);
3658 sra_stats.deleted++;
3659 return SRA_AM_REMOVED;
3661 /* Restore the aggregate RHS from its components so the
3662 prevailing aggregate copy does the right thing. */
3663 if (access_has_children_p (racc))
3664 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3665 gsi, false, false, loc);
3666 /* Re-load the components of the aggregate copy destination.
3667 But use the RHS aggregate to load from to expose more
3668 optimization opportunities. */
3669 if (access_has_children_p (lacc))
3670 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3671 0, 0, gsi, true, true, loc);
3674 return SRA_AM_NONE;
3678 /* Set any scalar replacements of values in the constant pool to the initial
3679 value of the constant. (Constant-pool decls like *.LC0 have effectively
3680 been initialized before the program starts, we must do the same for their
3681 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3682 the function's entry block. */
3684 static void
3685 initialize_constant_pool_replacements (void)
3687 gimple_seq seq = NULL;
3688 gimple_stmt_iterator gsi = gsi_start (seq);
3689 bitmap_iterator bi;
3690 unsigned i;
3692 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3694 tree var = candidate (i);
3695 if (!constant_decl_p (var))
3696 continue;
3697 vec<access_p> *access_vec = get_base_access_vector (var);
3698 if (!access_vec)
3699 continue;
3700 for (unsigned i = 0; i < access_vec->length (); i++)
3702 struct access *access = (*access_vec)[i];
3703 if (!access->replacement_decl)
3704 continue;
3705 gassign *stmt
3706 = gimple_build_assign (get_access_replacement (access),
3707 unshare_expr (access->expr));
3708 if (dump_file && (dump_flags & TDF_DETAILS))
3710 fprintf (dump_file, "Generating constant initializer: ");
3711 print_gimple_stmt (dump_file, stmt, 0);
3712 fprintf (dump_file, "\n");
3714 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3715 update_stmt (stmt);
3719 seq = gsi_seq (gsi);
3720 if (seq)
3721 gsi_insert_seq_on_edge_immediate (
3722 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3725 /* Traverse the function body and perform all modifications as decided in
3726 analyze_all_variable_accesses. Return true iff the CFG has been
3727 changed. */
3729 static bool
3730 sra_modify_function_body (void)
3732 bool cfg_changed = false;
3733 basic_block bb;
3735 initialize_constant_pool_replacements ();
3737 FOR_EACH_BB_FN (bb, cfun)
3739 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3740 while (!gsi_end_p (gsi))
3742 gimple *stmt = gsi_stmt (gsi);
3743 enum assignment_mod_result assign_result;
3744 bool modified = false, deleted = false;
3745 tree *t;
3746 unsigned i;
3748 switch (gimple_code (stmt))
3750 case GIMPLE_RETURN:
3751 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3752 if (*t != NULL_TREE)
3753 modified |= sra_modify_expr (t, &gsi, false);
3754 break;
3756 case GIMPLE_ASSIGN:
3757 assign_result = sra_modify_assign (stmt, &gsi);
3758 modified |= assign_result == SRA_AM_MODIFIED;
3759 deleted = assign_result == SRA_AM_REMOVED;
3760 break;
3762 case GIMPLE_CALL:
3763 /* Operands must be processed before the lhs. */
3764 for (i = 0; i < gimple_call_num_args (stmt); i++)
3766 t = gimple_call_arg_ptr (stmt, i);
3767 modified |= sra_modify_expr (t, &gsi, false);
3770 if (gimple_call_lhs (stmt))
3772 t = gimple_call_lhs_ptr (stmt);
3773 modified |= sra_modify_expr (t, &gsi, true);
3775 break;
3777 case GIMPLE_ASM:
3779 gasm *asm_stmt = as_a <gasm *> (stmt);
3780 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3782 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3783 modified |= sra_modify_expr (t, &gsi, false);
3785 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3787 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3788 modified |= sra_modify_expr (t, &gsi, true);
3791 break;
3793 default:
3794 break;
3797 if (modified)
3799 update_stmt (stmt);
3800 if (maybe_clean_eh_stmt (stmt)
3801 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3802 cfg_changed = true;
3804 if (!deleted)
3805 gsi_next (&gsi);
3809 gsi_commit_edge_inserts ();
3810 return cfg_changed;
3813 /* Generate statements initializing scalar replacements of parts of function
3814 parameters. */
3816 static void
3817 initialize_parameter_reductions (void)
3819 gimple_stmt_iterator gsi;
3820 gimple_seq seq = NULL;
3821 tree parm;
3823 gsi = gsi_start (seq);
3824 for (parm = DECL_ARGUMENTS (current_function_decl);
3825 parm;
3826 parm = DECL_CHAIN (parm))
3828 vec<access_p> *access_vec;
3829 struct access *access;
3831 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3832 continue;
3833 access_vec = get_base_access_vector (parm);
3834 if (!access_vec)
3835 continue;
3837 for (access = (*access_vec)[0];
3838 access;
3839 access = access->next_grp)
3840 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3841 EXPR_LOCATION (parm));
3844 seq = gsi_seq (gsi);
3845 if (seq)
3846 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
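/* Illustrative sketch only; the names are hypothetical.  For a
   scalarized by-value aggregate parameter like P, the code above inserts
   loads of P's components into their replacements on the single edge out
   of the entry block, so the scalars start with the caller's values.  */
struct sra_doc_arg { int a; int b; };

ATTRIBUTE_UNUSED static int
sra_doc_param_reduction (struct sra_doc_arg p)
{
  return p.a * p.b;   /* field reads become uses of the replacements */
}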
3849 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3850 it reveals there are components of some aggregates to be scalarized, it runs
3851 the required transformations. */
3852 static unsigned int
3853 perform_intra_sra (void)
3855 int ret = 0;
3856 sra_initialize ();
3858 if (!find_var_candidates ())
3859 goto out;
3861 if (!scan_function ())
3862 goto out;
3864 if (!analyze_all_variable_accesses ())
3865 goto out;
3867 if (sra_modify_function_body ())
3868 ret = TODO_update_ssa | TODO_cleanup_cfg;
3869 else
3870 ret = TODO_update_ssa;
3871 initialize_parameter_reductions ();
3873 statistics_counter_event (cfun, "Scalar replacements created",
3874 sra_stats.replacements);
3875 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3876 statistics_counter_event (cfun, "Subtree copy stmts",
3877 sra_stats.subtree_copies);
3878 statistics_counter_event (cfun, "Subreplacement stmts",
3879 sra_stats.subreplacements);
3880 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3881 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3882 sra_stats.separate_lhs_rhs_handling);
3884 out:
3885 sra_deinitialize ();
3886 return ret;
3889 /* Perform early intraprocedural SRA. */
3890 static unsigned int
3891 early_intra_sra (void)
3893 sra_mode = SRA_MODE_EARLY_INTRA;
3894 return perform_intra_sra ();
3897 /* Perform "late" intraprocedural SRA. */
3898 static unsigned int
3899 late_intra_sra (void)
3901 sra_mode = SRA_MODE_INTRA;
3902 return perform_intra_sra ();
3906 static bool
3907 gate_intra_sra (void)
3909 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
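/* A sketch of how these passes are typically exercised from the command
   line (assuming a test file test.c):

     gcc -O1 -fdump-tree-esra-details test.c    dump of early SRA
     gcc -O2 -fdump-tree-sra-details test.c     dump of late SRA
     gcc -O2 -fno-tree-sra test.c               disable both passes
     gcc -O2 -fdbg-cnt=tree_sra:10 test.c       bisect with the debug
                                                counter gated above

   The dump suffixes follow the pass names "esra" and "sra" given
   below.  */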
3913 namespace {
3915 const pass_data pass_data_sra_early =
3917 GIMPLE_PASS, /* type */
3918 "esra", /* name */
3919 OPTGROUP_NONE, /* optinfo_flags */
3920 TV_TREE_SRA, /* tv_id */
3921 ( PROP_cfg | PROP_ssa ), /* properties_required */
3922 0, /* properties_provided */
3923 0, /* properties_destroyed */
3924 0, /* todo_flags_start */
3925 TODO_update_ssa, /* todo_flags_finish */
3928 class pass_sra_early : public gimple_opt_pass
3930 public:
3931 pass_sra_early (gcc::context *ctxt)
3932 : gimple_opt_pass (pass_data_sra_early, ctxt)
3935 /* opt_pass methods: */
3936 virtual bool gate (function *) { return gate_intra_sra (); }
3937 virtual unsigned int execute (function *) { return early_intra_sra (); }
3939 }; // class pass_sra_early
3941 } // anon namespace
3943 gimple_opt_pass *
3944 make_pass_sra_early (gcc::context *ctxt)
3946 return new pass_sra_early (ctxt);
3949 namespace {
3951 const pass_data pass_data_sra =
3953 GIMPLE_PASS, /* type */
3954 "sra", /* name */
3955 OPTGROUP_NONE, /* optinfo_flags */
3956 TV_TREE_SRA, /* tv_id */
3957 ( PROP_cfg | PROP_ssa ), /* properties_required */
3958 0, /* properties_provided */
3959 0, /* properties_destroyed */
3960 TODO_update_address_taken, /* todo_flags_start */
3961 TODO_update_ssa, /* todo_flags_finish */
3964 class pass_sra : public gimple_opt_pass
3966 public:
3967 pass_sra (gcc::context *ctxt)
3968 : gimple_opt_pass (pass_data_sra, ctxt)
3971 /* opt_pass methods: */
3972 virtual bool gate (function *) { return gate_intra_sra (); }
3973 virtual unsigned int execute (function *) { return late_intra_sra (); }
3975 }; // class pass_sra
3977 } // anon namespace
3979 gimple_opt_pass *
3980 make_pass_sra (gcc::context *ctxt)
3982 return new pass_sra (ctxt);
3986 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3987 parameter. */
3989 static bool
3990 is_unused_scalar_param (tree parm)
3992 tree name;
3993 return (is_gimple_reg (parm)
3994 && (!(name = ssa_default_def (cfun, parm))
3995 || has_zero_uses (name)));
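/* Illustrative sketch only; the names are hypothetical.  UNUSED below is
   a gimple register whose default definition has zero uses, so
   is_unused_scalar_param holds and IPA-SRA may drop it from the
   signature altogether.  */
ATTRIBUTE_UNUSED static int
sra_doc_unused_param (int used, int unused)
{
  return used + 1;   /* UNUSED is never read */
}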
3998 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3999 examine whether any of them are direct uses or otherwise infeasible. If so,
4000 return true, otherwise return false. PARM must be a gimple register with a
4001 non-NULL default definition. */
4003 static bool
4004 ptr_parm_has_direct_uses (tree parm)
4006 imm_use_iterator ui;
4007 gimple *stmt;
4008 tree name = ssa_default_def (cfun, parm);
4009 bool ret = false;
4011 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4013 int uses_ok = 0;
4014 use_operand_p use_p;
4016 if (is_gimple_debug (stmt))
4017 continue;
4019 /* Valid uses include dereferences on the lhs and the rhs. */
4020 if (gimple_has_lhs (stmt))
4022 tree lhs = gimple_get_lhs (stmt);
4023 while (handled_component_p (lhs))
4024 lhs = TREE_OPERAND (lhs, 0);
4025 if (TREE_CODE (lhs) == MEM_REF
4026 && TREE_OPERAND (lhs, 0) == name
4027 && integer_zerop (TREE_OPERAND (lhs, 1))
4028 && types_compatible_p (TREE_TYPE (lhs),
4029 TREE_TYPE (TREE_TYPE (name)))
4030 && !TREE_THIS_VOLATILE (lhs))
4031 uses_ok++;
4033 if (gimple_assign_single_p (stmt))
4035 tree rhs = gimple_assign_rhs1 (stmt);
4036 while (handled_component_p (rhs))
4037 rhs = TREE_OPERAND (rhs, 0);
4038 if (TREE_CODE (rhs) == MEM_REF
4039 && TREE_OPERAND (rhs, 0) == name
4040 && integer_zerop (TREE_OPERAND (rhs, 1))
4041 && types_compatible_p (TREE_TYPE (rhs),
4042 TREE_TYPE (TREE_TYPE (name)))
4043 && !TREE_THIS_VOLATILE (rhs))
4044 uses_ok++;
4046 else if (is_gimple_call (stmt))
4048 unsigned i;
4049 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4051 tree arg = gimple_call_arg (stmt, i);
4052 while (handled_component_p (arg))
4053 arg = TREE_OPERAND (arg, 0);
4054 if (TREE_CODE (arg) == MEM_REF
4055 && TREE_OPERAND (arg, 0) == name
4056 && integer_zerop (TREE_OPERAND (arg, 1))
4057 && types_compatible_p (TREE_TYPE (arg),
4058 TREE_TYPE (TREE_TYPE (name)))
4059 && !TREE_THIS_VOLATILE (arg))
4060 uses_ok++;
4064 /* If the number of valid uses does not match the number of
4065 uses in this stmt, there is an unhandled use. */
4066 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4067 --uses_ok;
4069 if (uses_ok != 0)
4070 ret = true;
4072 if (ret)
4073 BREAK_FROM_IMM_USE_STMT (ui);
4076 return ret;
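/* Illustrative sketch only; the names are hypothetical.  In the first
   function every use of P is a dereference of the expected type, so
   ptr_parm_has_direct_uses returns false.  In the second, the pointer
   value itself escapes -- a use that is not a dereference -- which makes
   the function above return true and disqualifies the parameter.  */
int *sra_doc_escape;

ATTRIBUTE_UNUSED static int
sra_doc_deref_only (int *p)
{
  return *p;              /* plain dereference: acceptable use */
}

ATTRIBUTE_UNUSED static int
sra_doc_pointer_escapes (int *p)
{
  sra_doc_escape = p;     /* direct use of the pointer value */
  return *p;
}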
4079 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4080 them in candidate_bitmap. Note that these do not necessarily include
4081 parameters which are unused and thus can be removed. Return true iff any
4082 such candidate has been found. */
4084 static bool
4085 find_param_candidates (void)
4087 tree parm;
4088 int count = 0;
4089 bool ret = false;
4090 const char *msg;
4092 for (parm = DECL_ARGUMENTS (current_function_decl);
4093 parm;
4094 parm = DECL_CHAIN (parm))
4096 tree type = TREE_TYPE (parm);
4097 tree_node **slot;
4099 count++;
4101 if (TREE_THIS_VOLATILE (parm)
4102 || TREE_ADDRESSABLE (parm)
4103 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4104 continue;
4106 if (is_unused_scalar_param (parm))
4108 ret = true;
4109 continue;
4112 if (POINTER_TYPE_P (type))
4114 type = TREE_TYPE (type);
4116 if (TREE_CODE (type) == FUNCTION_TYPE
4117 || TYPE_VOLATILE (type)
4118 || (TREE_CODE (type) == ARRAY_TYPE
4119 && TYPE_NONALIASED_COMPONENT (type))
4120 || !is_gimple_reg (parm)
4121 || is_va_list_type (type)
4122 || ptr_parm_has_direct_uses (parm))
4123 continue;
4125 else if (!AGGREGATE_TYPE_P (type))
4126 continue;
4128 if (!COMPLETE_TYPE_P (type)
4129 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4130 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4131 || (AGGREGATE_TYPE_P (type)
4132 && type_internals_preclude_sra_p (type, &msg)))
4133 continue;
4135 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4136 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4137 *slot = parm;
4139 ret = true;
4140 if (dump_file && (dump_flags & TDF_DETAILS))
4142 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4143 print_generic_expr (dump_file, parm);
4144 fprintf (dump_file, "\n");
4148 func_param_count = count;
4149 return ret;
4152 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4153 maybe_modified. */
4155 static bool
4156 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4157 void *data)
4159 struct access *repr = (struct access *) data;
4161 repr->grp_maybe_modified = 1;
4162 return true;
4165 /* Analyze what representatives (in linked lists accessible from
4166 REPRESENTATIVES) can be modified by side effects of statements in the
4167 current function. */
4169 static void
4170 analyze_modified_params (vec<access_p> representatives)
4172 int i;
4174 for (i = 0; i < func_param_count; i++)
4176 struct access *repr;
4178 for (repr = representatives[i];
4179 repr;
4180 repr = repr->next_grp)
4182 struct access *access;
4183 bitmap visited;
4184 ao_ref ar;
4186 if (no_accesses_p (repr))
4187 continue;
4188 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4189 || repr->grp_maybe_modified)
4190 continue;
4192 ao_ref_init (&ar, repr->expr);
4193 visited = BITMAP_ALLOC (NULL);
4194 for (access = repr; access; access = access->next_sibling)
4196 /* All accesses are read ones, otherwise grp_maybe_modified would
4197 be trivially set. */
4198 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4199 mark_maybe_modified, repr, &visited);
4200 if (repr->grp_maybe_modified)
4201 break;
4203 BITMAP_FREE (visited);
4208 /* Propagate distances in bb_dereferences in the opposite direction to the
4209 control flow edges, in each step storing the maximum of the current value
4210 and the minimum of all successors. These steps are repeated until the table
4211 stabilizes. Note that BBs which might terminate the function (according to
4212 the final_bbs bitmap) are never updated in this way. */
4214 static void
4215 propagate_dereference_distances (void)
4217 basic_block bb;
4219 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4220 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4221 FOR_EACH_BB_FN (bb, cfun)
4223 queue.quick_push (bb);
4224 bb->aux = bb;
4227 while (!queue.is_empty ())
4229 edge_iterator ei;
4230 edge e;
4231 bool change = false;
4232 int i;
4234 bb = queue.pop ();
4235 bb->aux = NULL;
4237 if (bitmap_bit_p (final_bbs, bb->index))
4238 continue;
4240 for (i = 0; i < func_param_count; i++)
4242 int idx = bb->index * func_param_count + i;
4243 bool first = true;
4244 HOST_WIDE_INT inh = 0;
4246 FOR_EACH_EDGE (e, ei, bb->succs)
4248 int succ_idx = e->dest->index * func_param_count + i;
4250 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4251 continue;
4253 if (first)
4255 first = false;
4256 inh = bb_dereferences [succ_idx];
4258 else if (bb_dereferences [succ_idx] < inh)
4259 inh = bb_dereferences [succ_idx];
4262 if (!first && bb_dereferences[idx] < inh)
4264 bb_dereferences[idx] = inh;
4265 change = true;
4269 if (change && !bitmap_bit_p (final_bbs, bb->index))
4270 FOR_EACH_EDGE (e, ei, bb->preds)
4272 if (e->src->aux)
4273 continue;
4275 e->src->aux = e->src;
4276 queue.quick_push (e->src);
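/* Illustrative sketch only, in the spirit of
   testsuite/gcc.c-torture/execute/ipa-sra-2.c.  P is dereferenced on one
   path only, so after the propagation above the distance recorded for P
   at the entry block stays 0 and the representative is marked
   grp_not_necessarilly_dereferenced: a caller must not dereference P
   unconditionally on the callee's behalf, e.g. when P may be NULL.  */
ATTRIBUTE_UNUSED static int
sra_doc_conditional_deref (int *p, int flag)
{
  if (flag)
    return *p;   /* the only dereference of P */
  return 0;
}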
4281 /* Dump a dereferences TABLE with heading STR to file F. */
4283 static void
4284 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4286 basic_block bb;
4288 fprintf (f, "%s", str);
4289 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4290 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4292 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4293 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4295 int i;
4296 for (i = 0; i < func_param_count; i++)
4298 int idx = bb->index * func_param_count + i;
4299 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4302 fprintf (f, "\n");
4304 fprintf (f, "\n");
4307 /* Determine which (parts of) parameters passed by reference and not assigned
4308 to are not certainly dereferenced in this function, and thus the
4309 dereferencing cannot be safely moved to the caller without potentially
4310 introducing a segfault. Mark such REPRESENTATIVES as
4311 grp_not_necessarilly_dereferenced.
4313 For each pointer parameter, the maximum dereferenced "distance," i.e. the
4314 offset + size of the accessed part, is calculated rather than a simple
4315 boolean, in order to handle cases when only a fraction of the whole
4316 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4317 an example).
4319 The maximum dereference distances for each pointer parameter and BB are
4320 already stored in bb_dereferences. This routine simply propagates these
4321 values upwards by propagate_dereference_distances and then compares the
4322 distances of individual parameters in the ENTRY BB to the equivalent
4323 distances of each representative of a (fraction of a) parameter. */
4325 static void
4326 analyze_caller_dereference_legality (vec<access_p> representatives)
4328 int i;
4330 if (dump_file && (dump_flags & TDF_DETAILS))
4331 dump_dereferences_table (dump_file,
4332 "Dereference table before propagation:\n",
4333 bb_dereferences);
4335 propagate_dereference_distances ();
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4338 dump_dereferences_table (dump_file,
4339 "Dereference table after propagation:\n",
4340 bb_dereferences);
4342 for (i = 0; i < func_param_count; i++)
4344 struct access *repr = representatives[i];
4345 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4347 if (!repr || no_accesses_p (repr))
4348 continue;
4352 if ((repr->offset + repr->size) > bb_dereferences[idx])
4353 repr->grp_not_necessarilly_dereferenced = 1;
4354 repr = repr->next_grp;
4356 while (repr);
4360 /* Return the representative access for the parameter declaration PARM if it is
4361 a scalar passed by reference which is not written to and the pointer value
4362 is not used directly. Thus, if it is legal to dereference it in the caller
4363 and we can rule out modifications through aliases, such a parameter should be
4364 turned into one passed by value. Return NULL otherwise. */
4366 static struct access *
4367 unmodified_by_ref_scalar_representative (tree parm)
4369 int i, access_count;
4370 struct access *repr;
4371 vec<access_p> *access_vec;
4373 access_vec = get_base_access_vector (parm);
4374 gcc_assert (access_vec);
4375 repr = (*access_vec)[0];
4376 if (repr->write)
4377 return NULL;
4378 repr->group_representative = repr;
4380 access_count = access_vec->length ();
4381 for (i = 1; i < access_count; i++)
4383 struct access *access = (*access_vec)[i];
4384 if (access->write)
4385 return NULL;
4386 access->group_representative = repr;
4387 access->next_sibling = repr->next_sibling;
4388 repr->next_sibling = access;
4391 repr->grp_read = 1;
4392 repr->grp_scalar_ptr = 1;
4393 return repr;
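/* Illustrative sketch only; the names are hypothetical.  All accesses to
   *P below are reads of the pointed-to scalar, so the function above
   returns a representative with grp_scalar_ptr set and, if the
   modification and dereference analyses agree, IPA-SRA turns the
   parameter into a plain 'int' passed by value.  */
ATTRIBUTE_UNUSED static int
sra_doc_by_ref_scalar (const int *p)
{
  return *p + 1;   /* reads only: by-value candidate */
}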
4396 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4397 associated with. REQ_ALIGN is the minimum required alignment. */
4399 static bool
4400 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4402 unsigned int exp_align;
4403 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4404 is incompatible assign in a call statement (and possibly even in asm
4405 statements). This can be relaxed by using a new temporary but only for
4406 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4407 intraprocedural SRA we deal with this by keeping the old aggregate around,
4408 something we cannot do in IPA-SRA.) */
4409 if (access->write
4410 && (is_gimple_call (access->stmt)
4411 || gimple_code (access->stmt) == GIMPLE_ASM))
4412 return true;
4414 exp_align = get_object_alignment (access->expr);
4415 if (exp_align < req_align)
4416 return true;
4418 return false;
4422 /* Sort collected accesses for parameter PARM, identify representatives for
4423 each accessed region and link them together. Return NULL if there are
4424 different but overlapping accesses, return the special pointer value meaning
4425 there are no accesses for this parameter if that is the case, and return the
4426 first representative otherwise. Set *RO_GRP if there is a group of accesses
4427 with only read (i.e. no write) accesses. */
4429 static struct access *
4430 splice_param_accesses (tree parm, bool *ro_grp)
4432 int i, j, access_count, group_count;
4433 int agg_size, total_size = 0;
4434 struct access *access, *res, **prev_acc_ptr = &res;
4435 vec<access_p> *access_vec;
4437 access_vec = get_base_access_vector (parm);
4438 if (!access_vec)
4439 return &no_accesses_representant;
4440 access_count = access_vec->length ();
4442 access_vec->qsort (compare_access_positions);
4444 i = 0;
4445 total_size = 0;
4446 group_count = 0;
4447 while (i < access_count)
4449 bool modification;
4450 tree a1_alias_type;
4451 access = (*access_vec)[i];
4452 modification = access->write;
4453 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4454 return NULL;
4455 a1_alias_type = reference_alias_ptr_type (access->expr);
4457 /* Access is about to become group representative unless we find some
4458 nasty overlap which would preclude us from breaking this parameter
4459 apart. */
4461 j = i + 1;
4462 while (j < access_count)
4464 struct access *ac2 = (*access_vec)[j];
4465 if (ac2->offset != access->offset)
4467 /* All or nothing law for parameters. */
4468 if (access->offset + access->size > ac2->offset)
4469 return NULL;
4470 else
4471 break;
4473 else if (ac2->size != access->size)
4474 return NULL;
4476 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4477 || (ac2->type != access->type
4478 && (TREE_ADDRESSABLE (ac2->type)
4479 || TREE_ADDRESSABLE (access->type)))
4480 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4481 return NULL;
4483 modification |= ac2->write;
4484 ac2->group_representative = access;
4485 ac2->next_sibling = access->next_sibling;
4486 access->next_sibling = ac2;
4487 j++;
4490 group_count++;
4491 access->grp_maybe_modified = modification;
4492 if (!modification)
4493 *ro_grp = true;
4494 *prev_acc_ptr = access;
4495 prev_acc_ptr = &access->next_grp;
4496 total_size += access->size;
4497 i = j;
4500 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4501 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4502 else
4503 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4504 if (total_size >= agg_size)
4505 return NULL;
4507 gcc_assert (group_count > 0);
4508 return res;
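/* Illustrative sketch only; the names are hypothetical.  The two field
   reads below have disjoint offset/size pairs and splice into two
   groups.  Reading the whole struct as well (the same offset as .a but
   a different size) would instead make splice_param_accesses return
   NULL under the all-or-nothing rule above.  */
struct sra_doc_two { int a; int b; };

ATTRIBUTE_UNUSED static int
sra_doc_disjoint_fields (struct sra_doc_two *p)
{
  return p->a + p->b;   /* two non-overlapping accesses */
}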
4511 /* Decide whether parameters with representative accesses given by REPR should
4512 be reduced into components. */
4514 static int
4515 decide_one_param_reduction (struct access *repr)
4517 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4518 bool by_ref;
4519 tree parm;
4521 parm = repr->base;
4522 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4523 gcc_assert (cur_parm_size > 0);
4525 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4527 by_ref = true;
4528 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4530 else
4532 by_ref = false;
4533 agg_size = cur_parm_size;
4536 if (dump_file)
4538 struct access *acc;
4539 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4540 print_generic_expr (dump_file, parm);
4541 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4542 for (acc = repr; acc; acc = acc->next_grp)
4543 dump_access (dump_file, acc, true);
4546 total_size = 0;
4547 new_param_count = 0;
4549 for (; repr; repr = repr->next_grp)
4551 gcc_assert (parm == repr->base);
4553 /* Taking the address of a non-addressable field is verboten. */
4554 if (by_ref && repr->non_addressable)
4555 return 0;
4557 /* Do not decompose a non-BLKmode param in a way that would
4558 create BLKmode params. Especially for by-reference passing
4559 (thus, pointer-type param) this is hardly worthwhile. */
4560 if (DECL_MODE (parm) != BLKmode
4561 && TYPE_MODE (repr->type) == BLKmode)
4562 return 0;
4564 if (!by_ref || (!repr->grp_maybe_modified
4565 && !repr->grp_not_necessarilly_dereferenced))
4566 total_size += repr->size;
4567 else
4568 total_size += cur_parm_size;
4570 new_param_count++;
4573 gcc_assert (new_param_count > 0);
4575 if (optimize_function_for_size_p (cfun))
4576 parm_size_limit = cur_parm_size;
4577 else
4578 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4579 * cur_parm_size);
4581 if (total_size < agg_size
4582 && total_size <= parm_size_limit)
4584 if (dump_file)
4585 fprintf (dump_file, " ....will be split into %i components\n",
4586 new_param_count);
4587 return new_param_count;
4589 else
4590 return 0;
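/* For a by-reference parameter, CUR_PARM_SIZE above is the size of the
   pointer itself, so the limit allows the new scalar components to
   occupy at most ipa-sra-ptr-growth-factor times the pointer size.
   A sketch of raising that limit from the command line:

     gcc -O2 --param ipa-sra-ptr-growth-factor=4 test.c  */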
4593 /* The order of the following enumerators is important; we need to do extra
4594 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4595 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4596 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4598 /* Identify representatives of all accesses to all candidate parameters for
4599 IPA-SRA. Return result based on what representatives have been found. */
4601 static enum ipa_splicing_result
4602 splice_all_param_accesses (vec<access_p> &representatives)
4604 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4605 tree parm;
4606 struct access *repr;
4608 representatives.create (func_param_count);
4610 for (parm = DECL_ARGUMENTS (current_function_decl);
4611 parm;
4612 parm = DECL_CHAIN (parm))
4614 if (is_unused_scalar_param (parm))
4616 representatives.quick_push (&no_accesses_representant);
4617 if (result == NO_GOOD_ACCESS)
4618 result = UNUSED_PARAMS;
4620 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4621 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4622 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4624 repr = unmodified_by_ref_scalar_representative (parm);
4625 representatives.quick_push (repr);
4626 if (repr)
4627 result = UNMODIF_BY_REF_ACCESSES;
4629 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4631 bool ro_grp = false;
4632 repr = splice_param_accesses (parm, &ro_grp);
4633 representatives.quick_push (repr);
4635 if (repr && !no_accesses_p (repr))
4637 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4639 if (ro_grp)
4640 result = UNMODIF_BY_REF_ACCESSES;
4641 else if (result < MODIF_BY_REF_ACCESSES)
4642 result = MODIF_BY_REF_ACCESSES;
4644 else if (result < BY_VAL_ACCESSES)
4645 result = BY_VAL_ACCESSES;
4647 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4648 result = UNUSED_PARAMS;
4650 else
4651 representatives.quick_push (NULL);
4654 if (result == NO_GOOD_ACCESS)
4656 representatives.release ();
4657 return NO_GOOD_ACCESS;
4660 return result;
4663 /* Return the index of BASE in PARMS. Abort if it is not found. */
4665 static inline int
4666 get_param_index (tree base, vec<tree> parms)
4668 int i, len;
4670 len = parms.length ();
4671 for (i = 0; i < len; i++)
4672 if (parms[i] == base)
4673 return i;
4674 gcc_unreachable ();
4677 /* Convert the decisions made at the representative level into compact
4678 parameter adjustments. REPRESENTATIVES are pointers to the first
4679 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the expected
4680 final number of adjustments. */
4682 static ipa_parm_adjustment_vec
4683 turn_representatives_into_adjustments (vec<access_p> representatives,
4684 int adjustments_count)
4686 vec<tree> parms;
4687 ipa_parm_adjustment_vec adjustments;
4688 tree parm;
4689 int i;
4691 gcc_assert (adjustments_count > 0);
4692 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4693 adjustments.create (adjustments_count);
4694 parm = DECL_ARGUMENTS (current_function_decl);
4695 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4697 struct access *repr = representatives[i];
4699 if (!repr || no_accesses_p (repr))
4701 struct ipa_parm_adjustment adj;
4703 memset (&adj, 0, sizeof (adj));
4704 adj.base_index = get_param_index (parm, parms);
4705 adj.base = parm;
4706 if (!repr)
4707 adj.op = IPA_PARM_OP_COPY;
4708 else
4709 adj.op = IPA_PARM_OP_REMOVE;
4710 adj.arg_prefix = "ISRA";
4711 adjustments.quick_push (adj);
4713 else
4715 struct ipa_parm_adjustment adj;
4716 int index = get_param_index (parm, parms);
4718 for (; repr; repr = repr->next_grp)
4720 memset (&adj, 0, sizeof (adj));
4721 gcc_assert (repr->base == parm);
4722 adj.base_index = index;
4723 adj.base = repr->base;
4724 adj.type = repr->type;
4725 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4726 adj.offset = repr->offset;
4727 adj.reverse = repr->reverse;
4728 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4729 && (repr->grp_maybe_modified
4730 || repr->grp_not_necessarilly_dereferenced));
4731 adj.arg_prefix = "ISRA";
4732 adjustments.quick_push (adj);
4736 parms.release ();
4737 return adjustments;
4740 /* Analyze the collected accesses and produce a plan for what to do with the
4741 parameters, in the form of adjustments; NULL means do nothing. */
4743 static ipa_parm_adjustment_vec
4744 analyze_all_param_acesses (void)
4746 enum ipa_splicing_result repr_state;
4747 bool proceed = false;
4748 int i, adjustments_count = 0;
4749 vec<access_p> representatives;
4750 ipa_parm_adjustment_vec adjustments;
4752 repr_state = splice_all_param_accesses (representatives);
4753 if (repr_state == NO_GOOD_ACCESS)
4754 return ipa_parm_adjustment_vec ();
4756 /* If there are any parameters passed by reference which are not modified
4757 directly, we need to check whether they can be modified indirectly. */
4758 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4760 analyze_caller_dereference_legality (representatives);
4761 analyze_modified_params (representatives);
4764 for (i = 0; i < func_param_count; i++)
4766 struct access *repr = representatives[i];
4768 if (repr && !no_accesses_p (repr))
4770 if (repr->grp_scalar_ptr)
4772 adjustments_count++;
4773 if (repr->grp_not_necessarilly_dereferenced
4774 || repr->grp_maybe_modified)
4775 representatives[i] = NULL;
4776 else
4778 proceed = true;
4779 sra_stats.scalar_by_ref_to_by_val++;
4782 else
4784 int new_components = decide_one_param_reduction (repr);
4786 if (new_components == 0)
4788 representatives[i] = NULL;
4789 adjustments_count++;
4791 else
4793 adjustments_count += new_components;
4794 sra_stats.aggregate_params_reduced++;
4795 sra_stats.param_reductions_created += new_components;
4796 proceed = true;
4800 else
4802 if (no_accesses_p (repr))
4804 proceed = true;
4805 sra_stats.deleted_unused_parameters++;
4807 adjustments_count++;
4811 if (!proceed && dump_file)
4812 fprintf (dump_file, "NOT proceeding to change params.\n");
4814 if (proceed)
4815 adjustments = turn_representatives_into_adjustments (representatives,
4816 adjustments_count);
4817 else
4818 adjustments = ipa_parm_adjustment_vec ();
4820 representatives.release ();
4821 return adjustments;
4824 /* If a parameter replacement identified by ADJ does not yet exist in the form
4825 of declaration, create it and record it, otherwise return the previously
4826 created one. */
4828 static tree
4829 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4831 tree repl;
4832 if (!adj->new_ssa_base)
4834 char *pretty_name = make_fancy_name (adj->base);
4836 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4837 DECL_NAME (repl) = get_identifier (pretty_name);
4838 DECL_NAMELESS (repl) = 1;
4839 obstack_free (&name_obstack, pretty_name);
4841 adj->new_ssa_base = repl;
4843 else
4844 repl = adj->new_ssa_base;
4845 return repl;
4848 /* Find the first adjustment for a particular parameter BASE in a vector of
4849 ADJUSTMENTS which is not IPA_PARM_OP_COPY. Return NULL if there is no such
4850 adjustment. */
4852 static struct ipa_parm_adjustment *
4853 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4855 int i, len;
4857 len = adjustments.length ();
4858 for (i = 0; i < len; i++)
4860 struct ipa_parm_adjustment *adj;
4862 adj = &adjustments[i];
4863 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4864 return adj;
4867 return NULL;
4870 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4871 parameter which is to be removed because its value is not used, create a new
4872 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4873 original with it and return it. If there is no need to re-map, return NULL.
4874 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4876 static tree
4877 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4878 ipa_parm_adjustment_vec adjustments)
4880 struct ipa_parm_adjustment *adj;
4881 tree decl, repl, new_name;
4883 if (TREE_CODE (old_name) != SSA_NAME)
4884 return NULL;
4886 decl = SSA_NAME_VAR (old_name);
4887 if (decl == NULL_TREE
4888 || TREE_CODE (decl) != PARM_DECL)
4889 return NULL;
4891 adj = get_adjustment_for_base (adjustments, decl);
4892 if (!adj)
4893 return NULL;
4895 repl = get_replaced_param_substitute (adj);
4896 new_name = make_ssa_name (repl, stmt);
4897 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4898 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4900 if (dump_file)
4902 fprintf (dump_file, "replacing an SSA name of a removed param ");
4903 print_generic_expr (dump_file, old_name);
4904 fprintf (dump_file, " with ");
4905 print_generic_expr (dump_file, new_name);
4906 fprintf (dump_file, "\n");
4909 replace_uses_by (old_name, new_name);
4910 return new_name;
4913 /* If the statement STMT contains any expressions that need to be replaced with a
4914 different one as noted by ADJUSTMENTS, do so. Handle any potential type
4915 incompatibilities (GSI is used to accommodate conversion statements and must
4916 point to the statement). Return true iff the statement was modified. */
4918 static bool
4919 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4920 ipa_parm_adjustment_vec adjustments)
4922 tree *lhs_p, *rhs_p;
4923 bool any;
4925 if (!gimple_assign_single_p (stmt))
4926 return false;
4928 rhs_p = gimple_assign_rhs1_ptr (stmt);
4929 lhs_p = gimple_assign_lhs_ptr (stmt);
4931 any = ipa_modify_expr (rhs_p, false, adjustments);
4932 any |= ipa_modify_expr (lhs_p, false, adjustments);
4933 if (any)
4935 tree new_rhs = NULL_TREE;
4937 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4939 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4941 /* V_C_Es of constructors can cause trouble (PR 42714). */
4942 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4943 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4944 else
4945 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4946 NULL);
4948 else
4949 new_rhs = fold_build1_loc (gimple_location (stmt),
4950 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4951 *rhs_p);
4953 else if (REFERENCE_CLASS_P (*rhs_p)
4954 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4955 && !is_gimple_reg (*lhs_p))
4956 /* This can happen when an assignment in between two single field
4957 structures is turned into an assignment in between two pointers to
4958 scalars (PR 42237). */
4959 new_rhs = *rhs_p;
4961 if (new_rhs)
4963 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4964 true, GSI_SAME_STMT);
4966 gimple_assign_set_rhs_from_tree (gsi, tmp);
4969 return true;
4972 return false;
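/* Illustrative sketch only; the names are hypothetical.  This is the
   source-level shape behind the PR 42237 remark above: once IPA-SRA
   reduces both single-field aggregates to their scalar components, the
   aggregate assignment becomes an assignment from a memory reference to
   a register, and the RHS may have to be forced into a separate
   statement first.  */
struct sra_doc_wrap { double d; };

ATTRIBUTE_UNUSED static void
sra_doc_single_field_copy (struct sra_doc_wrap *dst,
                           const struct sra_doc_wrap *src)
{
  *dst = *src;   /* may effectively become a scalar copy after IPA-SRA */
}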
4975 /* Traverse the function body and make all modifications as described in
4976 ADJUSTMENTS. Return true iff the CFG has been changed. */
4978 bool
4979 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4981 bool cfg_changed = false;
4982 basic_block bb;
4984 FOR_EACH_BB_FN (bb, cfun)
4986 gimple_stmt_iterator gsi;
4988 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4990 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4991 tree new_lhs, old_lhs = gimple_phi_result (phi);
4992 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4993 if (new_lhs)
4995 gimple_phi_set_result (phi, new_lhs);
4996 release_ssa_name (old_lhs);
5000 gsi = gsi_start_bb (bb);
5001 while (!gsi_end_p (gsi))
5003 gimple *stmt = gsi_stmt (gsi);
5004 bool modified = false;
5005 tree *t;
5006 unsigned i;
5008 switch (gimple_code (stmt))
5010 case GIMPLE_RETURN:
5011 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
5012 if (*t != NULL_TREE)
5013 modified |= ipa_modify_expr (t, true, adjustments);
5014 break;
5016 case GIMPLE_ASSIGN:
5017 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
5018 break;
5020 case GIMPLE_CALL:
5021 /* Operands must be processed before the lhs. */
5022 for (i = 0; i < gimple_call_num_args (stmt); i++)
5024 t = gimple_call_arg_ptr (stmt, i);
5025 modified |= ipa_modify_expr (t, true, adjustments);
5028 if (gimple_call_lhs (stmt))
5030 t = gimple_call_lhs_ptr (stmt);
5031 modified |= ipa_modify_expr (t, false, adjustments);
5033 break;
5035 case GIMPLE_ASM:
5037 gasm *asm_stmt = as_a <gasm *> (stmt);
5038 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5040 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5041 modified |= ipa_modify_expr (t, true, adjustments);
5043 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5045 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5046 modified |= ipa_modify_expr (t, false, adjustments);
5049 break;
5051 default:
5052 break;
5055 def_operand_p defp;
5056 ssa_op_iter iter;
5057 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5059 tree old_def = DEF_FROM_PTR (defp);
5060 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5061 adjustments))
5063 SET_DEF (defp, new_def);
5064 release_ssa_name (old_def);
5065 modified = true;
5069 if (modified)
5071 update_stmt (stmt);
5072 if (maybe_clean_eh_stmt (stmt)
5073 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5074 cfg_changed = true;
5076 gsi_next (&gsi);
5080 return cfg_changed;
5083 /* Call gimple_debug_bind_reset_value on all debug statements describing
5084 gimple register parameters that are being removed or replaced. */
5086 static void
5087 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5089 int i, len;
5090 gimple_stmt_iterator *gsip = NULL, gsi;
5092 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5094 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5095 gsip = &gsi;
5097 len = adjustments.length ();
5098 for (i = 0; i < len; i++)
5100 struct ipa_parm_adjustment *adj;
5101 imm_use_iterator ui;
5102 gimple *stmt;
5103 gdebug *def_temp;
5104 tree name, vexpr, copy = NULL_TREE;
5105 use_operand_p use_p;
5107 adj = &adjustments[i];
5108 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5109 continue;
5110 name = ssa_default_def (cfun, adj->base);
5111 vexpr = NULL;
5112 if (name)
5113 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5115 if (gimple_clobber_p (stmt))
5117 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5118 unlink_stmt_vdef (stmt);
5119 gsi_remove (&cgsi, true);
5120 release_defs (stmt);
5121 continue;
5123 /* All other users must have been removed by
5124 ipa_sra_modify_function_body. */
5125 gcc_assert (is_gimple_debug (stmt));
5126 if (vexpr == NULL && gsip != NULL)
5128 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5129 vexpr = make_node (DEBUG_EXPR_DECL);
5130 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5131 NULL);
5132 DECL_ARTIFICIAL (vexpr) = 1;
5133 TREE_TYPE (vexpr) = TREE_TYPE (name);
5134 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5135 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5137 if (vexpr)
5139 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5140 SET_USE (use_p, vexpr);
5142 else
5143 gimple_debug_bind_reset_value (stmt);
5144 update_stmt (stmt);
5146 /* Create a VAR_DECL for debug info purposes. */
5147 if (!DECL_IGNORED_P (adj->base))
5149 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5150 VAR_DECL, DECL_NAME (adj->base),
5151 TREE_TYPE (adj->base));
5152 if (DECL_PT_UID_SET_P (adj->base))
5153 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5154 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5155 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5156 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5157 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5158 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5159 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5160 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5161 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5162 SET_DECL_RTL (copy, 0);
5163 TREE_USED (copy) = 1;
5164 DECL_CONTEXT (copy) = current_function_decl;
5165 add_local_decl (cfun, copy);
5166 DECL_CHAIN (copy) =
5167 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5168 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5170 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5172 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5173 if (vexpr)
5174 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5175 else
5176 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5177 NULL);
5178 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5183 /* Return false if all callers have at least as many actual arguments as there
5184 are formal parameters in the current function and their types match;
5185 return true otherwise. */
5187 static bool
5188 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5189 void *data ATTRIBUTE_UNUSED)
5191 struct cgraph_edge *cs;
5192 for (cs = node->callers; cs; cs = cs->next_caller)
5193 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5194 return true;
5196 return false;
5199 /* Return false if all callers' call statements have a VUSE attached; return true otherwise. */
5201 static bool
5202 some_callers_have_no_vuse_p (struct cgraph_node *node,
5203 void *data ATTRIBUTE_UNUSED)
5205 struct cgraph_edge *cs;
5206 for (cs = node->callers; cs; cs = cs->next_caller)
5207 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5208 return true;
5210 return false;
5213 /* Convert all callers of NODE. */
5215 static bool
5216 convert_callers_for_node (struct cgraph_node *node,
5217 void *data)
5219 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5220 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5221 struct cgraph_edge *cs;
5223 for (cs = node->callers; cs; cs = cs->next_caller)
5225 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5227 if (dump_file)
5228 fprintf (dump_file, "Adjusting call %s -> %s\n",
5229 cs->caller->dump_name (), cs->callee->dump_name ());
5231 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5233 pop_cfun ();
5236 for (cs = node->callers; cs; cs = cs->next_caller)
5237 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5238 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5239 compute_fn_summary (cs->caller, true);
5240 BITMAP_FREE (recomputed_callers);
5242 return true;
5245 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5247 static void
5248 convert_callers (struct cgraph_node *node, tree old_decl,
5249 ipa_parm_adjustment_vec adjustments)
5251 basic_block this_block;
5253 node->call_for_symbol_and_aliases (convert_callers_for_node,
5254 &adjustments, false);
5256 if (!encountered_recursive_call)
5257 return;
5259 FOR_EACH_BB_FN (this_block, cfun)
5261 gimple_stmt_iterator gsi;
5263 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5265 gcall *stmt;
5266 tree call_fndecl;
5267 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5268 if (!stmt)
5269 continue;
5270 call_fndecl = gimple_call_fndecl (stmt);
5271 if (call_fndecl == old_decl)
5273 if (dump_file)
5274 fprintf (dump_file, "Adjusting recursive call");
5275 gimple_call_set_fndecl (stmt, node->decl);
5276 ipa_modify_call_arguments (NULL, stmt, adjustments);
5281 return;
5284 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5285 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5287 static bool
5288 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5290 struct cgraph_node *new_node;
5291 bool cfg_changed;
5293 cgraph_edge::rebuild_edges ();
5294 free_dominance_info (CDI_DOMINATORS);
5295 pop_cfun ();
5297 /* This must be done after rebuilding cgraph edges for node above.
5298 Otherwise any recursive calls to node that are recorded in
5299 redirect_callers will be corrupted. */
5300 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5301 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5302 NULL, false, NULL, NULL,
5303 "isra");
5304 redirect_callers.release ();
5306 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5307 ipa_modify_formal_parameters (current_function_decl, adjustments);
5308 cfg_changed = ipa_sra_modify_function_body (adjustments);
5309 sra_ipa_reset_debug_stmts (adjustments);
5310 convert_callers (new_node, node->decl, adjustments);
5311 new_node->make_local ();
5312 return cfg_changed;
5315 /* Means of communication between ipa_sra_check_caller and
5316 ipa_sra_preliminary_function_checks. */
5318 struct ipa_sra_check_caller_data
5320 bool has_callers;
5321 bool bad_arg_alignment;
5322 bool has_thunk;
5325 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5326 ipa_sra_check_caller_data. Also check whether all aggregate arguments in all
5327 known calls are unit aligned and, if not, set the appropriate flag in DATA
5328 too. */
5330 static bool
5331 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5333 if (!node->callers)
5334 return false;
5336 struct ipa_sra_check_caller_data *iscc;
5337 iscc = (struct ipa_sra_check_caller_data *) data;
5338 iscc->has_callers = true;
5340 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5342 if (cs->caller->thunk.thunk_p)
5344 iscc->has_thunk = true;
5345 return true;
5347 gimple *call_stmt = cs->call_stmt;
5348 unsigned count = gimple_call_num_args (call_stmt);
5349 for (unsigned i = 0; i < count; i++)
5351 tree arg = gimple_call_arg (call_stmt, i);
5352 if (is_gimple_reg (arg))
5353 continue;
5355 tree offset;
5356 HOST_WIDE_INT bitsize, bitpos;
5357 machine_mode mode;
5358 int unsignedp, reversep, volatilep = 0;
5359 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5360 &unsignedp, &reversep, &volatilep);
5361 if (bitpos % BITS_PER_UNIT)
5363 iscc->bad_arg_alignment = true;
5364 return true;
5369 return false;
5372 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5373 its attributes; return true otherwise. NODE is the cgraph node of the current
5374 function. */
5376 static bool
5377 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5379 if (!node->can_be_local_p ())
5381 if (dump_file)
5382 fprintf (dump_file, "Function not local to this compilation unit.\n");
5383 return false;
5386 if (!node->local.can_change_signature)
5388 if (dump_file)
5389 fprintf (dump_file, "Function can not change signature.\n");
5390 return false;
5393 if (!tree_versionable_function_p (node->decl))
5395 if (dump_file)
5396 fprintf (dump_file, "Function is not versionable.\n");
5397 return false;
5400 if (!opt_for_fn (node->decl, optimize)
5401 || !opt_for_fn (node->decl, flag_ipa_sra))
5403 if (dump_file)
5404 fprintf (dump_file, "Function not optimized.\n");
5405 return false;
5408 if (DECL_VIRTUAL_P (current_function_decl))
5410 if (dump_file)
5411 fprintf (dump_file, "Function is a virtual method.\n");
5412 return false;
5415 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5416 && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5418 if (dump_file)
5419 fprintf (dump_file, "Function too big to be made truly local.\n");
5420 return false;
5423 if (cfun->stdarg)
5425 if (dump_file)
5426 fprintf (dump_file, "Function uses stdarg.\n");
5427 return false;
5430 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5431 return false;
5433 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5435 if (dump_file)
5436 fprintf (dump_file, "Always inline function will be inlined "
5437 "anyway. \n");
5438 return false;
5441 struct ipa_sra_check_caller_data iscc;
5442 memset (&iscc, 0, sizeof(iscc));
5443 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5444 if (!iscc.has_callers)
5446 if (dump_file)
5447 fprintf (dump_file,
5448 "Function has no callers in this compilation unit.\n");
5449 return false;
5452 if (iscc.bad_arg_alignment)
5454 if (dump_file)
5455 fprintf (dump_file,
5456 "A function call has an argument with non-unit alignment.\n");
5457 return false;
5460 if (iscc.has_thunk)
5462 if (dump_file)
5463 fprintf (dump_file,
5464 "A has thunk.\n");
5465 return false;
5468 return true;
5471 /* Perform early interprocedural SRA. */
5473 static unsigned int
5474 ipa_early_sra (void)
5476 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5477 ipa_parm_adjustment_vec adjustments;
5478 int ret = 0;
5480 if (!ipa_sra_preliminary_function_checks (node))
5481 return 0;
5483 sra_initialize ();
5484 sra_mode = SRA_MODE_EARLY_IPA;
5486 if (!find_param_candidates ())
5488 if (dump_file)
5489 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5490 goto simple_out;
5493 if (node->call_for_symbol_and_aliases
5494 (some_callers_have_mismatched_arguments_p, NULL, true))
5496 if (dump_file)
5497 fprintf (dump_file, "There are callers with insufficient number of "
5498 "arguments or arguments with type mismatches.\n");
5499 goto simple_out;
5502 if (node->call_for_symbol_and_aliases
5503 (some_callers_have_no_vuse_p, NULL, true))
5505 if (dump_file)
5506 fprintf (dump_file, "There are callers with no VUSE attached "
5507 "to a call stmt.\n");
5508 goto simple_out;
5511 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5512 func_param_count
5513 * last_basic_block_for_fn (cfun));
5514 final_bbs = BITMAP_ALLOC (NULL);
5516 scan_function ();
5517 if (encountered_apply_args)
5519 if (dump_file)
5520 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5521 goto out;
5524 if (encountered_unchangable_recursive_call)
5526 if (dump_file)
5527 fprintf (dump_file, "Function calls itself with insufficient "
5528 "number of arguments.\n");
5529 goto out;
5532 adjustments = analyze_all_param_acesses ();
5533 if (!adjustments.exists ())
5534 goto out;
5535 if (dump_file)
5536 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5538 if (modify_function (node, adjustments))
5539 ret = TODO_update_ssa | TODO_cleanup_cfg;
5540 else
5541 ret = TODO_update_ssa;
5542 adjustments.release ();
5544 statistics_counter_event (cfun, "Unused parameters deleted",
5545 sra_stats.deleted_unused_parameters);
5546 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5547 sra_stats.scalar_by_ref_to_by_val);
5548 statistics_counter_event (cfun, "Aggregate parameters broken up",
5549 sra_stats.aggregate_params_reduced);
5550 statistics_counter_event (cfun, "Aggregate parameter components created",
5551 sra_stats.param_reductions_created);
5553 out:
5554 BITMAP_FREE (final_bbs);
5555 free (bb_dereferences);
5556 simple_out:
5557 sra_deinitialize ();
5558 return ret;
5561 namespace {
5563 const pass_data pass_data_early_ipa_sra =
5565 GIMPLE_PASS, /* type */
5566 "eipa_sra", /* name */
5567 OPTGROUP_NONE, /* optinfo_flags */
5568 TV_IPA_SRA, /* tv_id */
5569 0, /* properties_required */
5570 0, /* properties_provided */
5571 0, /* properties_destroyed */
5572 0, /* todo_flags_start */
5573 TODO_dump_symtab, /* todo_flags_finish */
5576 class pass_early_ipa_sra : public gimple_opt_pass
5578 public:
5579 pass_early_ipa_sra (gcc::context *ctxt)
5580 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5583 /* opt_pass methods: */
5584 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5585 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5587 }; // class pass_early_ipa_sra
5589 } // anon namespace
5591 gimple_opt_pass *
5592 make_pass_early_ipa_sra (gcc::context *ctxt)
5594 return new pass_early_ipa_sra (ctxt);