gcc/tree-sra.c (official-gcc.git)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2017 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
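/* As an illustrative sketch only (this example is not part of the pass
   sources), consider a function like

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   Intraprocedural SRA would typically replace the accesses p.a and p.b with
   two independent scalar variables, after which the aggregate P itself can
   become dead and be removed by later passes.  */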
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-fnsummary.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
108 /* Enumeration of all aggregate reductions we can do. */
109 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
110 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 SRA_MODE_INTRA }; /* late intraprocedural SRA */
113 /* Global variable describing which aggregate reduction we are performing at
114 the moment. */
115 static enum sra_mode sra_mode;
117 struct assign_link;
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater or equal to offset of the root and offset+size is smaller or equal
129 to offset+size of the root. Children of an access are sorted by offset.
131 Note that accesses to parts of vector and complex number types are always
132 represented by an access to the whole complex number or a vector. It is the
133 duty of the modifying functions to replace them appropriately. */
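/* For illustration only (hypothetical declarations, not from this file):
   given

     struct inner { int x; int y; };
     struct outer { struct inner i; int z; } o;

   and accesses to o.i, o.i.x, o.i.y and o.z, the representative for o.i
   (offset 0, size 64 bits on a typical target) would be the parent of the
   representatives for o.i.x (offset 0, size 32) and o.i.y (offset 32,
   size 32), while the representative for o.z (offset 64, size 32) would be
   another root linked to it via next_grp.  */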
135 struct access
137 /* Values returned by `get_ref_base_and_extent' for each component reference.
138 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
151 /* The statement this access belongs to. */
152 gimple *stmt;
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
161 /* After access tree has been constructed, this points to the parent of the
162 current access, if there is one. NULL for roots. */
163 struct access *parent;
165 /* If this access has any children (in terms of the definition above), this
166 points to the first one. */
167 struct access *first_child;
169 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
170 described above. In IPA-SRA this is a pointer to the next access
171 belonging to the same group (having the same representative). */
172 struct access *next_sibling;
174 /* Pointers to the first and last element in the linked list of assign
175 links. */
176 struct assign_link *first_link, *last_link;
178 /* Pointer to the next access in the work queue. */
179 struct access *next_queued;
181 /* Replacement variable for this access "region." Never to be accessed
182 directly, always only by the means of get_access_replacement() and only
183 when grp_to_be_replaced flag is set. */
184 tree replacement_decl;
186 /* Is this access an access to a non-addressable field? */
187 unsigned non_addressable : 1;
189 /* Is this access made in reverse storage order? */
190 unsigned reverse : 1;
192 /* Is this particular access a write access? */
193 unsigned write : 1;
195 /* Is this access currently in the work queue? */
196 unsigned grp_queued : 1;
198 /* Does this group contain a write access? This flag is propagated down the
199 access tree. */
200 unsigned grp_write : 1;
202 /* Does this group contain a read access? This flag is propagated down the
203 access tree. */
204 unsigned grp_read : 1;
206 /* Does this group contain a read access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_read : 1;
210 /* Does this group contain a write access that comes from an assignment
211 statement? This flag is propagated down the access tree. */
212 unsigned grp_assignment_write : 1;
214 /* Does this group contain a read access through a scalar type? This flag is
215 not propagated in the access tree in any direction. */
216 unsigned grp_scalar_read : 1;
218 /* Does this group contain a write access through a scalar type? This flag
219 is not propagated in the access tree in any direction. */
220 unsigned grp_scalar_write : 1;
222 /* Is this access an artificial one created to scalarize some record
223 entirely? */
224 unsigned grp_total_scalarization : 1;
226 /* Other passes of the analysis use this bit to make function
227 analyze_access_subtree create scalar replacements for this group if
228 possible. */
229 unsigned grp_hint : 1;
231 /* Is the subtree rooted in this access fully covered by scalar
232 replacements? */
233 unsigned grp_covered : 1;
235 /* If set to true, this access and all below it in an access tree must not be
236 scalarized. */
237 unsigned grp_unscalarizable_region : 1;
239 /* Whether data have been written to parts of the aggregate covered by this
240 access which are not to be scalarized. This flag is propagated up in the
241 access tree. */
242 unsigned grp_unscalarized_data : 1;
244 /* Does this access and/or group contain a write access through a
245 BIT_FIELD_REF? */
246 unsigned grp_partial_lhs : 1;
248 /* Set when a scalar replacement should be created for this variable. */
249 unsigned grp_to_be_replaced : 1;
251 /* Set when we want a replacement for the sole purpose of having it in
252 generated debug statements. */
253 unsigned grp_to_be_debug_replaced : 1;
255 /* Should TREE_NO_WARNING of a replacement be set? */
256 unsigned grp_no_warning : 1;
258 /* Is it possible that the group refers to data which might be (directly or
259 otherwise) modified? */
260 unsigned grp_maybe_modified : 1;
262 /* Set when this is a representative of a pointer to scalar (i.e. by
263 reference) parameter which we consider for turning into a plain scalar
264 (i.e. a by value parameter). */
265 unsigned grp_scalar_ptr : 1;
267 /* Set when we discover that this pointer is not safe to dereference in the
268 caller. */
269 unsigned grp_not_necessarilly_dereferenced : 1;
272 typedef struct access *access_p;
275 /* Alloc pool for allocating access structures. */
276 static object_allocator<struct access> access_pool ("SRA accesses");
278 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
279 are used to propagate subaccesses from rhs to lhs as long as they don't
280 conflict with what is already there. */
281 struct assign_link
283 struct access *lacc, *racc;
284 struct assign_link *next;
287 /* Alloc pool for allocating assign link structures. */
288 static object_allocator<assign_link> assign_link_pool ("SRA links");
290 /* Base (tree) -> Vector (vec<access_p> *) map. */
291 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
293 /* Candidate hash table helpers. */
295 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
297 static inline hashval_t hash (const tree_node *);
298 static inline bool equal (const tree_node *, const tree_node *);
301 /* Hash a tree in a uid_decl_map. */
303 inline hashval_t
304 uid_decl_hasher::hash (const tree_node *item)
306 return item->decl_minimal.uid;
309 /* Return true if the DECL_UID in both trees are equal. */
311 inline bool
312 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
314 return (a->decl_minimal.uid == b->decl_minimal.uid);
317 /* Set of candidates. */
318 static bitmap candidate_bitmap;
319 static hash_table<uid_decl_hasher> *candidates;
321 /* For a candidate UID, return the candidate's decl. */
323 static inline tree
324 candidate (unsigned uid)
326 tree_node t;
327 t.decl_minimal.uid = uid;
328 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
331 /* Bitmap of candidates which we should try to entirely scalarize away and
332 those which cannot be (because they are, and need to be, used as a whole). */
333 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
335 /* Bitmap of candidates in the constant pool, which cannot be scalarized
336 because this would produce non-constant expressions (e.g. Ada). */
337 static bitmap disqualified_constants;
339 /* Obstack for creation of fancy names. */
340 static struct obstack name_obstack;
342 /* Head of a linked list of accesses that need to have their subaccesses
343 propagated to their assignment counterparts. */
344 static struct access *work_queue_head;
346 /* Number of parameters of the analyzed function when doing early ipa SRA. */
347 static int func_param_count;
349 /* scan_function sets the following to true if it encounters a call to
350 __builtin_apply_args. */
351 static bool encountered_apply_args;
353 /* Set by scan_function when it finds a recursive call. */
354 static bool encountered_recursive_call;
356 /* Set by scan_function when it finds a recursive call with fewer actual
357 arguments than formal parameters. */
358 static bool encountered_unchangable_recursive_call;
360 /* This is a table in which for each basic block and parameter there is a
361 distance (offset + size) in that parameter which is dereferenced and
362 accessed in that BB. */
363 static HOST_WIDE_INT *bb_dereferences;
364 /* Bitmap of BBs that can cause the function to "stop" progressing by
365 returning, throwing externally, looping infinitely or calling a function
366 which might abort, etc. */
367 static bitmap final_bbs;
369 /* Representative of no accesses at all. */
370 static struct access no_accesses_representant;
372 /* Predicate to test the special value. */
374 static inline bool
375 no_accesses_p (struct access *access)
377 return access == &no_accesses_representant;
380 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
381 representative fields are dumped, otherwise those which only describe the
382 individual access are. */
384 static struct
386 /* Number of processed aggregates is readily available in
387 analyze_all_variable_accesses and so is not stored here. */
389 /* Number of created scalar replacements. */
390 int replacements;
392 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
393 expression. */
394 int exprs;
396 /* Number of statements created by generate_subtree_copies. */
397 int subtree_copies;
399 /* Number of statements created by load_assign_lhs_subreplacements. */
400 int subreplacements;
402 /* Number of times sra_modify_assign has deleted a statement. */
403 int deleted;
405 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
406 RHS separately due to type conversions or nonexistent matching
407 references. */
408 int separate_lhs_rhs_handling;
410 /* Number of parameters that were removed because they were unused. */
411 int deleted_unused_parameters;
413 /* Number of scalars passed as parameters by reference that have been
414 converted to be passed by value. */
415 int scalar_by_ref_to_by_val;
417 /* Number of aggregate parameters that were replaced by one or more of their
418 components. */
419 int aggregate_params_reduced;
421 /* Number of components created when splitting aggregate parameters. */
422 int param_reductions_created;
423 } sra_stats;
425 static void
426 dump_access (FILE *f, struct access *access, bool grp)
428 fprintf (f, "access { ");
429 fprintf (f, "base = (%d)'", DECL_UID (access->base));
430 print_generic_expr (f, access->base);
431 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
432 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
433 fprintf (f, ", expr = ");
434 print_generic_expr (f, access->expr);
435 fprintf (f, ", type = ");
436 print_generic_expr (f, access->type);
437 fprintf (f, ", non_addressable = %d, reverse = %d",
438 access->non_addressable, access->reverse);
439 if (grp)
440 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
441 "grp_assignment_write = %d, grp_scalar_read = %d, "
442 "grp_scalar_write = %d, grp_total_scalarization = %d, "
443 "grp_hint = %d, grp_covered = %d, "
444 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
445 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
446 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
447 "grp_not_necessarilly_dereferenced = %d\n",
448 access->grp_read, access->grp_write, access->grp_assignment_read,
449 access->grp_assignment_write, access->grp_scalar_read,
450 access->grp_scalar_write, access->grp_total_scalarization,
451 access->grp_hint, access->grp_covered,
452 access->grp_unscalarizable_region, access->grp_unscalarized_data,
453 access->grp_partial_lhs, access->grp_to_be_replaced,
454 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
455 access->grp_not_necessarilly_dereferenced);
456 else
457 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
458 "grp_partial_lhs = %d\n",
459 access->write, access->grp_total_scalarization,
460 access->grp_partial_lhs);
463 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
465 static void
466 dump_access_tree_1 (FILE *f, struct access *access, int level)
470 int i;
472 for (i = 0; i < level; i++)
473 fputs ("* ", f);
475 dump_access (f, access, true);
477 if (access->first_child)
478 dump_access_tree_1 (f, access->first_child, level + 1);
480 access = access->next_sibling;
482 while (access);
485 /* Dump all access trees for a variable, given the pointer to the first root in
486 ACCESS. */
488 static void
489 dump_access_tree (FILE *f, struct access *access)
491 for (; access; access = access->next_grp)
492 dump_access_tree_1 (f, access, 0);
495 /* Return true iff ACC is non-NULL and has subaccesses. */
497 static inline bool
498 access_has_children_p (struct access *acc)
500 return acc && acc->first_child;
503 /* Return true iff ACC is (partly) covered by at least one replacement. */
505 static bool
506 access_has_replacements_p (struct access *acc)
508 struct access *child;
509 if (acc->grp_to_be_replaced)
510 return true;
511 for (child = acc->first_child; child; child = child->next_sibling)
512 if (access_has_replacements_p (child))
513 return true;
514 return false;
517 /* Return a vector of pointers to accesses for the variable given in BASE or
518 NULL if there is none. */
520 static vec<access_p> *
521 get_base_access_vector (tree base)
523 return base_access_vec->get (base);
526 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
527 in ACCESS. Return NULL if it cannot be found. */
529 static struct access *
530 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
531 HOST_WIDE_INT size)
533 while (access && (access->offset != offset || access->size != size))
535 struct access *child = access->first_child;
537 while (child && (child->offset + child->size <= offset))
538 child = child->next_sibling;
539 access = child;
542 return access;
545 /* Return the first group representative for DECL or NULL if none exists. */
547 static struct access *
548 get_first_repr_for_decl (tree base)
550 vec<access_p> *access_vec;
552 access_vec = get_base_access_vector (base);
553 if (!access_vec)
554 return NULL;
556 return (*access_vec)[0];
559 /* Find an access representative for the variable BASE and given OFFSET and
560 SIZE. Requires that access trees have already been built. Return NULL if
561 it cannot be found. */
563 static struct access *
564 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
565 HOST_WIDE_INT size)
567 struct access *access;
569 access = get_first_repr_for_decl (base);
570 while (access && (access->offset + access->size <= offset))
571 access = access->next_grp;
572 if (!access)
573 return NULL;
575 return find_access_in_subtree (access, offset, size);
578 /* Add LINK to the linked list of assign links of RACC. */
579 static void
580 add_link_to_rhs (struct access *racc, struct assign_link *link)
582 gcc_assert (link->racc == racc);
584 if (!racc->first_link)
586 gcc_assert (!racc->last_link);
587 racc->first_link = link;
589 else
590 racc->last_link->next = link;
592 racc->last_link = link;
593 link->next = NULL;
596 /* Move all link structures in their linked list in OLD_RACC to the linked list
597 in NEW_RACC. */
598 static void
599 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
601 if (!old_racc->first_link)
603 gcc_assert (!old_racc->last_link);
604 return;
607 if (new_racc->first_link)
609 gcc_assert (!new_racc->last_link->next);
610 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
612 new_racc->last_link->next = old_racc->first_link;
613 new_racc->last_link = old_racc->last_link;
615 else
617 gcc_assert (!new_racc->last_link);
619 new_racc->first_link = old_racc->first_link;
620 new_racc->last_link = old_racc->last_link;
622 old_racc->first_link = old_racc->last_link = NULL;
625 /* Add ACCESS to the work queue (which is actually a stack). */
627 static void
628 add_access_to_work_queue (struct access *access)
630 if (!access->grp_queued)
632 gcc_assert (!access->next_queued);
633 access->next_queued = work_queue_head;
634 access->grp_queued = 1;
635 work_queue_head = access;
639 /* Pop an access from the work queue, and return it, assuming there is one. */
641 static struct access *
642 pop_access_from_work_queue (void)
644 struct access *access = work_queue_head;
646 work_queue_head = access->next_queued;
647 access->next_queued = NULL;
648 access->grp_queued = 0;
649 return access;
653 /* Allocate necessary structures. */
655 static void
656 sra_initialize (void)
658 candidate_bitmap = BITMAP_ALLOC (NULL);
659 candidates = new hash_table<uid_decl_hasher>
660 (vec_safe_length (cfun->local_decls) / 2);
661 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
662 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
663 disqualified_constants = BITMAP_ALLOC (NULL);
664 gcc_obstack_init (&name_obstack);
665 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
666 memset (&sra_stats, 0, sizeof (sra_stats));
667 encountered_apply_args = false;
668 encountered_recursive_call = false;
669 encountered_unchangable_recursive_call = false;
672 /* Deallocate all general structures. */
674 static void
675 sra_deinitialize (void)
677 BITMAP_FREE (candidate_bitmap);
678 delete candidates;
679 candidates = NULL;
680 BITMAP_FREE (should_scalarize_away_bitmap);
681 BITMAP_FREE (cannot_scalarize_away_bitmap);
682 BITMAP_FREE (disqualified_constants);
683 access_pool.release ();
684 assign_link_pool.release ();
685 obstack_free (&name_obstack, NULL);
687 delete base_access_vec;
690 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
692 static bool constant_decl_p (tree decl)
694 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
698 /* Mark LHS of assign links out of ACCESS and its children as written to. */
700 static void
701 process_subtree_disqualification (struct access *access)
703 struct access *child;
704 for (struct assign_link *link = access->first_link; link; link = link->next)
705 link->lacc->grp_write = true;
706 for (child = access->first_child; child; child = child->next_sibling)
707 process_subtree_disqualification (child);
710 /* Remove DECL from candidates for SRA and write REASON to the dump file if
711 there is one. */
712 static void
713 disqualify_candidate (tree decl, const char *reason)
715 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
716 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
717 if (constant_decl_p (decl))
718 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
720 if (dump_file && (dump_flags & TDF_DETAILS))
722 fprintf (dump_file, "! Disqualifying ");
723 print_generic_expr (dump_file, decl);
724 fprintf (dump_file, " - %s\n", reason);
727 struct access *access = get_first_repr_for_decl (decl);
728 while (access)
730 process_subtree_disqualification (access);
731 access = access->next_grp;
735 /* Return true iff the type contains a field or an element which does not allow
736 scalarization. */
738 static bool
739 type_internals_preclude_sra_p (tree type, const char **msg)
741 tree fld;
742 tree et;
744 switch (TREE_CODE (type))
746 case RECORD_TYPE:
747 case UNION_TYPE:
748 case QUAL_UNION_TYPE:
749 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
750 if (TREE_CODE (fld) == FIELD_DECL)
752 tree ft = TREE_TYPE (fld);
754 if (TREE_THIS_VOLATILE (fld))
756 *msg = "volatile structure field";
757 return true;
759 if (!DECL_FIELD_OFFSET (fld))
761 *msg = "no structure field offset";
762 return true;
764 if (!DECL_SIZE (fld))
766 *msg = "zero structure field size";
767 return true;
769 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
771 *msg = "structure field offset not fixed";
772 return true;
774 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
776 *msg = "structure field size not fixed";
777 return true;
779 if (!tree_fits_shwi_p (bit_position (fld)))
781 *msg = "structure field size too big";
782 return true;
784 if (AGGREGATE_TYPE_P (ft)
785 && int_bit_position (fld) % BITS_PER_UNIT != 0)
787 *msg = "structure field is bit field";
788 return true;
791 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
792 return true;
795 return false;
797 case ARRAY_TYPE:
798 et = TREE_TYPE (type);
800 if (TYPE_VOLATILE (et))
802 *msg = "element type is volatile";
803 return true;
806 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
807 return true;
809 return false;
811 default:
812 return false;
816 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
817 base variable if it is. Return T if it is not an SSA_NAME. */
819 static tree
820 get_ssa_base_param (tree t)
822 if (TREE_CODE (t) == SSA_NAME)
824 if (SSA_NAME_IS_DEFAULT_DEF (t))
825 return SSA_NAME_VAR (t);
826 else
827 return NULL_TREE;
829 return t;
832 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
833 belongs to, unless the BB has already been marked as potentially
834 final. */
836 static void
837 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
839 basic_block bb = gimple_bb (stmt);
840 int idx, parm_index = 0;
841 tree parm;
843 if (bitmap_bit_p (final_bbs, bb->index))
844 return;
846 for (parm = DECL_ARGUMENTS (current_function_decl);
847 parm && parm != base;
848 parm = DECL_CHAIN (parm))
849 parm_index++;
851 gcc_assert (parm_index < func_param_count);
853 idx = bb->index * func_param_count + parm_index;
854 if (bb_dereferences[idx] < dist)
855 bb_dereferences[idx] = dist;
858 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
859 the three fields. Also add it to the vector of accesses corresponding to
860 the base. Finally, return the new access. */
862 static struct access *
863 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
865 struct access *access = access_pool.allocate ();
867 memset (access, 0, sizeof (struct access));
868 access->base = base;
869 access->offset = offset;
870 access->size = size;
872 base_access_vec->get_or_insert (base).safe_push (access);
874 return access;
877 static bool maybe_add_sra_candidate (tree);
879 /* Create and insert an access for EXPR. Return the created access, or NULL
880 if that is not possible. Also scan for uses of the constant pool as we go
881 along and add them to the candidates. */
883 static struct access *
884 create_access (tree expr, gimple *stmt, bool write)
886 struct access *access;
887 HOST_WIDE_INT offset, size, max_size;
888 tree base = expr;
889 bool reverse, ptr, unscalarizable_region = false;
891 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
893 if (sra_mode == SRA_MODE_EARLY_IPA
894 && TREE_CODE (base) == MEM_REF)
896 base = get_ssa_base_param (TREE_OPERAND (base, 0));
897 if (!base)
898 return NULL;
899 ptr = true;
901 else
902 ptr = false;
904 /* For constant-pool entries, check we can substitute the constant value. */
905 if (constant_decl_p (base)
906 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
908 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
909 if (expr != base
910 && !is_gimple_reg_type (TREE_TYPE (expr))
911 && dump_file && (dump_flags & TDF_DETAILS))
913 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
914 and elements of multidimensional arrays (which are
915 multi-element arrays in their own right). */
916 fprintf (dump_file, "Allowing non-reg-type load of part"
917 " of constant-pool entry: ");
918 print_generic_expr (dump_file, expr);
920 maybe_add_sra_candidate (base);
923 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
924 return NULL;
926 if (sra_mode == SRA_MODE_EARLY_IPA)
928 if (size < 0 || size != max_size)
930 disqualify_candidate (base, "Encountered a variable sized access.");
931 return NULL;
933 if (TREE_CODE (expr) == COMPONENT_REF
934 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
936 disqualify_candidate (base, "Encountered a bit-field access.");
937 return NULL;
939 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
941 if (ptr)
942 mark_parm_dereference (base, offset + size, stmt);
944 else
946 if (size != max_size)
948 size = max_size;
949 unscalarizable_region = true;
951 if (size < 0)
953 disqualify_candidate (base, "Encountered an unconstrained access.");
954 return NULL;
958 access = create_access_1 (base, offset, size);
959 access->expr = expr;
960 access->type = TREE_TYPE (expr);
961 access->write = write;
962 access->grp_unscalarizable_region = unscalarizable_region;
963 access->stmt = stmt;
964 access->reverse = reverse;
966 if (TREE_CODE (expr) == COMPONENT_REF
967 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
968 access->non_addressable = 1;
970 return access;
974 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
975 ARRAY_TYPE with fields that are either of gimple register types (excluding
976 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
977 we are considering a decl from constant pool. If it is false, char arrays
978 will be refused. */
980 static bool
981 scalarizable_type_p (tree type, bool const_decl)
983 gcc_assert (!is_gimple_reg_type (type));
984 if (type_contains_placeholder_p (type))
985 return false;
987 switch (TREE_CODE (type))
989 case RECORD_TYPE:
990 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
991 if (TREE_CODE (fld) == FIELD_DECL)
993 tree ft = TREE_TYPE (fld);
995 if (DECL_BIT_FIELD (fld))
996 return false;
998 if (!is_gimple_reg_type (ft)
999 && !scalarizable_type_p (ft, const_decl))
1000 return false;
1003 return true;
1005 case ARRAY_TYPE:
1007 HOST_WIDE_INT min_elem_size;
1008 if (const_decl)
1009 min_elem_size = 0;
1010 else
1011 min_elem_size = BITS_PER_UNIT;
1013 if (TYPE_DOMAIN (type) == NULL_TREE
1014 || !tree_fits_shwi_p (TYPE_SIZE (type))
1015 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1016 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1017 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1018 return false;
1019 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1020 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1021 /* Zero-element array, should not prevent scalarization. */
1023 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1024 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1025 /* Variable-length array, do not allow scalarization. */
1026 return false;
1028 tree elem = TREE_TYPE (type);
1029 if (!is_gimple_reg_type (elem)
1030 && !scalarizable_type_p (elem, const_decl))
1031 return false;
1032 return true;
1034 default:
1035 return false;
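/* As a hypothetical example of the above predicate: a type such as

     struct ok { int a; float b[4]; };

   is considered scalarizable, whereas

     struct bad { int a : 3; int b : 5; };

   is not, because its fields are bit-fields.  */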
1039 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1041 /* Create total_scalarization accesses for all scalar fields of a member
1042 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1043 must be the top-most VAR_DECL representing the variable; within that,
1044 OFFSET locates the member and REF must be the memory reference expression for
1045 the member. */
1047 static void
1048 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1050 switch (TREE_CODE (decl_type))
1052 case RECORD_TYPE:
1053 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1054 if (TREE_CODE (fld) == FIELD_DECL)
1056 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1057 tree ft = TREE_TYPE (fld);
1058 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1060 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1061 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1062 nref, ft);
1064 break;
1065 case ARRAY_TYPE:
1067 tree elemtype = TREE_TYPE (decl_type);
1068 tree elem_size = TYPE_SIZE (elemtype);
1069 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1070 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1071 gcc_assert (el_size > 0);
1073 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1074 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1075 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1076 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1077 if (maxidx)
1079 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1080 tree domain = TYPE_DOMAIN (decl_type);
1081 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1082 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1083 offset_int idx = wi::to_offset (minidx);
1084 offset_int max = wi::to_offset (maxidx);
1085 if (!TYPE_UNSIGNED (domain))
1087 idx = wi::sext (idx, TYPE_PRECISION (domain));
1088 max = wi::sext (max, TYPE_PRECISION (domain));
1090 for (int el_off = offset; idx <= max; ++idx)
1092 tree nref = build4 (ARRAY_REF, elemtype,
1093 ref,
1094 wide_int_to_tree (domain, idx),
1095 NULL_TREE, NULL_TREE);
1096 scalarize_elem (base, el_off, el_size,
1097 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1098 nref, elemtype);
1099 el_off += el_size;
1103 break;
1104 default:
1105 gcc_unreachable ();
1109 /* Create total_scalarization accesses for a member of type TYPE, which must
1110 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1111 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1112 the member, REVERSE gives its storage order, and REF must be the reference
1113 expression for it. */
1115 static void
1116 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1117 tree ref, tree type)
1119 if (is_gimple_reg_type (type))
1121 struct access *access = create_access_1 (base, pos, size);
1122 access->expr = ref;
1123 access->type = type;
1124 access->grp_total_scalarization = 1;
1125 access->reverse = reverse;
1126 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1128 else
1129 completely_scalarize (base, type, pos, ref);
1132 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1133 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1135 static void
1136 create_total_scalarization_access (tree var)
1138 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1139 struct access *access;
1141 access = create_access_1 (var, 0, size);
1142 access->expr = var;
1143 access->type = TREE_TYPE (var);
1144 access->grp_total_scalarization = 1;
1147 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1149 static inline bool
1150 contains_view_convert_expr_p (const_tree ref)
1152 while (handled_component_p (ref))
1154 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1155 return true;
1156 ref = TREE_OPERAND (ref, 0);
1159 return false;
1162 /* Search the given tree for a declaration by skipping handled components and
1163 exclude it from the candidates. */
1165 static void
1166 disqualify_base_of_expr (tree t, const char *reason)
1168 t = get_base_address (t);
1169 if (sra_mode == SRA_MODE_EARLY_IPA
1170 && TREE_CODE (t) == MEM_REF)
1171 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1173 if (t && DECL_P (t))
1174 disqualify_candidate (t, reason);
1177 /* Scan expression EXPR and create access structures for all accesses to
1178 candidates for scalarization. Return the created access or NULL if none is
1179 created. */
1181 static struct access *
1182 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1184 struct access *ret = NULL;
1185 bool partial_ref;
1187 if (TREE_CODE (expr) == BIT_FIELD_REF
1188 || TREE_CODE (expr) == IMAGPART_EXPR
1189 || TREE_CODE (expr) == REALPART_EXPR)
1191 expr = TREE_OPERAND (expr, 0);
1192 partial_ref = true;
1194 else
1195 partial_ref = false;
1197 /* We need to dive through V_C_Es in order to get the size of its parameter
1198 and not the result type. Ada produces such statements. We are also
1199 capable of handling the topmost V_C_E but not any of those buried in other
1200 handled components. */
1201 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1202 expr = TREE_OPERAND (expr, 0);
1204 if (contains_view_convert_expr_p (expr))
1206 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1207 "component.");
1208 return NULL;
1210 if (TREE_THIS_VOLATILE (expr))
1212 disqualify_base_of_expr (expr, "part of a volatile reference.");
1213 return NULL;
1216 switch (TREE_CODE (expr))
1218 case MEM_REF:
1219 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1220 && sra_mode != SRA_MODE_EARLY_IPA)
1221 return NULL;
1222 /* fall through */
1223 case VAR_DECL:
1224 case PARM_DECL:
1225 case RESULT_DECL:
1226 case COMPONENT_REF:
1227 case ARRAY_REF:
1228 case ARRAY_RANGE_REF:
1229 ret = create_access (expr, stmt, write);
1230 break;
1232 default:
1233 break;
1236 if (write && partial_ref && ret)
1237 ret->grp_partial_lhs = 1;
1239 return ret;
1242 /* Scan expression EXPR and create access structures for all accesses to
1243 candidates for scalarization. Return true if any access has been inserted.
1244 STMT must be the statement from which the expression is taken, WRITE must be
1245 true if the expression is a store and false otherwise. */
1247 static bool
1248 build_access_from_expr (tree expr, gimple *stmt, bool write)
1250 struct access *access;
1252 access = build_access_from_expr_1 (expr, stmt, write);
1253 if (access)
1255 /* This means the aggregate is accessed as a whole in a way other than an
1256 assign statement and thus cannot be removed even if we had a scalar
1257 replacement for everything. */
1258 if (cannot_scalarize_away_bitmap)
1259 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1260 return true;
1262 return false;
1265 /* Return the single non-EH successor edge of BB or NULL if there is none or
1266 more than one. */
1268 static edge
1269 single_non_eh_succ (basic_block bb)
1271 edge e, res = NULL;
1272 edge_iterator ei;
1274 FOR_EACH_EDGE (e, ei, bb->succs)
1275 if (!(e->flags & EDGE_EH))
1277 if (res)
1278 return NULL;
1279 res = e;
1282 return res;
1285 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1286 there is no alternative spot to put statements SRA might need to
1287 generate after it. The spot we are looking for is an edge leading to a
1288 single non-EH successor, if it exists and is indeed single. RHS may be
1289 NULL, in that case ignore it. */
1291 static bool
1292 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1294 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1295 && stmt_ends_bb_p (stmt))
1297 if (single_non_eh_succ (gimple_bb (stmt)))
1298 return false;
1300 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1301 if (rhs)
1302 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1303 return true;
1305 return false;
1308 /* Return true if the nature of BASE is such that it contains data even if
1309 there is no write to it in the function. */
1311 static bool
1312 comes_initialized_p (tree base)
1314 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1317 /* Scan expressions occurring in STMT, create access structures for all accesses
1318 to candidates for scalarization and remove those candidates which occur in
1319 statements or expressions that prevent them from being split apart. Return
1320 true if any access has been inserted. */
1322 static bool
1323 build_accesses_from_assign (gimple *stmt)
1325 tree lhs, rhs;
1326 struct access *lacc, *racc;
1328 if (!gimple_assign_single_p (stmt)
1329 /* Scope clobbers don't influence scalarization. */
1330 || gimple_clobber_p (stmt))
1331 return false;
1333 lhs = gimple_assign_lhs (stmt);
1334 rhs = gimple_assign_rhs1 (stmt);
1336 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1337 return false;
1339 racc = build_access_from_expr_1 (rhs, stmt, false);
1340 lacc = build_access_from_expr_1 (lhs, stmt, true);
1342 if (lacc)
1344 lacc->grp_assignment_write = 1;
1345 if (storage_order_barrier_p (rhs))
1346 lacc->grp_unscalarizable_region = 1;
1349 if (racc)
1351 racc->grp_assignment_read = 1;
1352 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1353 && !is_gimple_reg_type (racc->type))
1354 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1355 if (storage_order_barrier_p (lhs))
1356 racc->grp_unscalarizable_region = 1;
1359 if (lacc && racc
1360 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1361 && !lacc->grp_unscalarizable_region
1362 && !racc->grp_unscalarizable_region
1363 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1364 && lacc->size == racc->size
1365 && useless_type_conversion_p (lacc->type, racc->type))
1367 struct assign_link *link;
1369 link = assign_link_pool.allocate ();
1370 memset (link, 0, sizeof (struct assign_link));
1372 link->lacc = lacc;
1373 link->racc = racc;
1374 add_link_to_rhs (racc, link);
1375 /* Let's delay marking the areas as written until propagation of accesses
1376 across the link, unless the nature of the rhs tells us that its data comes
1377 from elsewhere. */
1378 if (!comes_initialized_p (racc->base))
1379 lacc->write = false;
1382 return lacc || racc;
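/* Illustrative example (not from this file): for an aggregate assignment

     d = s;

   where both D and S are candidates, the two accesses created above are
   connected by an assign_link, which the later propagation stage uses to
   copy the subaccess layout discovered on S over to the accesses of D.  */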
1385 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1386 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1388 static bool
1389 asm_visit_addr (gimple *, tree op, tree, void *)
1391 op = get_base_address (op);
1392 if (op
1393 && DECL_P (op))
1394 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1396 return false;
1399 /* Return true iff callsite CALL has at least as many actual arguments as there
1400 are formal parameters of the function currently processed by IPA-SRA and
1401 their types match. */
1403 static inline bool
1404 callsite_arguments_match_p (gimple *call)
1406 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1407 return false;
1409 tree parm;
1410 int i;
1411 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1412 parm;
1413 parm = DECL_CHAIN (parm), i++)
1415 tree arg = gimple_call_arg (call, i);
1416 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1417 return false;
1419 return true;
1422 /* Scan function and look for interesting expressions and create access
1423 structures for them. Return true iff any access is created. */
1425 static bool
1426 scan_function (void)
1428 basic_block bb;
1429 bool ret = false;
1431 FOR_EACH_BB_FN (bb, cfun)
1433 gimple_stmt_iterator gsi;
1434 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1436 gimple *stmt = gsi_stmt (gsi);
1437 tree t;
1438 unsigned i;
1440 if (final_bbs && stmt_can_throw_external (stmt))
1441 bitmap_set_bit (final_bbs, bb->index);
1442 switch (gimple_code (stmt))
1444 case GIMPLE_RETURN:
1445 t = gimple_return_retval (as_a <greturn *> (stmt));
1446 if (t != NULL_TREE)
1447 ret |= build_access_from_expr (t, stmt, false);
1448 if (final_bbs)
1449 bitmap_set_bit (final_bbs, bb->index);
1450 break;
1452 case GIMPLE_ASSIGN:
1453 ret |= build_accesses_from_assign (stmt);
1454 break;
1456 case GIMPLE_CALL:
1457 for (i = 0; i < gimple_call_num_args (stmt); i++)
1458 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1459 stmt, false);
1461 if (sra_mode == SRA_MODE_EARLY_IPA)
1463 tree dest = gimple_call_fndecl (stmt);
1464 int flags = gimple_call_flags (stmt);
1466 if (dest)
1468 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1469 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1470 encountered_apply_args = true;
1471 if (recursive_call_p (current_function_decl, dest))
1473 encountered_recursive_call = true;
1474 if (!callsite_arguments_match_p (stmt))
1475 encountered_unchangable_recursive_call = true;
1479 if (final_bbs
1480 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1481 bitmap_set_bit (final_bbs, bb->index);
1484 t = gimple_call_lhs (stmt);
1485 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1486 ret |= build_access_from_expr (t, stmt, true);
1487 break;
1489 case GIMPLE_ASM:
1491 gasm *asm_stmt = as_a <gasm *> (stmt);
1492 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1493 asm_visit_addr);
1494 if (final_bbs)
1495 bitmap_set_bit (final_bbs, bb->index);
1497 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1499 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1500 ret |= build_access_from_expr (t, asm_stmt, false);
1502 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1504 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1505 ret |= build_access_from_expr (t, asm_stmt, true);
1508 break;
1510 default:
1511 break;
1516 return ret;
1519 /* Helper of QSORT function. There are pointers to accesses in the array. An
1520 access is considered smaller than another if it has a smaller offset or if the
1521 offsets are the same but its size is bigger. */
1523 static int
1524 compare_access_positions (const void *a, const void *b)
1526 const access_p *fp1 = (const access_p *) a;
1527 const access_p *fp2 = (const access_p *) b;
1528 const access_p f1 = *fp1;
1529 const access_p f2 = *fp2;
1531 if (f1->offset != f2->offset)
1532 return f1->offset < f2->offset ? -1 : 1;
1534 if (f1->size == f2->size)
1536 if (f1->type == f2->type)
1537 return 0;
1538 /* Put any non-aggregate type before any aggregate type. */
1539 else if (!is_gimple_reg_type (f1->type)
1540 && is_gimple_reg_type (f2->type))
1541 return 1;
1542 else if (is_gimple_reg_type (f1->type)
1543 && !is_gimple_reg_type (f2->type))
1544 return -1;
1545 /* Put any complex or vector type before any other scalar type. */
1546 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1547 && TREE_CODE (f1->type) != VECTOR_TYPE
1548 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1549 || TREE_CODE (f2->type) == VECTOR_TYPE))
1550 return 1;
1551 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1552 || TREE_CODE (f1->type) == VECTOR_TYPE)
1553 && TREE_CODE (f2->type) != COMPLEX_TYPE
1554 && TREE_CODE (f2->type) != VECTOR_TYPE)
1555 return -1;
1556 /* Put the integral type with the bigger precision first. */
1557 else if (INTEGRAL_TYPE_P (f1->type)
1558 && INTEGRAL_TYPE_P (f2->type))
1559 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1560 /* Put any integral type with non-full precision last. */
1561 else if (INTEGRAL_TYPE_P (f1->type)
1562 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1563 != TYPE_PRECISION (f1->type)))
1564 return 1;
1565 else if (INTEGRAL_TYPE_P (f2->type)
1566 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1567 != TYPE_PRECISION (f2->type)))
1568 return -1;
1569 /* Stabilize the sort. */
1570 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1573 /* We want the bigger accesses first, thus the opposite operator in the next
1574 line: */
1575 return f1->size > f2->size ? -1 : 1;
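/* For instance (illustrative values), accesses with (offset, size) pairs
   (0, 64), (0, 32) and (32, 32) sort exactly in that order: equal offsets
   put the larger access first, so a representative covering a whole
   sub-aggregate precedes the accesses to its parts.  */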
1579 /* Append a name of the declaration to the name obstack. A helper function for
1580 make_fancy_name. */
1582 static void
1583 make_fancy_decl_name (tree decl)
1585 char buffer[32];
1587 tree name = DECL_NAME (decl);
1588 if (name)
1589 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1590 IDENTIFIER_LENGTH (name));
1591 else
1593 sprintf (buffer, "D%u", DECL_UID (decl));
1594 obstack_grow (&name_obstack, buffer, strlen (buffer));
1598 /* Helper for make_fancy_name. */
1600 static void
1601 make_fancy_name_1 (tree expr)
1603 char buffer[32];
1604 tree index;
1606 if (DECL_P (expr))
1608 make_fancy_decl_name (expr);
1609 return;
1612 switch (TREE_CODE (expr))
1614 case COMPONENT_REF:
1615 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1616 obstack_1grow (&name_obstack, '$');
1617 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1618 break;
1620 case ARRAY_REF:
1621 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1622 obstack_1grow (&name_obstack, '$');
1623 /* Arrays with only one element may not have a constant as their
1624 index. */
1625 index = TREE_OPERAND (expr, 1);
1626 if (TREE_CODE (index) != INTEGER_CST)
1627 break;
1628 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1629 obstack_grow (&name_obstack, buffer, strlen (buffer));
1630 break;
1632 case ADDR_EXPR:
1633 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1634 break;
1636 case MEM_REF:
1637 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1638 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1640 obstack_1grow (&name_obstack, '$');
1641 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1642 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1643 obstack_grow (&name_obstack, buffer, strlen (buffer));
1645 break;
1647 case BIT_FIELD_REF:
1648 case REALPART_EXPR:
1649 case IMAGPART_EXPR:
1650 gcc_unreachable (); /* we treat these as scalars. */
1651 break;
1652 default:
1653 break;
1657 /* Create a human readable name for replacement variable of ACCESS. */
1659 static char *
1660 make_fancy_name (tree expr)
1662 make_fancy_name_1 (expr);
1663 obstack_1grow (&name_obstack, '\0');
1664 return XOBFINISH (&name_obstack, char *);
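/* As a hypothetical example, for an access whose expression is s.inner.val
   the functions above produce the name "s$inner$val", and for a[3].x they
   produce "a$3$x".  */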
1667 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1668 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1669 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1670 be non-NULL and is used to insert new statements either before or below
1671 the current one as specified by INSERT_AFTER. This function is not capable
1672 of handling bitfields. */
1674 tree
1675 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1676 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1677 bool insert_after)
1679 tree prev_base = base;
1680 tree off;
1681 tree mem_ref;
1682 HOST_WIDE_INT base_offset;
1683 unsigned HOST_WIDE_INT misalign;
1684 unsigned int align;
1686 /* Preserve address-space information. */
1687 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1688 if (as != TYPE_ADDR_SPACE (exp_type))
1689 exp_type = build_qualified_type (exp_type,
1690 TYPE_QUALS (exp_type)
1691 | ENCODE_QUAL_ADDR_SPACE (as));
1693 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1694 get_object_alignment_1 (base, &align, &misalign);
1695 base = get_addr_base_and_unit_offset (base, &base_offset);
1697 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1698 offset such as array[var_index]. */
1699 if (!base)
1701 gassign *stmt;
1702 tree tmp, addr;
1704 gcc_checking_assert (gsi);
1705 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1706 addr = build_fold_addr_expr (unshare_expr (prev_base));
1707 STRIP_USELESS_TYPE_CONVERSION (addr);
1708 stmt = gimple_build_assign (tmp, addr);
1709 gimple_set_location (stmt, loc);
1710 if (insert_after)
1711 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1712 else
1713 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1715 off = build_int_cst (reference_alias_ptr_type (prev_base),
1716 offset / BITS_PER_UNIT);
1717 base = tmp;
1719 else if (TREE_CODE (base) == MEM_REF)
1721 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1722 base_offset + offset / BITS_PER_UNIT);
1723 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1724 base = unshare_expr (TREE_OPERAND (base, 0));
1726 else
1728 off = build_int_cst (reference_alias_ptr_type (prev_base),
1729 base_offset + offset / BITS_PER_UNIT);
1730 base = build_fold_addr_expr (unshare_expr (base));
1733 misalign = (misalign + offset) & (align - 1);
1734 if (misalign != 0)
1735 align = least_bit_hwi (misalign);
1736 if (align != TYPE_ALIGN (exp_type))
1737 exp_type = build_aligned_type (exp_type, align);
1739 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1740 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1741 if (TREE_THIS_VOLATILE (prev_base))
1742 TREE_THIS_VOLATILE (mem_ref) = 1;
1743 if (TREE_SIDE_EFFECTS (prev_base))
1744 TREE_SIDE_EFFECTS (mem_ref) = 1;
1745 return mem_ref;
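/* A hypothetical example: asked for an int-typed reference into a variable V
   at bit offset 64, and assuming get_addr_base_and_unit_offset can handle V
   directly, the function above builds a reference that would be dumped as
   something like MEM[(int *)&v + 8B].  */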
1748 /* Construct a memory reference to a part of an aggregate BASE at the given
1749 OFFSET and of the same type as MODEL. In case this is a reference to a
1750 bit-field, the function will replicate the last component_ref of model's
1751 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1752 build_ref_for_offset. */
1754 static tree
1755 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1756 struct access *model, gimple_stmt_iterator *gsi,
1757 bool insert_after)
1759 if (TREE_CODE (model->expr) == COMPONENT_REF
1760 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1762 /* This access represents a bit-field. */
1763 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1765 offset -= int_bit_position (fld);
1766 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1767 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1768 gsi, insert_after);
1769 /* The flag will be set on the record type. */
1770 REF_REVERSE_STORAGE_ORDER (t) = 0;
1771 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1772 NULL_TREE);
1774 else
1775 return
1776 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1777 gsi, insert_after);
1780 /* Attempt to build a memory reference that we could put into a gimple
1781 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1782 create statements and returns NULL instead. This function also ignores
1783 alignment issues and so its results should never end up in non-debug
1784 statements. */
1786 static tree
1787 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1788 struct access *model)
1790 HOST_WIDE_INT base_offset;
1791 tree off;
1793 if (TREE_CODE (model->expr) == COMPONENT_REF
1794 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1795 return NULL_TREE;
1797 base = get_addr_base_and_unit_offset (base, &base_offset);
1798 if (!base)
1799 return NULL_TREE;
1800 if (TREE_CODE (base) == MEM_REF)
1802 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1803 base_offset + offset / BITS_PER_UNIT);
1804 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1805 base = unshare_expr (TREE_OPERAND (base, 0));
1807 else
1809 off = build_int_cst (reference_alias_ptr_type (base),
1810 base_offset + offset / BITS_PER_UNIT);
1811 base = build_fold_addr_expr (unshare_expr (base));
1814 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1817 /* Construct a memory reference consisting of component_refs and array_refs to
1818 a part of an aggregate *RES (which is of type TYPE). The requested part
1819 should have type EXP_TYPE at the given OFFSET. This function might not
1820 succeed; it returns true when it does, and only then does *RES point to something
1821 meaningful. This function should be used only to build expressions that we
1822 might need to present to user (e.g. in warnings). In all other situations,
1823 build_ref_for_model or build_ref_for_offset should be used instead. */
1825 static bool
1826 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1827 tree exp_type)
1829 while (1)
1831 tree fld;
1832 tree tr_size, index, minidx;
1833 HOST_WIDE_INT el_size;
1835 if (offset == 0 && exp_type
1836 && types_compatible_p (exp_type, type))
1837 return true;
1839 switch (TREE_CODE (type))
1841 case UNION_TYPE:
1842 case QUAL_UNION_TYPE:
1843 case RECORD_TYPE:
1844 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1846 HOST_WIDE_INT pos, size;
1847 tree tr_pos, expr, *expr_ptr;
1849 if (TREE_CODE (fld) != FIELD_DECL)
1850 continue;
1852 tr_pos = bit_position (fld);
1853 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1854 continue;
1855 pos = tree_to_uhwi (tr_pos);
1856 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1857 tr_size = DECL_SIZE (fld);
1858 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1859 continue;
1860 size = tree_to_uhwi (tr_size);
1861 if (size == 0)
1863 if (pos != offset)
1864 continue;
1866 else if (pos > offset || (pos + size) <= offset)
1867 continue;
1869 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1870 NULL_TREE);
1871 expr_ptr = &expr;
1872 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1873 offset - pos, exp_type))
1875 *res = expr;
1876 return true;
1879 return false;
1881 case ARRAY_TYPE:
1882 tr_size = TYPE_SIZE (TREE_TYPE (type));
1883 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1884 return false;
1885 el_size = tree_to_uhwi (tr_size);
1887 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1888 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1889 return false;
1890 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1891 if (!integer_zerop (minidx))
1892 index = int_const_binop (PLUS_EXPR, index, minidx);
1893 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1894 NULL_TREE, NULL_TREE);
1895 offset = offset % el_size;
1896 type = TREE_TYPE (type);
1897 break;
1899 default:
1900 if (offset != 0)
1901 return false;
1903 if (exp_type)
1904 return false;
1905 else
1906 return true;
1911 /* Return true iff TYPE is a stdarg va_list type. */
1913 static inline bool
1914 is_va_list_type (tree type)
1916 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1919 /* Print a message to the dump file explaining why a variable was rejected. */
1921 static void
1922 reject (tree var, const char *msg)
1924 if (dump_file && (dump_flags & TDF_DETAILS))
1926 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1927 print_generic_expr (dump_file, var);
1928 fprintf (dump_file, "\n");
1932 /* Return true if VAR is a candidate for SRA and if so, add it to the candidate set. */
1934 static bool
1935 maybe_add_sra_candidate (tree var)
1937 tree type = TREE_TYPE (var);
1938 const char *msg;
1939 tree_node **slot;
1941 if (!AGGREGATE_TYPE_P (type))
1943 reject (var, "not aggregate");
1944 return false;
1946 /* Allow constant-pool entries (that "need to live in memory")
1947 unless we are doing IPA SRA. */
1948 if (needs_to_live_in_memory (var)
1949 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1951 reject (var, "needs to live in memory");
1952 return false;
1954 if (TREE_THIS_VOLATILE (var))
1956 reject (var, "is volatile");
1957 return false;
1959 if (!COMPLETE_TYPE_P (type))
1961 reject (var, "has incomplete type");
1962 return false;
1964 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1966 reject (var, "type size not fixed");
1967 return false;
1969 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1971 reject (var, "type size is zero");
1972 return false;
1974 if (type_internals_preclude_sra_p (type, &msg))
1976 reject (var, msg);
1977 return false;
1979 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1980 we also want to schedule it rather late. Thus we ignore it in
1981 the early pass. */
1982 (sra_mode == SRA_MODE_EARLY_INTRA
1983 && is_va_list_type (type)))
1985 reject (var, "is va_list");
1986 return false;
1989 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1990 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1991 *slot = var;
1993 if (dump_file && (dump_flags & TDF_DETAILS))
1995 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1996 print_generic_expr (dump_file, var);
1997 fprintf (dump_file, "\n");
2000 return true;
2003 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2004 those declarations whose type is suitable for scalarization. */
2006 static bool
2007 find_var_candidates (void)
2009 tree var, parm;
2010 unsigned int i;
2011 bool ret = false;
2013 for (parm = DECL_ARGUMENTS (current_function_decl);
2014 parm;
2015 parm = DECL_CHAIN (parm))
2016 ret |= maybe_add_sra_candidate (parm);
2018 FOR_EACH_LOCAL_DECL (cfun, i, var)
2020 if (!VAR_P (var))
2021 continue;
2023 ret |= maybe_add_sra_candidate (var);
2026 return ret;
2029 /* Sort all accesses for the given variable, check for partial overlaps and
2030 return NULL if there are any. If there are none, pick a representative for
2031 each combination of offset and size and create a linked list out of them.
2032 Return the pointer to the first representative and make sure it is the first
2033 one in the vector of accesses. */
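/* For instance, accesses <offset 0, size 64> and <offset 32, size 64> into the
   same variable partially overlap and therefore disqualify it, whereas
   <offset 0, size 64> and <offset 32, size 32> nest cleanly and are kept.  */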
2035 static struct access *
2036 sort_and_splice_var_accesses (tree var)
2038 int i, j, access_count;
2039 struct access *res, **prev_acc_ptr = &res;
2040 vec<access_p> *access_vec;
2041 bool first = true;
2042 HOST_WIDE_INT low = -1, high = 0;
2044 access_vec = get_base_access_vector (var);
2045 if (!access_vec)
2046 return NULL;
2047 access_count = access_vec->length ();
2049 /* Sort by <OFFSET, SIZE>. */
2050 access_vec->qsort (compare_access_positions);
2052 i = 0;
2053 while (i < access_count)
2055 struct access *access = (*access_vec)[i];
2056 bool grp_write = access->write;
2057 bool grp_read = !access->write;
2058 bool grp_scalar_write = access->write
2059 && is_gimple_reg_type (access->type);
2060 bool grp_scalar_read = !access->write
2061 && is_gimple_reg_type (access->type);
2062 bool grp_assignment_read = access->grp_assignment_read;
2063 bool grp_assignment_write = access->grp_assignment_write;
2064 bool multiple_scalar_reads = false;
2065 bool total_scalarization = access->grp_total_scalarization;
2066 bool grp_partial_lhs = access->grp_partial_lhs;
2067 bool first_scalar = is_gimple_reg_type (access->type);
2068 bool unscalarizable_region = access->grp_unscalarizable_region;
2070 if (first || access->offset >= high)
2072 first = false;
2073 low = access->offset;
2074 high = access->offset + access->size;
2076 else if (access->offset > low && access->offset + access->size > high)
2077 return NULL;
2078 else
2079 gcc_assert (access->offset >= low
2080 && access->offset + access->size <= high);
2082 j = i + 1;
2083 while (j < access_count)
2085 struct access *ac2 = (*access_vec)[j];
2086 if (ac2->offset != access->offset || ac2->size != access->size)
2087 break;
2088 if (ac2->write)
2090 grp_write = true;
2091 grp_scalar_write = (grp_scalar_write
2092 || is_gimple_reg_type (ac2->type));
2094 else
2096 grp_read = true;
2097 if (is_gimple_reg_type (ac2->type))
2099 if (grp_scalar_read)
2100 multiple_scalar_reads = true;
2101 else
2102 grp_scalar_read = true;
2105 grp_assignment_read |= ac2->grp_assignment_read;
2106 grp_assignment_write |= ac2->grp_assignment_write;
2107 grp_partial_lhs |= ac2->grp_partial_lhs;
2108 unscalarizable_region |= ac2->grp_unscalarizable_region;
2109 total_scalarization |= ac2->grp_total_scalarization;
2110 relink_to_new_repr (access, ac2);
2112 /* If there are both aggregate-type and scalar-type accesses with
2113 this combination of size and offset, the comparison function
2114 should have put the scalars first. */
2115 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2116 ac2->group_representative = access;
2117 j++;
2120 i = j;
2122 access->group_representative = access;
2123 access->grp_write = grp_write;
2124 access->grp_read = grp_read;
2125 access->grp_scalar_read = grp_scalar_read;
2126 access->grp_scalar_write = grp_scalar_write;
2127 access->grp_assignment_read = grp_assignment_read;
2128 access->grp_assignment_write = grp_assignment_write;
2129 access->grp_hint = total_scalarization
2130 || (multiple_scalar_reads && !constant_decl_p (var));
2131 access->grp_total_scalarization = total_scalarization;
2132 access->grp_partial_lhs = grp_partial_lhs;
2133 access->grp_unscalarizable_region = unscalarizable_region;
2134 if (access->first_link)
2135 add_access_to_work_queue (access);
2137 *prev_acc_ptr = access;
2138 prev_acc_ptr = &access->next_grp;
2141 gcc_assert (res == (*access_vec)[0]);
2142 return res;
2145 /* Create a variable for the given ACCESS which determines the type, name and a
2146 few other properties. Return the variable declaration; the caller stores it
2147 in ACCESS->replacement_decl. */
2149 static tree
2150 create_access_replacement (struct access *access)
2152 tree repl;
2154 if (access->grp_to_be_debug_replaced)
2156 repl = create_tmp_var_raw (access->type);
2157 DECL_CONTEXT (repl) = current_function_decl;
2159 else
2160 /* Drop any special alignment on the type if it's not on the main
2161 variant. This avoids issues with weirdo ABIs like AAPCS. */
2162 repl = create_tmp_var (build_qualified_type
2163 (TYPE_MAIN_VARIANT (access->type),
2164 TYPE_QUALS (access->type)), "SR");
2165 if (TREE_CODE (access->type) == COMPLEX_TYPE
2166 || TREE_CODE (access->type) == VECTOR_TYPE)
2168 if (!access->grp_partial_lhs)
2169 DECL_GIMPLE_REG_P (repl) = 1;
2171 else if (access->grp_partial_lhs
2172 && is_gimple_reg_type (access->type))
2173 TREE_ADDRESSABLE (repl) = 1;
2175 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2176 DECL_ARTIFICIAL (repl) = 1;
2177 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2179 if (DECL_NAME (access->base)
2180 && !DECL_IGNORED_P (access->base)
2181 && !DECL_ARTIFICIAL (access->base))
2183 char *pretty_name = make_fancy_name (access->expr);
2184 tree debug_expr = unshare_expr_without_location (access->expr), d;
2185 bool fail = false;
2187 DECL_NAME (repl) = get_identifier (pretty_name);
2188 DECL_NAMELESS (repl) = 1;
2189 obstack_free (&name_obstack, pretty_name);
2191 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2192 as DECL_DEBUG_EXPR isn't considered when looking for still
2193 used SSA_NAMEs and thus they could be freed. All debug info
2194 generation cares about is whether something is constant or variable
2195 and that get_ref_base_and_extent works properly on the
2196 expression. It cannot handle accesses at a non-constant offset
2197 though, so just give up in those cases. */
2198 for (d = debug_expr;
2199 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2200 d = TREE_OPERAND (d, 0))
2201 switch (TREE_CODE (d))
2203 case ARRAY_REF:
2204 case ARRAY_RANGE_REF:
2205 if (TREE_OPERAND (d, 1)
2206 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2207 fail = true;
2208 if (TREE_OPERAND (d, 3)
2209 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2210 fail = true;
2211 /* FALLTHRU */
2212 case COMPONENT_REF:
2213 if (TREE_OPERAND (d, 2)
2214 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2215 fail = true;
2216 break;
2217 case MEM_REF:
2218 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2219 fail = true;
2220 else
2221 d = TREE_OPERAND (d, 0);
2222 break;
2223 default:
2224 break;
2226 if (!fail)
2228 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2229 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2231 if (access->grp_no_warning)
2232 TREE_NO_WARNING (repl) = 1;
2233 else
2234 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2236 else
2237 TREE_NO_WARNING (repl) = 1;
2239 if (dump_file)
2241 if (access->grp_to_be_debug_replaced)
2243 fprintf (dump_file, "Created a debug-only replacement for ");
2244 print_generic_expr (dump_file, access->base);
2245 fprintf (dump_file, " offset: %u, size: %u\n",
2246 (unsigned) access->offset, (unsigned) access->size);
2248 else
2250 fprintf (dump_file, "Created a replacement for ");
2251 print_generic_expr (dump_file, access->base);
2252 fprintf (dump_file, " offset: %u, size: %u: ",
2253 (unsigned) access->offset, (unsigned) access->size);
2254 print_generic_expr (dump_file, repl);
2255 fprintf (dump_file, "\n");
2258 sra_stats.replacements++;
2260 return repl;
2263 /* Return the scalar replacement of ACCESS, which must exist. */
2265 static inline tree
2266 get_access_replacement (struct access *access)
2268 gcc_checking_assert (access->replacement_decl);
2269 return access->replacement_decl;
2273 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2274 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2275 to by it is not "within" the root. Return false iff some accesses partially
2276 overlap. */
2278 static bool
2279 build_access_subtree (struct access **access)
2281 struct access *root = *access, *last_child = NULL;
2282 HOST_WIDE_INT limit = root->offset + root->size;
2284 *access = (*access)->next_grp;
2285 while (*access && (*access)->offset + (*access)->size <= limit)
2287 if (!last_child)
2288 root->first_child = *access;
2289 else
2290 last_child->next_sibling = *access;
2291 last_child = *access;
2292 (*access)->parent = root;
2293 (*access)->grp_write |= root->grp_write;
2295 if (!build_access_subtree (access))
2296 return false;
2299 if (*access && (*access)->offset < limit)
2300 return false;
2302 return true;
2305 /* Build a tree of access representatives, ACCESS is the pointer to the first
2306 one, others are linked in a list by the next_grp field. Return false iff
2307 some accesses partially overlap. */
2309 static bool
2310 build_access_trees (struct access *access)
2312 while (access)
2314 struct access *root = access;
2316 if (!build_access_subtree (&access))
2317 return false;
2318 root->next_grp = access;
2320 return true;
2323 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2324 array. */
2326 static bool
2327 expr_with_var_bounded_array_refs_p (tree expr)
2329 while (handled_component_p (expr))
2331 if (TREE_CODE (expr) == ARRAY_REF
2332 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2333 return true;
2334 expr = TREE_OPERAND (expr, 0);
2336 return false;
2339 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2340 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2341 sorts of access flags appropriately along the way, notably propagating
2342 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2343 grp_total_scalarization from the parent access.
2345 Creating a replacement for a scalar access is considered beneficial if its
2346 grp_hint is set (this means we are either attempting total scalarization or
2347 there is more than one direct read access) or according to the following
2348 table:
2350 Access written to through a scalar type (once or more times)
2352 | Written to in an assignment statement
2354 | | Access read as scalar _once_
2355 | | |
2356 | | | Read in an assignment statement
2357 | | | |
2358 | | | | Scalarize Comment
2359 -----------------------------------------------------------------------------
2360 0 0 0 0 No access for the scalar
2361 0 0 0 1 No access for the scalar
2362 0 0 1 0 No Single read - won't help
2363 0 0 1 1 No The same case
2364 0 1 0 0 No access for the scalar
2365 0 1 0 1 No access for the scalar
2366 0 1 1 0 Yes s = *g; return s.i;
2367 0 1 1 1 Yes The same case as above
2368 1 0 0 0 No Won't help
2369 1 0 0 1 Yes s.i = 1; *g = s;
2370 1 0 1 0 Yes s.i = 5; g = s.i;
2371 1 0 1 1 Yes The same case as above
2372 1 1 0 0 No Won't help.
2373 1 1 0 1 Yes s.i = 1; *g = s;
2374 1 1 1 0 Yes s = *g; return s.i;
2375 1 1 1 1 Yes Any of the above yeses */
2377 static bool
2378 analyze_access_subtree (struct access *root, struct access *parent,
2379 bool allow_replacements)
2381 struct access *child;
2382 HOST_WIDE_INT limit = root->offset + root->size;
2383 HOST_WIDE_INT covered_to = root->offset;
2384 bool scalar = is_gimple_reg_type (root->type);
2385 bool hole = false, sth_created = false;
2387 if (parent)
2389 if (parent->grp_read)
2390 root->grp_read = 1;
2391 if (parent->grp_assignment_read)
2392 root->grp_assignment_read = 1;
2393 if (parent->grp_write)
2394 root->grp_write = 1;
2395 if (parent->grp_assignment_write)
2396 root->grp_assignment_write = 1;
2397 if (parent->grp_total_scalarization)
2398 root->grp_total_scalarization = 1;
2401 if (root->grp_unscalarizable_region)
2402 allow_replacements = false;
2404 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2405 allow_replacements = false;
2407 for (child = root->first_child; child; child = child->next_sibling)
2409 hole |= covered_to < child->offset;
2410 sth_created |= analyze_access_subtree (child, root,
2411 allow_replacements && !scalar);
2413 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2414 root->grp_total_scalarization &= child->grp_total_scalarization;
2415 if (child->grp_covered)
2416 covered_to += child->size;
2417 else
2418 hole = true;
2421 if (allow_replacements && scalar && !root->first_child
2422 && (root->grp_hint
2423 || ((root->grp_scalar_read || root->grp_assignment_read)
2424 && (root->grp_scalar_write || root->grp_assignment_write))))
2426 /* Always create access replacements that cover the whole access.
2427 For integral types this means the precision has to match.
2428 Avoid assumptions based on the integral type kind, too. */
2429 if (INTEGRAL_TYPE_P (root->type)
2430 && (TREE_CODE (root->type) != INTEGER_TYPE
2431 || TYPE_PRECISION (root->type) != root->size)
2432 /* But leave bitfield accesses alone. */
2433 && (TREE_CODE (root->expr) != COMPONENT_REF
2434 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2436 tree rt = root->type;
2437 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2438 && (root->size % BITS_PER_UNIT) == 0);
2439 root->type = build_nonstandard_integer_type (root->size,
2440 TYPE_UNSIGNED (rt));
2441 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2442 root->offset, root->reverse,
2443 root->type, NULL, false);
2445 if (dump_file && (dump_flags & TDF_DETAILS))
2447 fprintf (dump_file, "Changing the type of a replacement for ");
2448 print_generic_expr (dump_file, root->base);
2449 fprintf (dump_file, " offset: %u, size: %u ",
2450 (unsigned) root->offset, (unsigned) root->size);
2451 fprintf (dump_file, " to an integer.\n");
2455 root->grp_to_be_replaced = 1;
2456 root->replacement_decl = create_access_replacement (root);
2457 sth_created = true;
2458 hole = false;
2460 else
2462 if (allow_replacements
2463 && scalar && !root->first_child
2464 && (root->grp_scalar_write || root->grp_assignment_write)
2465 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2466 DECL_UID (root->base)))
2468 gcc_checking_assert (!root->grp_scalar_read
2469 && !root->grp_assignment_read);
2470 sth_created = true;
2471 if (MAY_HAVE_DEBUG_STMTS)
2473 root->grp_to_be_debug_replaced = 1;
2474 root->replacement_decl = create_access_replacement (root);
2478 if (covered_to < limit)
2479 hole = true;
2480 if (scalar || !allow_replacements)
2481 root->grp_total_scalarization = 0;
2484 if (!hole || root->grp_total_scalarization)
2485 root->grp_covered = 1;
2486 else if (root->grp_write || comes_initialized_p (root->base))
2487 root->grp_unscalarized_data = 1; /* not covered and written to */
2488 return sth_created;
2491 /* Analyze all access trees linked by next_grp by the means of
2492 analyze_access_subtree. */
2493 static bool
2494 analyze_access_trees (struct access *access)
2496 bool ret = false;
2498 while (access)
2500 if (analyze_access_subtree (access, NULL, true))
2501 ret = true;
2502 access = access->next_grp;
2505 return ret;
2508 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2509 SIZE would conflict with an already existing one. If exactly such a child
2510 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2512 static bool
2513 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2514 HOST_WIDE_INT size, struct access **exact_match)
2516 struct access *child;
2518 for (child = lacc->first_child; child; child = child->next_sibling)
2520 if (child->offset == norm_offset && child->size == size)
2522 *exact_match = child;
2523 return true;
2526 if (child->offset < norm_offset + size
2527 && child->offset + child->size > norm_offset)
2528 return true;
2531 return false;
2534 /* Create a new child access of PARENT, with all properties just like MODEL
2535 except for its offset and with its grp_read flag cleared.
2536 Return the new access or NULL if it cannot be created. Note that this
2537 access is created long after all splicing and sorting, it's not located in
2538 any access vector and is automatically a representative of its group. Set
2539 the grp_write flag of the new access if SET_GRP_WRITE is true. */
2541 static struct access *
2542 create_artificial_child_access (struct access *parent, struct access *model,
2543 HOST_WIDE_INT new_offset,
2544 bool set_grp_write)
2546 struct access **child;
2547 tree expr = parent->base;
2549 gcc_assert (!model->grp_unscalarizable_region);
2551 struct access *access = access_pool.allocate ();
2552 memset (access, 0, sizeof (struct access));
2553 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2554 model->type))
2556 access->grp_no_warning = true;
2557 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2558 new_offset, model, NULL, false);
2561 access->base = parent->base;
2562 access->expr = expr;
2563 access->offset = new_offset;
2564 access->size = model->size;
2565 access->type = model->type;
2566 access->grp_write = set_grp_write;
2567 access->grp_read = false;
2568 access->reverse = model->reverse;
2570 child = &parent->first_child;
2571 while (*child && (*child)->offset < new_offset)
2572 child = &(*child)->next_sibling;
2574 access->next_sibling = *child;
2575 *child = access;
2577 return access;
2581 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2582 true if any new subaccess was created. Additionally, if RACC is a scalar
2583 access but LACC is not, change the type of the latter, if possible. */
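/* For instance, given candidate aggregates d and s and an assignment d = s,
   if s.x.f already has its own access but d only has one covering the whole
   of d.x, an artificial child access corresponding to d.x.f is created here
   so that the copy can later be done replacement by replacement.  */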
2585 static bool
2586 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2588 struct access *rchild;
2589 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2590 bool ret = false;
2592 /* If the LHS is still not marked as being written to, we only need to do so
2593 if the RHS at this level actually was. */
2594 if (!lacc->grp_write)
2596 gcc_checking_assert (!comes_initialized_p (racc->base));
2597 if (racc->grp_write)
2599 lacc->grp_write = true;
2600 ret = true;
2604 if (is_gimple_reg_type (lacc->type)
2605 || lacc->grp_unscalarizable_region
2606 || racc->grp_unscalarizable_region)
2608 ret |= !lacc->grp_write;
2609 lacc->grp_write = true;
2610 return ret;
2613 if (is_gimple_reg_type (racc->type))
2615 if (!lacc->first_child && !racc->first_child)
2617 tree t = lacc->base;
2619 lacc->type = racc->type;
2620 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2621 lacc->offset, racc->type))
2622 lacc->expr = t;
2623 else
2625 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2626 lacc->base, lacc->offset,
2627 racc, NULL, false);
2628 lacc->grp_no_warning = true;
2631 return ret;
2634 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2636 struct access *new_acc = NULL;
2637 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2639 if (rchild->grp_unscalarizable_region)
2641 lacc->grp_write = true;
2642 continue;
2645 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2646 &new_acc))
2648 if (new_acc)
2650 if (!new_acc->grp_write
2651 && (lacc->grp_write || rchild->grp_write))
2653 new_acc->grp_write = true;
2654 ret = true;
2657 rchild->grp_hint = 1;
2658 new_acc->grp_hint |= new_acc->grp_read;
2659 if (rchild->first_child)
2660 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2662 else
2663 lacc->grp_write = true;
2664 continue;
2667 rchild->grp_hint = 1;
2668 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2669 lacc->grp_write
2670 || rchild->grp_write);
2671 if (new_acc)
2673 ret = true;
2674 if (racc->first_child)
2675 propagate_subaccesses_across_link (new_acc, rchild);
2679 return ret;
2682 /* Propagate all subaccesses across assignment links. */
2684 static void
2685 propagate_all_subaccesses (void)
2687 while (work_queue_head)
2689 struct access *racc = pop_access_from_work_queue ();
2690 struct assign_link *link;
2692 gcc_assert (racc->first_link);
2694 for (link = racc->first_link; link; link = link->next)
2696 struct access *lacc = link->lacc;
2698 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2699 continue;
2700 lacc = lacc->group_representative;
2701 if (propagate_subaccesses_across_link (lacc, racc))
2704 if (lacc->first_link)
2706 add_access_to_work_queue (lacc);
2707 break;
2709 lacc = lacc->parent;
2711 while (lacc);
2716 /* Go through all accesses collected throughout the (intraprocedural) analysis
2717 stage, exclude overlapping ones, identify representatives and build trees
2718 out of them, making decisions about scalarization on the way. Return true
2719 iff there are any to-be-scalarized variables after this stage. */
2721 static bool
2722 analyze_all_variable_accesses (void)
2724 int res = 0;
2725 bitmap tmp = BITMAP_ALLOC (NULL);
2726 bitmap_iterator bi;
2727 unsigned i;
2728 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2730 enum compiler_param param = optimize_speed_p
2731 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2732 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2734 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2735 fall back to a target default. */
2736 unsigned HOST_WIDE_INT max_scalarization_size
2737 = global_options_set.x_param_values[param]
2738 ? PARAM_VALUE (param)
2739 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2741 max_scalarization_size *= BITS_PER_UNIT;
2743 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2744 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2745 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2747 tree var = candidate (i);
2749 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2750 constant_decl_p (var)))
2752 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2753 <= max_scalarization_size)
2755 create_total_scalarization_access (var);
2756 completely_scalarize (var, TREE_TYPE (var), 0, var);
2757 statistics_counter_event (cfun,
2758 "Totally-scalarized aggregates", 1);
2759 if (dump_file && (dump_flags & TDF_DETAILS))
2761 fprintf (dump_file, "Will attempt to totally scalarize ");
2762 print_generic_expr (dump_file, var);
2763 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2766 else if (dump_file && (dump_flags & TDF_DETAILS))
2768 fprintf (dump_file, "Too big to totally scalarize: ");
2769 print_generic_expr (dump_file, var);
2770 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2775 bitmap_copy (tmp, candidate_bitmap);
2776 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2778 tree var = candidate (i);
2779 struct access *access;
2781 access = sort_and_splice_var_accesses (var);
2782 if (!access || !build_access_trees (access))
2783 disqualify_candidate (var,
2784 "No or inhibitingly overlapping accesses.");
2787 propagate_all_subaccesses ();
2789 bitmap_copy (tmp, candidate_bitmap);
2790 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2792 tree var = candidate (i);
2793 struct access *access = get_first_repr_for_decl (var);
2795 if (analyze_access_trees (access))
2797 res++;
2798 if (dump_file && (dump_flags & TDF_DETAILS))
2800 fprintf (dump_file, "\nAccess trees for ");
2801 print_generic_expr (dump_file, var);
2802 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2803 dump_access_tree (dump_file, access);
2804 fprintf (dump_file, "\n");
2807 else
2808 disqualify_candidate (var, "No scalar replacements to be created.");
2811 BITMAP_FREE (tmp);
2813 if (res)
2815 statistics_counter_event (cfun, "Scalarized aggregates", res);
2816 return true;
2818 else
2819 return false;
2822 /* Generate statements copying scalar replacements of accesses within a subtree
2823 into or out of AGG. ACCESS, all its children, siblings and their children
2824 are to be processed. AGG is an aggregate type expression (can be a
2825 declaration but does not have to be, it can for example also be a mem_ref or
2826 a series of handled components). TOP_OFFSET is the offset of the processed
2827 subtree which has to be subtracted from offsets of individual accesses to
2828 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2829 replacements in the interval <start_offset, start_offset + chunk_size>,
2830 otherwise copy all. GSI is a statement iterator used to place the new
2831 statements. WRITE should be true when the statements should write from AGG
2832 to the replacement and false vice versa. If INSERT_AFTER is true, new
2833 statements will be added after the current statement in GSI, they will be
2834 added before the statement otherwise. */
2836 static void
2837 generate_subtree_copies (struct access *access, tree agg,
2838 HOST_WIDE_INT top_offset,
2839 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2840 gimple_stmt_iterator *gsi, bool write,
2841 bool insert_after, location_t loc)
2843 /* Never write anything into constant pool decls. See PR70602. */
2844 if (!write && constant_decl_p (agg))
2845 return;
2848 if (chunk_size && access->offset >= start_offset + chunk_size)
2849 return;
2851 if (access->grp_to_be_replaced
2852 && (chunk_size == 0
2853 || access->offset + access->size > start_offset))
2855 tree expr, repl = get_access_replacement (access);
2856 gassign *stmt;
2858 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2859 access, gsi, insert_after);
2861 if (write)
2863 if (access->grp_partial_lhs)
2864 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2865 !insert_after,
2866 insert_after ? GSI_NEW_STMT
2867 : GSI_SAME_STMT);
2868 stmt = gimple_build_assign (repl, expr);
2870 else
2872 TREE_NO_WARNING (repl) = 1;
2873 if (access->grp_partial_lhs)
2874 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2875 !insert_after,
2876 insert_after ? GSI_NEW_STMT
2877 : GSI_SAME_STMT);
2878 stmt = gimple_build_assign (expr, repl);
2880 gimple_set_location (stmt, loc);
2882 if (insert_after)
2883 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2884 else
2885 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2886 update_stmt (stmt);
2887 sra_stats.subtree_copies++;
2889 else if (write
2890 && access->grp_to_be_debug_replaced
2891 && (chunk_size == 0
2892 || access->offset + access->size > start_offset))
2894 gdebug *ds;
2895 tree drhs = build_debug_ref_for_model (loc, agg,
2896 access->offset - top_offset,
2897 access);
2898 ds = gimple_build_debug_bind (get_access_replacement (access),
2899 drhs, gsi_stmt (*gsi));
2900 if (insert_after)
2901 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2902 else
2903 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2906 if (access->first_child)
2907 generate_subtree_copies (access->first_child, agg, top_offset,
2908 start_offset, chunk_size, gsi,
2909 write, insert_after, loc);
2911 access = access->next_sibling;
2913 while (access);
2916 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2917 root of the subtree to be processed. GSI is the statement iterator used
2918 for inserting statements which are added after the current statement if
2919 INSERT_AFTER is true or before it otherwise. */
2921 static void
2922 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2923 bool insert_after, location_t loc)
2926 struct access *child;
2928 if (access->grp_to_be_replaced)
2930 gassign *stmt;
2932 stmt = gimple_build_assign (get_access_replacement (access),
2933 build_zero_cst (access->type));
2934 if (insert_after)
2935 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2936 else
2937 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2938 update_stmt (stmt);
2939 gimple_set_location (stmt, loc);
2941 else if (access->grp_to_be_debug_replaced)
2943 gdebug *ds
2944 = gimple_build_debug_bind (get_access_replacement (access),
2945 build_zero_cst (access->type),
2946 gsi_stmt (*gsi));
2947 if (insert_after)
2948 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2949 else
2950 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2953 for (child = access->first_child; child; child = child->next_sibling)
2954 init_subtree_with_zero (child, gsi, insert_after, loc);
2957 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2958 root of the subtree to be processed. GSI is the statement iterator used
2959 for inserting statements which are added after the current statement if
2960 INSERT_AFTER is true or before it otherwise. */
2962 static void
2963 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2964 bool insert_after, location_t loc)
2967 struct access *child;
2969 if (access->grp_to_be_replaced)
2971 tree rep = get_access_replacement (access);
2972 tree clobber = build_constructor (access->type, NULL);
2973 TREE_THIS_VOLATILE (clobber) = 1;
2974 gimple *stmt = gimple_build_assign (rep, clobber);
2976 if (insert_after)
2977 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2978 else
2979 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2980 update_stmt (stmt);
2981 gimple_set_location (stmt, loc);
2984 for (child = access->first_child; child; child = child->next_sibling)
2985 clobber_subtree (child, gsi, insert_after, loc);
2988 /* Search for an access representative for the given expression EXPR and
2989 return it or NULL if it cannot be found. */
2991 static struct access *
2992 get_access_for_expr (tree expr)
2994 HOST_WIDE_INT offset, size, max_size;
2995 tree base;
2996 bool reverse;
2998 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2999 a different size than the size of its argument and we need the latter
3000 one. */
3001 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3002 expr = TREE_OPERAND (expr, 0);
3004 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
3005 if (max_size == -1 || !DECL_P (base))
3006 return NULL;
3008 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3009 return NULL;
3011 return get_var_base_offset_size_access (base, offset, max_size);
3014 /* Replace the expression EXPR with a scalar replacement if there is one and
3015 generate other statements to do type conversion or subtree copying if
3016 necessary. GSI is used to place newly created statements, WRITE is true if
3017 the expression is being written to (it is on a LHS of a statement or output
3018 in an assembly statement). */
3020 static bool
3021 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3023 location_t loc;
3024 struct access *access;
3025 tree type, bfr, orig_expr;
3027 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3029 bfr = *expr;
3030 expr = &TREE_OPERAND (*expr, 0);
3032 else
3033 bfr = NULL_TREE;
3035 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3036 expr = &TREE_OPERAND (*expr, 0);
3037 access = get_access_for_expr (*expr);
3038 if (!access)
3039 return false;
3040 type = TREE_TYPE (*expr);
3041 orig_expr = *expr;
3043 loc = gimple_location (gsi_stmt (*gsi));
3044 gimple_stmt_iterator alt_gsi = gsi_none ();
3045 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3047 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3048 gsi = &alt_gsi;
3051 if (access->grp_to_be_replaced)
3053 tree repl = get_access_replacement (access);
3054 /* If we replace a non-register typed access simply use the original
3055 access expression to extract the scalar component afterwards.
3056 This happens if scalarizing a function return value or parameter
3057 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3058 gcc.c-torture/compile/20011217-1.c.
3060 We also want to use this when accessing a complex or vector which can
3061 be accessed as a different type too, potentially creating a need for
3062 type conversion (see PR42196) and when scalarized unions are involved
3063 in assembler statements (see PR42398). */
3064 if (!useless_type_conversion_p (type, access->type))
3066 tree ref;
3068 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3070 if (write)
3072 gassign *stmt;
3074 if (access->grp_partial_lhs)
3075 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3076 false, GSI_NEW_STMT);
3077 stmt = gimple_build_assign (repl, ref);
3078 gimple_set_location (stmt, loc);
3079 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3081 else
3083 gassign *stmt;
3085 if (access->grp_partial_lhs)
3086 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3087 true, GSI_SAME_STMT);
3088 stmt = gimple_build_assign (ref, repl);
3089 gimple_set_location (stmt, loc);
3090 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3093 else
3094 *expr = repl;
3095 sra_stats.exprs++;
3097 else if (write && access->grp_to_be_debug_replaced)
3099 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3100 NULL_TREE,
3101 gsi_stmt (*gsi));
3102 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3105 if (access->first_child)
3107 HOST_WIDE_INT start_offset, chunk_size;
3108 if (bfr
3109 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3110 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3112 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3113 start_offset = access->offset
3114 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3116 else
3117 start_offset = chunk_size = 0;
3119 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3120 start_offset, chunk_size, gsi, write, write,
3121 loc);
3123 return true;
3126 /* Where scalar replacements of the RHS have been written to when a replacement
3127 of an LHS of an assignment cannot be directly loaded from a replacement of
3128 the RHS. */
3129 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3130 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3131 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3133 struct subreplacement_assignment_data
3135 /* Offset of the access representing the lhs of the assignment. */
3136 HOST_WIDE_INT left_offset;
3138 /* LHS and RHS of the original assignment. */
3139 tree assignment_lhs, assignment_rhs;
3141 /* Access representing the rhs of the whole assignment. */
3142 struct access *top_racc;
3144 /* Stmt iterator used for statement insertions after the original assignment.
3145 It points to the main GSI used to traverse a BB during function body
3146 modification. */
3147 gimple_stmt_iterator *new_gsi;
3149 /* Stmt iterator used for statement insertions before the original
3150 assignment. Keeps on pointing to the original statement. */
3151 gimple_stmt_iterator old_gsi;
3153 /* Location of the assignment. */
3154 location_t loc;
3156 /* Keeps the information whether we have needed to refresh replacements of
3157 the LHS and from which side of the assignments this takes place. */
3158 enum unscalarized_data_handling refreshed;
3161 /* Store all replacements in the access tree rooted in SAD->top_racc either back
3162 into the RHS aggregate if there are unscalarized data or directly to the LHS
3163 of the original assignment otherwise. */
3165 static void
3166 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3168 tree src;
3169 if (sad->top_racc->grp_unscalarized_data)
3171 src = sad->assignment_rhs;
3172 sad->refreshed = SRA_UDH_RIGHT;
3174 else
3176 src = sad->assignment_lhs;
3177 sad->refreshed = SRA_UDH_LEFT;
3179 generate_subtree_copies (sad->top_racc->first_child, src,
3180 sad->top_racc->offset, 0, 0,
3181 &sad->old_gsi, false, false, sad->loc);
3184 /* Try to generate statements to load all sub-replacements in an access subtree
3185 formed by children of LACC from scalar replacements in the SAD->top_racc
3186 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3187 and load the accesses from it. */
3189 static void
3190 load_assign_lhs_subreplacements (struct access *lacc,
3191 struct subreplacement_assignment_data *sad)
3193 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3195 HOST_WIDE_INT offset;
3196 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3198 if (lacc->grp_to_be_replaced)
3200 struct access *racc;
3201 gassign *stmt;
3202 tree rhs;
3204 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3205 if (racc && racc->grp_to_be_replaced)
3207 rhs = get_access_replacement (racc);
3208 if (!useless_type_conversion_p (lacc->type, racc->type))
3209 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3210 lacc->type, rhs);
3212 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3213 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3214 NULL_TREE, true, GSI_SAME_STMT);
3216 else
3218 /* No suitable access on the right hand side, need to load from
3219 the aggregate. See if we have to update it first... */
3220 if (sad->refreshed == SRA_UDH_NONE)
3221 handle_unscalarized_data_in_subtree (sad);
3223 if (sad->refreshed == SRA_UDH_LEFT)
3224 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3225 lacc->offset - sad->left_offset,
3226 lacc, sad->new_gsi, true);
3227 else
3228 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3229 lacc->offset - sad->left_offset,
3230 lacc, sad->new_gsi, true);
3231 if (lacc->grp_partial_lhs)
3232 rhs = force_gimple_operand_gsi (sad->new_gsi,
3233 rhs, true, NULL_TREE,
3234 false, GSI_NEW_STMT);
3237 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3238 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3239 gimple_set_location (stmt, sad->loc);
3240 update_stmt (stmt);
3241 sra_stats.subreplacements++;
3243 else
3245 if (sad->refreshed == SRA_UDH_NONE
3246 && lacc->grp_read && !lacc->grp_covered)
3247 handle_unscalarized_data_in_subtree (sad);
3249 if (lacc && lacc->grp_to_be_debug_replaced)
3251 gdebug *ds;
3252 tree drhs;
3253 struct access *racc = find_access_in_subtree (sad->top_racc,
3254 offset,
3255 lacc->size);
3257 if (racc && racc->grp_to_be_replaced)
3259 if (racc->grp_write || constant_decl_p (racc->base))
3260 drhs = get_access_replacement (racc);
3261 else
3262 drhs = NULL;
3264 else if (sad->refreshed == SRA_UDH_LEFT)
3265 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3266 lacc->offset, lacc);
3267 else if (sad->refreshed == SRA_UDH_RIGHT)
3268 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3269 offset, lacc);
3270 else
3271 drhs = NULL_TREE;
3272 if (drhs
3273 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3274 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3275 lacc->type, drhs);
3276 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3277 drhs, gsi_stmt (sad->old_gsi));
3278 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3282 if (lacc->first_child)
3283 load_assign_lhs_subreplacements (lacc, sad);
3287 /* Result code for SRA assignment modification. */
3288 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3289 SRA_AM_MODIFIED, /* stmt changed but not
3290 removed */
3291 SRA_AM_REMOVED }; /* stmt eliminated */
3293 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3294 to the assignment and GSI is the statement iterator pointing at it. Returns
3295 the same values as sra_modify_assign. */
3297 static enum assignment_mod_result
3298 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3300 tree lhs = gimple_assign_lhs (stmt);
3301 struct access *acc = get_access_for_expr (lhs);
3302 if (!acc)
3303 return SRA_AM_NONE;
3304 location_t loc = gimple_location (stmt);
3306 if (gimple_clobber_p (stmt))
3308 /* Clobber the replacement variable. */
3309 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3310 /* Remove clobbers of fully scalarized variables, they are dead. */
3311 if (acc->grp_covered)
3313 unlink_stmt_vdef (stmt);
3314 gsi_remove (gsi, true);
3315 release_defs (stmt);
3316 return SRA_AM_REMOVED;
3318 else
3319 return SRA_AM_MODIFIED;
3322 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3324 /* I have never seen this code path trigger but if it can happen the
3325 following should handle it gracefully. */
3326 if (access_has_children_p (acc))
3327 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3328 true, true, loc);
3329 return SRA_AM_MODIFIED;
3332 if (acc->grp_covered)
3334 init_subtree_with_zero (acc, gsi, false, loc);
3335 unlink_stmt_vdef (stmt);
3336 gsi_remove (gsi, true);
3337 release_defs (stmt);
3338 return SRA_AM_REMOVED;
3340 else
3342 init_subtree_with_zero (acc, gsi, true, loc);
3343 return SRA_AM_MODIFIED;
3347 /* Create and return a new suitable default definition SSA_NAME for RACC which
3348 is an access describing an uninitialized part of an aggregate that is being
3349 loaded. */
3351 static tree
3352 get_repl_default_def_ssa_name (struct access *racc)
3354 gcc_checking_assert (!racc->grp_to_be_replaced
3355 && !racc->grp_to_be_debug_replaced);
3356 if (!racc->replacement_decl)
3357 racc->replacement_decl = create_access_replacement (racc);
3358 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3361 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3362 bit-field field declaration somewhere in it. */
3364 static inline bool
3365 contains_vce_or_bfcref_p (const_tree ref)
3367 while (handled_component_p (ref))
3369 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3370 || (TREE_CODE (ref) == COMPONENT_REF
3371 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3372 return true;
3373 ref = TREE_OPERAND (ref, 0);
3376 return false;
3379 /* Examine both sides of the assignment statement pointed to by STMT, replace
3380 them with a scalar replacement if there is one and generate copying of
3381 replacements if scalarized aggregates have been used in the assignment. GSI
3382 is used to hold generated statements for type conversions and subtree
3383 copying. */
3385 static enum assignment_mod_result
3386 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3388 struct access *lacc, *racc;
3389 tree lhs, rhs;
3390 bool modify_this_stmt = false;
3391 bool force_gimple_rhs = false;
3392 location_t loc;
3393 gimple_stmt_iterator orig_gsi = *gsi;
3395 if (!gimple_assign_single_p (stmt))
3396 return SRA_AM_NONE;
3397 lhs = gimple_assign_lhs (stmt);
3398 rhs = gimple_assign_rhs1 (stmt);
3400 if (TREE_CODE (rhs) == CONSTRUCTOR)
3401 return sra_modify_constructor_assign (stmt, gsi);
3403 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3404 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3405 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3407 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3408 gsi, false);
3409 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3410 gsi, true);
3411 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3414 lacc = get_access_for_expr (lhs);
3415 racc = get_access_for_expr (rhs);
3416 if (!lacc && !racc)
3417 return SRA_AM_NONE;
3418 /* Avoid modifying initializations of constant-pool replacements. */
3419 if (racc && (racc->replacement_decl == lhs))
3420 return SRA_AM_NONE;
3422 loc = gimple_location (stmt);
3423 if (lacc && lacc->grp_to_be_replaced)
3425 lhs = get_access_replacement (lacc);
3426 gimple_assign_set_lhs (stmt, lhs);
3427 modify_this_stmt = true;
3428 if (lacc->grp_partial_lhs)
3429 force_gimple_rhs = true;
3430 sra_stats.exprs++;
3433 if (racc && racc->grp_to_be_replaced)
3435 rhs = get_access_replacement (racc);
3436 modify_this_stmt = true;
3437 if (racc->grp_partial_lhs)
3438 force_gimple_rhs = true;
3439 sra_stats.exprs++;
3441 else if (racc
3442 && !racc->grp_unscalarized_data
3443 && !racc->grp_unscalarizable_region
3444 && TREE_CODE (lhs) == SSA_NAME
3445 && !access_has_replacements_p (racc))
3447 rhs = get_repl_default_def_ssa_name (racc);
3448 modify_this_stmt = true;
3449 sra_stats.exprs++;
3452 if (modify_this_stmt)
3454 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3456 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3457 ??? This should move to fold_stmt which we simply should
3458 call after building a VIEW_CONVERT_EXPR here. */
3459 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3460 && !contains_bitfld_component_ref_p (lhs))
3462 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3463 gimple_assign_set_lhs (stmt, lhs);
3465 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3466 && !contains_vce_or_bfcref_p (rhs))
3467 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3469 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3471 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3472 rhs);
3473 if (is_gimple_reg_type (TREE_TYPE (lhs))
3474 && TREE_CODE (lhs) != SSA_NAME)
3475 force_gimple_rhs = true;
3480 if (lacc && lacc->grp_to_be_debug_replaced)
3482 tree dlhs = get_access_replacement (lacc);
3483 tree drhs = unshare_expr (rhs);
3484 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3486 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3487 && !contains_vce_or_bfcref_p (drhs))
3488 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3489 if (drhs
3490 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3491 TREE_TYPE (drhs)))
3492 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3493 TREE_TYPE (dlhs), drhs);
3495 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3496 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3499 /* From this point on, the function deals with assignments in between
3500 aggregates when at least one has scalar reductions of some of its
3501 components. There are three possible scenarios: 1) both the LHS and RHS have
3502 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3504 In the first case, we would like to load the LHS components from RHS
3505 components whenever possible. If that is not possible, we would like to
3506 read it directly from the RHS (after updating it by storing in it its own
3507 components). If there are some necessary unscalarized data in the LHS,
3508 those will be loaded by the original assignment too. If neither of these
3509 cases happen, the original statement can be removed. Most of this is done
3510 by load_assign_lhs_subreplacements.
3512 In the second case, we would like to store all RHS scalarized components
3513 directly into LHS and if they cover the aggregate completely, remove the
3514 statement too. In the third case, we want the LHS components to be loaded
3515 directly from the RHS (DSE will remove the original statement if it
3516 becomes redundant).
3518 This is a bit complex but manageable when types match and when unions do
3519 not cause confusion in a way that we cannot really load a component of LHS
3520 from the RHS or vice versa (the access representing this level can have
3521 subaccesses that are accessible only through a different union field at a
3522 higher level - different from the one used in the examined expression).
3523 Unions are fun.
3525 Therefore, I specially handle a fourth case, happening when there is a
3526 specific type cast or it is impossible to locate a scalarized subaccess on
3527 the other side of the expression. If that happens, I simply "refresh" the
3528 RHS by storing into it its scalarized components, leave the original statement
3529 there to do the copying and then load the scalar replacements of the LHS.
3530 This is what the first branch does. */
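/* For instance, for an assignment a = b between two aggregates of type
   struct S { int i; float f; } in which every component of both sides has a
   scalar replacement, the replacements of a are assigned directly from those
   of b and the original aggregate copy, having become redundant, is removed.
   When b additionally carries data not covered by any replacement, b is first
   refreshed from its replacements, the aggregate copy is kept, and any
   components of a that cannot be loaded from a replacement of b are loaded
   from b itself afterwards.  */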
3532 if (modify_this_stmt
3533 || gimple_has_volatile_ops (stmt)
3534 || contains_vce_or_bfcref_p (rhs)
3535 || contains_vce_or_bfcref_p (lhs)
3536 || stmt_ends_bb_p (stmt))
3538 /* No need to copy into a constant-pool, it comes pre-initialized. */
3539 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3540 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3541 gsi, false, false, loc);
3542 if (access_has_children_p (lacc))
3544 gimple_stmt_iterator alt_gsi = gsi_none ();
3545 if (stmt_ends_bb_p (stmt))
3547 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3548 gsi = &alt_gsi;
3550 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3551 gsi, true, true, loc);
3553 sra_stats.separate_lhs_rhs_handling++;
3555 /* This gimplification must be done after generate_subtree_copies,
3556 lest we insert the subtree copies in the middle of the gimplified
3557 sequence. */
3558 if (force_gimple_rhs)
3559 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3560 true, GSI_SAME_STMT);
3561 if (gimple_assign_rhs1 (stmt) != rhs)
3563 modify_this_stmt = true;
3564 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3565 gcc_assert (stmt == gsi_stmt (orig_gsi));
3568 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3570 else
3572 if (access_has_children_p (lacc)
3573 && access_has_children_p (racc)
3574 /* When an access represents an unscalarizable region, it usually
3575 represents accesses with variable offset and thus must not be used
3576 to generate new memory accesses. */
3577 && !lacc->grp_unscalarizable_region
3578 && !racc->grp_unscalarizable_region)
3580 struct subreplacement_assignment_data sad;
3582 sad.left_offset = lacc->offset;
3583 sad.assignment_lhs = lhs;
3584 sad.assignment_rhs = rhs;
3585 sad.top_racc = racc;
3586 sad.old_gsi = *gsi;
3587 sad.new_gsi = gsi;
3588 sad.loc = gimple_location (stmt);
3589 sad.refreshed = SRA_UDH_NONE;
3591 if (lacc->grp_read && !lacc->grp_covered)
3592 handle_unscalarized_data_in_subtree (&sad);
3594 load_assign_lhs_subreplacements (lacc, &sad);
3595 if (sad.refreshed != SRA_UDH_RIGHT)
3597 gsi_next (gsi);
3598 unlink_stmt_vdef (stmt);
3599 gsi_remove (&sad.old_gsi, true);
3600 release_defs (stmt);
3601 sra_stats.deleted++;
3602 return SRA_AM_REMOVED;
3605 else
3607 if (access_has_children_p (racc)
3608 && !racc->grp_unscalarized_data
3609 && TREE_CODE (lhs) != SSA_NAME)
3611 if (dump_file)
3613 fprintf (dump_file, "Removing load: ");
3614 print_gimple_stmt (dump_file, stmt, 0);
3616 generate_subtree_copies (racc->first_child, lhs,
3617 racc->offset, 0, 0, gsi,
3618 false, false, loc);
3619 gcc_assert (stmt == gsi_stmt (*gsi));
3620 unlink_stmt_vdef (stmt);
3621 gsi_remove (gsi, true);
3622 release_defs (stmt);
3623 sra_stats.deleted++;
3624 return SRA_AM_REMOVED;
3626 /* Restore the aggregate RHS from its components so the
3627 prevailing aggregate copy does the right thing. */
3628 if (access_has_children_p (racc))
3629 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3630 gsi, false, false, loc);
3631 /* Re-load the components of the aggregate copy destination.
3632 But use the RHS aggregate to load from to expose more
3633 optimization opportunities. */
3634 if (access_has_children_p (lacc))
3635 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3636 0, 0, gsi, true, true, loc);
3639 return SRA_AM_NONE;
3643 /* Set any scalar replacements of values in the constant pool to the initial
3644 value of the constant. (Constant-pool decls like *.LC0 have effectively
3645 been initialized before the program starts, we must do the same for their
3646 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3647 the function's entry block. */
3649 static void
3650 initialize_constant_pool_replacements (void)
3652 gimple_seq seq = NULL;
3653 gimple_stmt_iterator gsi = gsi_start (seq);
3654 bitmap_iterator bi;
3655 unsigned i;
3657 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3659 tree var = candidate (i);
3660 if (!constant_decl_p (var))
3661 continue;
3662 vec<access_p> *access_vec = get_base_access_vector (var);
3663 if (!access_vec)
3664 continue;
3665 for (unsigned i = 0; i < access_vec->length (); i++)
3667 struct access *access = (*access_vec)[i];
3668 if (!access->replacement_decl)
3669 continue;
3670 gassign *stmt
3671 = gimple_build_assign (get_access_replacement (access),
3672 unshare_expr (access->expr));
3673 if (dump_file && (dump_flags & TDF_DETAILS))
3675 fprintf (dump_file, "Generating constant initializer: ");
3676 print_gimple_stmt (dump_file, stmt, 0);
3677 fprintf (dump_file, "\n");
3679 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3680 update_stmt (stmt);
3684 seq = gsi_seq (gsi);
3685 if (seq)
3686 gsi_insert_seq_on_edge_immediate (
3687 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3690 /* Traverse the function body and perform all modifications as decided in
3691 analyze_all_variable_accesses. Return true iff the CFG has been
3692 changed. */
3694 static bool
3695 sra_modify_function_body (void)
3697 bool cfg_changed = false;
3698 basic_block bb;
3700 initialize_constant_pool_replacements ();
3702 FOR_EACH_BB_FN (bb, cfun)
3704 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3705 while (!gsi_end_p (gsi))
3707 gimple *stmt = gsi_stmt (gsi);
3708 enum assignment_mod_result assign_result;
3709 bool modified = false, deleted = false;
3710 tree *t;
3711 unsigned i;
3713 switch (gimple_code (stmt))
3715 case GIMPLE_RETURN:
3716 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3717 if (*t != NULL_TREE)
3718 modified |= sra_modify_expr (t, &gsi, false);
3719 break;
3721 case GIMPLE_ASSIGN:
3722 assign_result = sra_modify_assign (stmt, &gsi);
3723 modified |= assign_result == SRA_AM_MODIFIED;
3724 deleted = assign_result == SRA_AM_REMOVED;
3725 break;
3727 case GIMPLE_CALL:
3728 /* Operands must be processed before the lhs. */
3729 for (i = 0; i < gimple_call_num_args (stmt); i++)
3731 t = gimple_call_arg_ptr (stmt, i);
3732 modified |= sra_modify_expr (t, &gsi, false);
3735 if (gimple_call_lhs (stmt))
3737 t = gimple_call_lhs_ptr (stmt);
3738 modified |= sra_modify_expr (t, &gsi, true);
3740 break;
3742 case GIMPLE_ASM:
3744 gasm *asm_stmt = as_a <gasm *> (stmt);
3745 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3747 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3748 modified |= sra_modify_expr (t, &gsi, false);
3750 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3752 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3753 modified |= sra_modify_expr (t, &gsi, true);
3756 break;
3758 default:
3759 break;
3762 if (modified)
3764 update_stmt (stmt);
3765 if (maybe_clean_eh_stmt (stmt)
3766 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3767 cfg_changed = true;
3769 if (!deleted)
3770 gsi_next (&gsi);
3774 gsi_commit_edge_inserts ();
3775 return cfg_changed;
3778 /* Generate statements initializing scalar replacements of parts of function
3779 parameters. */
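/* Roughly, given

     struct point { int x; int y; };
     int f (struct point p) { return p.x + p.y; }

   where p.x and p.y receive scalar replacements, statements along the lines
   of 'SR.p_x = p.x;' and 'SR.p_y = p.y;' are placed on the edge leaving the
   entry block (the replacement names are only illustrative).  */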
3781 static void
3782 initialize_parameter_reductions (void)
3784 gimple_stmt_iterator gsi;
3785 gimple_seq seq = NULL;
3786 tree parm;
3788 gsi = gsi_start (seq);
3789 for (parm = DECL_ARGUMENTS (current_function_decl);
3790 parm;
3791 parm = DECL_CHAIN (parm))
3793 vec<access_p> *access_vec;
3794 struct access *access;
3796 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3797 continue;
3798 access_vec = get_base_access_vector (parm);
3799 if (!access_vec)
3800 continue;
3802 for (access = (*access_vec)[0];
3803 access;
3804 access = access->next_grp)
3805 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3806 EXPR_LOCATION (parm));
3809 seq = gsi_seq (gsi);
3810 if (seq)
3811 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3814 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3815 it reveals there are components of some aggregates to be scalarized, it runs
3816 the required transformations. */
3817 static unsigned int
3818 perform_intra_sra (void)
3820 int ret = 0;
3821 sra_initialize ();
3823 if (!find_var_candidates ())
3824 goto out;
3826 if (!scan_function ())
3827 goto out;
3829 if (!analyze_all_variable_accesses ())
3830 goto out;
3832 if (sra_modify_function_body ())
3833 ret = TODO_update_ssa | TODO_cleanup_cfg;
3834 else
3835 ret = TODO_update_ssa;
3836 initialize_parameter_reductions ();
3838 statistics_counter_event (cfun, "Scalar replacements created",
3839 sra_stats.replacements);
3840 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3841 statistics_counter_event (cfun, "Subtree copy stmts",
3842 sra_stats.subtree_copies);
3843 statistics_counter_event (cfun, "Subreplacement stmts",
3844 sra_stats.subreplacements);
3845 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3846 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3847 sra_stats.separate_lhs_rhs_handling);
3849 out:
3850 sra_deinitialize ();
3851 return ret;
3854 /* Perform early intraprocedural SRA. */
3855 static unsigned int
3856 early_intra_sra (void)
3858 sra_mode = SRA_MODE_EARLY_INTRA;
3859 return perform_intra_sra ();
3862 /* Perform "late" intraprocedural SRA. */
3863 static unsigned int
3864 late_intra_sra (void)
3866 sra_mode = SRA_MODE_INTRA;
3867 return perform_intra_sra ();
3871 static bool
3872 gate_intra_sra (void)
3874 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3878 namespace {
3880 const pass_data pass_data_sra_early =
3882 GIMPLE_PASS, /* type */
3883 "esra", /* name */
3884 OPTGROUP_NONE, /* optinfo_flags */
3885 TV_TREE_SRA, /* tv_id */
3886 ( PROP_cfg | PROP_ssa ), /* properties_required */
3887 0, /* properties_provided */
3888 0, /* properties_destroyed */
3889 0, /* todo_flags_start */
3890 TODO_update_ssa, /* todo_flags_finish */
3893 class pass_sra_early : public gimple_opt_pass
3895 public:
3896 pass_sra_early (gcc::context *ctxt)
3897 : gimple_opt_pass (pass_data_sra_early, ctxt)
3900 /* opt_pass methods: */
3901 virtual bool gate (function *) { return gate_intra_sra (); }
3902 virtual unsigned int execute (function *) { return early_intra_sra (); }
3904 }; // class pass_sra_early
3906 } // anon namespace
3908 gimple_opt_pass *
3909 make_pass_sra_early (gcc::context *ctxt)
3911 return new pass_sra_early (ctxt);
3914 namespace {
3916 const pass_data pass_data_sra =
3918 GIMPLE_PASS, /* type */
3919 "sra", /* name */
3920 OPTGROUP_NONE, /* optinfo_flags */
3921 TV_TREE_SRA, /* tv_id */
3922 ( PROP_cfg | PROP_ssa ), /* properties_required */
3923 0, /* properties_provided */
3924 0, /* properties_destroyed */
3925 TODO_update_address_taken, /* todo_flags_start */
3926 TODO_update_ssa, /* todo_flags_finish */
3929 class pass_sra : public gimple_opt_pass
3931 public:
3932 pass_sra (gcc::context *ctxt)
3933 : gimple_opt_pass (pass_data_sra, ctxt)
3936 /* opt_pass methods: */
3937 virtual bool gate (function *) { return gate_intra_sra (); }
3938 virtual unsigned int execute (function *) { return late_intra_sra (); }
3940 }; // class pass_sra
3942 } // anon namespace
3944 gimple_opt_pass *
3945 make_pass_sra (gcc::context *ctxt)
3947 return new pass_sra (ctxt);
3951 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3952 parameter. */
3954 static bool
3955 is_unused_scalar_param (tree parm)
3957 tree name;
3958 return (is_gimple_reg (parm)
3959 && (!(name = ssa_default_def (cfun, parm))
3960 || has_zero_uses (name)));
3963 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3964 examine whether there are any direct or otherwise infeasible ones. If so,
3965 return true, otherwise return false. PARM must be a gimple register with a
3966 non-NULL default definition. */
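/* For example, with 'void f (int *p)', uses such as '*p = 1;' or
   'tmp = *p;' merely dereference P and are acceptable, whereas 'q = p;',
   pointer arithmetic on P, or passing P itself to another call use the
   pointer value directly and make the parameter unsuitable (a rough
   illustration of the checks below).  */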
3968 static bool
3969 ptr_parm_has_direct_uses (tree parm)
3971 imm_use_iterator ui;
3972 gimple *stmt;
3973 tree name = ssa_default_def (cfun, parm);
3974 bool ret = false;
3976 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3978 int uses_ok = 0;
3979 use_operand_p use_p;
3981 if (is_gimple_debug (stmt))
3982 continue;
3984 /* Valid uses include dereferences on the lhs and the rhs. */
3985 if (gimple_has_lhs (stmt))
3987 tree lhs = gimple_get_lhs (stmt);
3988 while (handled_component_p (lhs))
3989 lhs = TREE_OPERAND (lhs, 0);
3990 if (TREE_CODE (lhs) == MEM_REF
3991 && TREE_OPERAND (lhs, 0) == name
3992 && integer_zerop (TREE_OPERAND (lhs, 1))
3993 && types_compatible_p (TREE_TYPE (lhs),
3994 TREE_TYPE (TREE_TYPE (name)))
3995 && !TREE_THIS_VOLATILE (lhs))
3996 uses_ok++;
3998 if (gimple_assign_single_p (stmt))
4000 tree rhs = gimple_assign_rhs1 (stmt);
4001 while (handled_component_p (rhs))
4002 rhs = TREE_OPERAND (rhs, 0);
4003 if (TREE_CODE (rhs) == MEM_REF
4004 && TREE_OPERAND (rhs, 0) == name
4005 && integer_zerop (TREE_OPERAND (rhs, 1))
4006 && types_compatible_p (TREE_TYPE (rhs),
4007 TREE_TYPE (TREE_TYPE (name)))
4008 && !TREE_THIS_VOLATILE (rhs))
4009 uses_ok++;
4011 else if (is_gimple_call (stmt))
4013 unsigned i;
4014 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4016 tree arg = gimple_call_arg (stmt, i);
4017 while (handled_component_p (arg))
4018 arg = TREE_OPERAND (arg, 0);
4019 if (TREE_CODE (arg) == MEM_REF
4020 && TREE_OPERAND (arg, 0) == name
4021 && integer_zerop (TREE_OPERAND (arg, 1))
4022 && types_compatible_p (TREE_TYPE (arg),
4023 TREE_TYPE (TREE_TYPE (name)))
4024 && !TREE_THIS_VOLATILE (arg))
4025 uses_ok++;
4029 /* If the number of valid uses does not match the number of
4030 uses in this stmt there is an unhandled use. */
4031 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4032 --uses_ok;
4034 if (uses_ok != 0)
4035 ret = true;
4037 if (ret)
4038 BREAK_FROM_IMM_USE_STMT (ui);
4041 return ret;
4044 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4045 them in candidate_bitmap. Note that these do not necessarily include
4046 parameters which are unused and thus can be removed. Return true iff any
4047 such candidate has been found. */
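/* In short, candidates are aggregates passed by value and pointers to
   complete, fixed-size, non-volatile aggregates or scalars; volatile or
   addressable parameters, va_list parameters and pointers with direct uses
   (see ptr_parm_has_direct_uses) are skipped (a brief summary of the checks
   below).  */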
4049 static bool
4050 find_param_candidates (void)
4052 tree parm;
4053 int count = 0;
4054 bool ret = false;
4055 const char *msg;
4057 for (parm = DECL_ARGUMENTS (current_function_decl);
4058 parm;
4059 parm = DECL_CHAIN (parm))
4061 tree type = TREE_TYPE (parm);
4062 tree_node **slot;
4064 count++;
4066 if (TREE_THIS_VOLATILE (parm)
4067 || TREE_ADDRESSABLE (parm)
4068 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4069 continue;
4071 if (is_unused_scalar_param (parm))
4073 ret = true;
4074 continue;
4077 if (POINTER_TYPE_P (type))
4079 type = TREE_TYPE (type);
4081 if (TREE_CODE (type) == FUNCTION_TYPE
4082 || TYPE_VOLATILE (type)
4083 || (TREE_CODE (type) == ARRAY_TYPE
4084 && TYPE_NONALIASED_COMPONENT (type))
4085 || !is_gimple_reg (parm)
4086 || is_va_list_type (type)
4087 || ptr_parm_has_direct_uses (parm))
4088 continue;
4090 else if (!AGGREGATE_TYPE_P (type))
4091 continue;
4093 if (!COMPLETE_TYPE_P (type)
4094 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4095 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4096 || (AGGREGATE_TYPE_P (type)
4097 && type_internals_preclude_sra_p (type, &msg)))
4098 continue;
4100 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4101 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4102 *slot = parm;
4104 ret = true;
4105 if (dump_file && (dump_flags & TDF_DETAILS))
4107 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4108 print_generic_expr (dump_file, parm);
4109 fprintf (dump_file, "\n");
4113 func_param_count = count;
4114 return ret;
4117 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4118 maybe_modified. */
4120 static bool
4121 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4122 void *data)
4124 struct access *repr = (struct access *) data;
4126 repr->grp_maybe_modified = 1;
4127 return true;
4130 /* Analyze what representatives (in linked lists accessible from
4131 REPRESENTATIVES) can be modified by side effects of statements in the
4132 current function. */
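/* For by-reference parameters this walks the virtual operand chain from
   each read access via walk_aliased_vdefs; any potentially aliasing store
   found on the way sets grp_maybe_modified, which later keeps the data
   being passed by reference rather than by value.  */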
4134 static void
4135 analyze_modified_params (vec<access_p> representatives)
4137 int i;
4139 for (i = 0; i < func_param_count; i++)
4141 struct access *repr;
4143 for (repr = representatives[i];
4144 repr;
4145 repr = repr->next_grp)
4147 struct access *access;
4148 bitmap visited;
4149 ao_ref ar;
4151 if (no_accesses_p (repr))
4152 continue;
4153 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4154 || repr->grp_maybe_modified)
4155 continue;
4157 ao_ref_init (&ar, repr->expr);
4158 visited = BITMAP_ALLOC (NULL);
4159 for (access = repr; access; access = access->next_sibling)
4161 /* All accesses are read ones; otherwise grp_maybe_modified would
4162 be trivially set. */
4163 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4164 mark_maybe_modified, repr, &visited);
4165 if (repr->grp_maybe_modified)
4166 break;
4168 BITMAP_FREE (visited);
4173 /* Propagate distances in bb_dereferences in the opposite direction to the
4174 control flow edges, in each step storing the maximum of the current value
4175 and the minimum of all successors. These steps are repeated until the table
4176 stabilizes. Note that BBs which might terminate the function (according to
4177 the final_bbs bitmap) are never updated in this way. */
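/* A small sketch of one step: if both successors of a block guarantee a
   dereference up to offset + size 32 while the block itself only reaches 8,
   the minimum over the successors is 32 and the stored value becomes
   max (8, 32) = 32, i.e. the dereference is certain on every path leaving
   the block.  */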
4179 static void
4180 propagate_dereference_distances (void)
4182 basic_block bb;
4184 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4185 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4186 FOR_EACH_BB_FN (bb, cfun)
4188 queue.quick_push (bb);
4189 bb->aux = bb;
4192 while (!queue.is_empty ())
4194 edge_iterator ei;
4195 edge e;
4196 bool change = false;
4197 int i;
4199 bb = queue.pop ();
4200 bb->aux = NULL;
4202 if (bitmap_bit_p (final_bbs, bb->index))
4203 continue;
4205 for (i = 0; i < func_param_count; i++)
4207 int idx = bb->index * func_param_count + i;
4208 bool first = true;
4209 HOST_WIDE_INT inh = 0;
4211 FOR_EACH_EDGE (e, ei, bb->succs)
4213 int succ_idx = e->dest->index * func_param_count + i;
4215 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4216 continue;
4218 if (first)
4220 first = false;
4221 inh = bb_dereferences [succ_idx];
4223 else if (bb_dereferences [succ_idx] < inh)
4224 inh = bb_dereferences [succ_idx];
4227 if (!first && bb_dereferences[idx] < inh)
4229 bb_dereferences[idx] = inh;
4230 change = true;
4234 if (change && !bitmap_bit_p (final_bbs, bb->index))
4235 FOR_EACH_EDGE (e, ei, bb->preds)
4237 if (e->src->aux)
4238 continue;
4240 e->src->aux = e->src;
4241 queue.quick_push (e->src);
4246 /* Dump a dereferences TABLE with heading STR to file F. */
4248 static void
4249 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4251 basic_block bb;
4253 fprintf (f, "%s", str);
4254 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4255 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4257 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4258 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4260 int i;
4261 for (i = 0; i < func_param_count; i++)
4263 int idx = bb->index * func_param_count + i;
4264 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4267 fprintf (f, "\n");
4269 fprintf (f, "\n");
4272 /* Determine which (parts of) parameters passed by reference and not
4273 assigned to are not certainly dereferenced in this function, which means
4274 the dereferencing cannot be safely moved to the caller without potentially
4275 introducing a segfault. Mark such REPRESENTATIVES as
4276 grp_not_necessarilly_dereferenced.
4278 For each pointer parameter, the maximum dereferenced "distance," i.e. the
4279 offset + size of the accessed part, is calculated rather than a simple
4280 boolean, in order to handle cases when only a fraction of the whole
4281 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4282 an example).
4284 The maximum dereference distances for each pointer parameter and BB are
4285 already stored in bb_dereferences. This routine simply propagates these
4286 values upwards by propagate_dereference_distances and then compares the
4287 distances of individual parameters in the ENTRY BB to the equivalent
4288 distances of each representative of a (fraction of a) parameter. */
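/* For example, in

     int f (int *p, int flag) { return flag ? *p : 0; }

   *p is not dereferenced on every path, so loading the value in the callers
   instead (and passing it by value) could introduce a fault for callers
   that pass an invalid pointer together with a zero FLAG (a rough
   illustration).  */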
4290 static void
4291 analyze_caller_dereference_legality (vec<access_p> representatives)
4293 int i;
4295 if (dump_file && (dump_flags & TDF_DETAILS))
4296 dump_dereferences_table (dump_file,
4297 "Dereference table before propagation:\n",
4298 bb_dereferences);
4300 propagate_dereference_distances ();
4302 if (dump_file && (dump_flags & TDF_DETAILS))
4303 dump_dereferences_table (dump_file,
4304 "Dereference table after propagation:\n",
4305 bb_dereferences);
4307 for (i = 0; i < func_param_count; i++)
4309 struct access *repr = representatives[i];
4310 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4312 if (!repr || no_accesses_p (repr))
4313 continue;
4317 if ((repr->offset + repr->size) > bb_dereferences[idx])
4318 repr->grp_not_necessarilly_dereferenced = 1;
4319 repr = repr->next_grp;
4321 while (repr);
4325 /* Return the representative access for the parameter declaration PARM if it is
4326 a scalar passed by reference which is not written to and the pointer value
4327 is not used directly. Thus, if it is legal to dereference it in the caller
4328 and we can rule out modifications through aliases, such a parameter should be
4329 turned into one passed by value. Return NULL otherwise. */
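/* Schematically, a parameter such as the one in

     int f (const int *p) { return *p + 1; }

   which is only read through the pointer and whose pointer value is never
   used otherwise may become 'int f (int p)', with callers adjusted to pass
   the dereferenced value instead of the address.  */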
4331 static struct access *
4332 unmodified_by_ref_scalar_representative (tree parm)
4334 int i, access_count;
4335 struct access *repr;
4336 vec<access_p> *access_vec;
4338 access_vec = get_base_access_vector (parm);
4339 gcc_assert (access_vec);
4340 repr = (*access_vec)[0];
4341 if (repr->write)
4342 return NULL;
4343 repr->group_representative = repr;
4345 access_count = access_vec->length ();
4346 for (i = 1; i < access_count; i++)
4348 struct access *access = (*access_vec)[i];
4349 if (access->write)
4350 return NULL;
4351 access->group_representative = repr;
4352 access->next_sibling = repr->next_sibling;
4353 repr->next_sibling = access;
4356 repr->grp_read = 1;
4357 repr->grp_scalar_ptr = 1;
4358 return repr;
4361 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4362 associated with. REQ_ALIGN is the minimum required alignment. */
4364 static bool
4365 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4367 unsigned int exp_align;
4368 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4369 is an incompatible assignment in a call statement (and possibly even in asm
4370 statements). This can be relaxed by using a new temporary but only for
4371 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4372 intraprocedural SRA we deal with this by keeping the old aggregate around,
4373 something we cannot do in IPA-SRA.) */
4374 if (access->write
4375 && (is_gimple_call (access->stmt)
4376 || gimple_code (access->stmt) == GIMPLE_ASM))
4377 return true;
4379 exp_align = get_object_alignment (access->expr);
4380 if (exp_align < req_align)
4381 return true;
4383 return false;
4387 /* Sort collected accesses for parameter PARM, identify representatives for
4388 each accessed region and link them together. Return NULL if there are
4389 different but overlapping accesses, return the special ptr value meaning
4390 there are no accesses for this parameter if that is the case, and return
4391 the first representative otherwise. Set *RO_GRP if there is a group of
4392 accesses with only read (i.e. no write) accesses. */
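/* For instance, if one statement accesses a 4-byte field at offset 0 of the
   pointed-to aggregate while another accesses 8 bytes starting at the same
   offset, the accesses overlap without being identical, so NULL is returned
   and the parameter is left untouched (a rough illustration of the
   all-or-nothing rule below).  */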
4394 static struct access *
4395 splice_param_accesses (tree parm, bool *ro_grp)
4397 int i, j, access_count, group_count;
4398 int agg_size, total_size = 0;
4399 struct access *access, *res, **prev_acc_ptr = &res;
4400 vec<access_p> *access_vec;
4402 access_vec = get_base_access_vector (parm);
4403 if (!access_vec)
4404 return &no_accesses_representant;
4405 access_count = access_vec->length ();
4407 access_vec->qsort (compare_access_positions);
4409 i = 0;
4410 total_size = 0;
4411 group_count = 0;
4412 while (i < access_count)
4414 bool modification;
4415 tree a1_alias_type;
4416 access = (*access_vec)[i];
4417 modification = access->write;
4418 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4419 return NULL;
4420 a1_alias_type = reference_alias_ptr_type (access->expr);
4422 /* Access is about to become group representative unless we find some
4423 nasty overlap which would preclude us from breaking this parameter
4424 apart. */
4426 j = i + 1;
4427 while (j < access_count)
4429 struct access *ac2 = (*access_vec)[j];
4430 if (ac2->offset != access->offset)
4432 /* All or nothing law for parameters. */
4433 if (access->offset + access->size > ac2->offset)
4434 return NULL;
4435 else
4436 break;
4438 else if (ac2->size != access->size)
4439 return NULL;
4441 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4442 || (ac2->type != access->type
4443 && (TREE_ADDRESSABLE (ac2->type)
4444 || TREE_ADDRESSABLE (access->type)))
4445 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4446 return NULL;
4448 modification |= ac2->write;
4449 ac2->group_representative = access;
4450 ac2->next_sibling = access->next_sibling;
4451 access->next_sibling = ac2;
4452 j++;
4455 group_count++;
4456 access->grp_maybe_modified = modification;
4457 if (!modification)
4458 *ro_grp = true;
4459 *prev_acc_ptr = access;
4460 prev_acc_ptr = &access->next_grp;
4461 total_size += access->size;
4462 i = j;
4465 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4466 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4467 else
4468 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4469 if (total_size >= agg_size)
4470 return NULL;
4472 gcc_assert (group_count > 0);
4473 return res;
4476 /* Decide whether parameters with representative accesses given by REPR should
4477 be reduced into components. */
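/* A rough numerical example (sizes here are in bits): for a 16-byte
   aggregate passed by reference on a 64-bit target, agg_size is 128 and,
   with the default PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2, parm_size_limit
   is 2 * 64 = 128.  If only two 4-byte fields are read, total_size is 64,
   which passes both checks, so the parameter is split into two scalars.  */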
4479 static int
4480 decide_one_param_reduction (struct access *repr)
4482 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4483 bool by_ref;
4484 tree parm;
4486 parm = repr->base;
4487 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4488 gcc_assert (cur_parm_size > 0);
4490 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4492 by_ref = true;
4493 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4495 else
4497 by_ref = false;
4498 agg_size = cur_parm_size;
4501 if (dump_file)
4503 struct access *acc;
4504 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4505 print_generic_expr (dump_file, parm);
4506 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4507 for (acc = repr; acc; acc = acc->next_grp)
4508 dump_access (dump_file, acc, true);
4511 total_size = 0;
4512 new_param_count = 0;
4514 for (; repr; repr = repr->next_grp)
4516 gcc_assert (parm == repr->base);
4518 /* Taking the address of a non-addressable field is verboten. */
4519 if (by_ref && repr->non_addressable)
4520 return 0;
4522 /* Do not decompose a non-BLKmode param in a way that would
4523 create BLKmode params. Especially for by-reference passing
4524 (thus, pointer-type param) this is hardly worthwhile. */
4525 if (DECL_MODE (parm) != BLKmode
4526 && TYPE_MODE (repr->type) == BLKmode)
4527 return 0;
4529 if (!by_ref || (!repr->grp_maybe_modified
4530 && !repr->grp_not_necessarilly_dereferenced))
4531 total_size += repr->size;
4532 else
4533 total_size += cur_parm_size;
4535 new_param_count++;
4538 gcc_assert (new_param_count > 0);
4540 if (optimize_function_for_size_p (cfun))
4541 parm_size_limit = cur_parm_size;
4542 else
4543 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4544 * cur_parm_size);
4546 if (total_size < agg_size
4547 && total_size <= parm_size_limit)
4549 if (dump_file)
4550 fprintf (dump_file, " ....will be split into %i components\n",
4551 new_param_count);
4552 return new_param_count;
4554 else
4555 return 0;
4558 /* The order of the following enums is important; we need to do extra work for
4559 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4560 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4561 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4563 /* Identify representatives of all accesses to all candidate parameters for
4564 IPA-SRA. Return result based on what representatives have been found. */
4566 static enum ipa_splicing_result
4567 splice_all_param_accesses (vec<access_p> &representatives)
4569 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4570 tree parm;
4571 struct access *repr;
4573 representatives.create (func_param_count);
4575 for (parm = DECL_ARGUMENTS (current_function_decl);
4576 parm;
4577 parm = DECL_CHAIN (parm))
4579 if (is_unused_scalar_param (parm))
4581 representatives.quick_push (&no_accesses_representant);
4582 if (result == NO_GOOD_ACCESS)
4583 result = UNUSED_PARAMS;
4585 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4586 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4587 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4589 repr = unmodified_by_ref_scalar_representative (parm);
4590 representatives.quick_push (repr);
4591 if (repr)
4592 result = UNMODIF_BY_REF_ACCESSES;
4594 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4596 bool ro_grp = false;
4597 repr = splice_param_accesses (parm, &ro_grp);
4598 representatives.quick_push (repr);
4600 if (repr && !no_accesses_p (repr))
4602 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4604 if (ro_grp)
4605 result = UNMODIF_BY_REF_ACCESSES;
4606 else if (result < MODIF_BY_REF_ACCESSES)
4607 result = MODIF_BY_REF_ACCESSES;
4609 else if (result < BY_VAL_ACCESSES)
4610 result = BY_VAL_ACCESSES;
4612 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4613 result = UNUSED_PARAMS;
4615 else
4616 representatives.quick_push (NULL);
4619 if (result == NO_GOOD_ACCESS)
4621 representatives.release ();
4622 return NO_GOOD_ACCESS;
4625 return result;
4628 /* Return the index of BASE in PARMS. Abort if it is not found. */
4630 static inline int
4631 get_param_index (tree base, vec<tree> parms)
4633 int i, len;
4635 len = parms.length ();
4636 for (i = 0; i < len; i++)
4637 if (parms[i] == base)
4638 return i;
4639 gcc_unreachable ();
4642 /* Convert the decisions made at the representative level into compact
4643 parameter adjustments. REPRESENTATIVES are pointers to the first
4644 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the
4645 expected final number of adjustments. */
4647 static ipa_parm_adjustment_vec
4648 turn_representatives_into_adjustments (vec<access_p> representatives,
4649 int adjustments_count)
4651 vec<tree> parms;
4652 ipa_parm_adjustment_vec adjustments;
4653 tree parm;
4654 int i;
4656 gcc_assert (adjustments_count > 0);
4657 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4658 adjustments.create (adjustments_count);
4659 parm = DECL_ARGUMENTS (current_function_decl);
4660 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4662 struct access *repr = representatives[i];
4664 if (!repr || no_accesses_p (repr))
4666 struct ipa_parm_adjustment adj;
4668 memset (&adj, 0, sizeof (adj));
4669 adj.base_index = get_param_index (parm, parms);
4670 adj.base = parm;
4671 if (!repr)
4672 adj.op = IPA_PARM_OP_COPY;
4673 else
4674 adj.op = IPA_PARM_OP_REMOVE;
4675 adj.arg_prefix = "ISRA";
4676 adjustments.quick_push (adj);
4678 else
4680 struct ipa_parm_adjustment adj;
4681 int index = get_param_index (parm, parms);
4683 for (; repr; repr = repr->next_grp)
4685 memset (&adj, 0, sizeof (adj));
4686 gcc_assert (repr->base == parm);
4687 adj.base_index = index;
4688 adj.base = repr->base;
4689 adj.type = repr->type;
4690 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4691 adj.offset = repr->offset;
4692 adj.reverse = repr->reverse;
4693 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4694 && (repr->grp_maybe_modified
4695 || repr->grp_not_necessarilly_dereferenced));
4696 adj.arg_prefix = "ISRA";
4697 adjustments.quick_push (adj);
4701 parms.release ();
4702 return adjustments;
4705 /* Analyze the collected accesses and produce a plan for what to do with the
4706 parameters in the form of adjustments, NULL meaning nothing. */
4708 static ipa_parm_adjustment_vec
4709 analyze_all_param_acesses (void)
4711 enum ipa_splicing_result repr_state;
4712 bool proceed = false;
4713 int i, adjustments_count = 0;
4714 vec<access_p> representatives;
4715 ipa_parm_adjustment_vec adjustments;
4717 repr_state = splice_all_param_accesses (representatives);
4718 if (repr_state == NO_GOOD_ACCESS)
4719 return ipa_parm_adjustment_vec ();
4721 /* If there are any parameters passed by reference which are not modified
4722 directly, we need to check whether they can be modified indirectly. */
4723 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4725 analyze_caller_dereference_legality (representatives);
4726 analyze_modified_params (representatives);
4729 for (i = 0; i < func_param_count; i++)
4731 struct access *repr = representatives[i];
4733 if (repr && !no_accesses_p (repr))
4735 if (repr->grp_scalar_ptr)
4737 adjustments_count++;
4738 if (repr->grp_not_necessarilly_dereferenced
4739 || repr->grp_maybe_modified)
4740 representatives[i] = NULL;
4741 else
4743 proceed = true;
4744 sra_stats.scalar_by_ref_to_by_val++;
4747 else
4749 int new_components = decide_one_param_reduction (repr);
4751 if (new_components == 0)
4753 representatives[i] = NULL;
4754 adjustments_count++;
4756 else
4758 adjustments_count += new_components;
4759 sra_stats.aggregate_params_reduced++;
4760 sra_stats.param_reductions_created += new_components;
4761 proceed = true;
4765 else
4767 if (no_accesses_p (repr))
4769 proceed = true;
4770 sra_stats.deleted_unused_parameters++;
4772 adjustments_count++;
4776 if (!proceed && dump_file)
4777 fprintf (dump_file, "NOT proceeding to change params.\n");
4779 if (proceed)
4780 adjustments = turn_representatives_into_adjustments (representatives,
4781 adjustments_count);
4782 else
4783 adjustments = ipa_parm_adjustment_vec ();
4785 representatives.release ();
4786 return adjustments;
4789 /* If a parameter replacement identified by ADJ does not yet exist in the form
4790 of a declaration, create it and record it, otherwise return the previously
4791 created one. */
4793 static tree
4794 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4796 tree repl;
4797 if (!adj->new_ssa_base)
4799 char *pretty_name = make_fancy_name (adj->base);
4801 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4802 DECL_NAME (repl) = get_identifier (pretty_name);
4803 DECL_NAMELESS (repl) = 1;
4804 obstack_free (&name_obstack, pretty_name);
4806 adj->new_ssa_base = repl;
4808 else
4809 repl = adj->new_ssa_base;
4810 return repl;
4813 /* Find the first adjustment for a particular parameter BASE in a vector of
4814 ADJUSTMENTS which is not a plain copy (IPA_PARM_OP_COPY). Return NULL if
4815 there is no such adjustment. */
4817 static struct ipa_parm_adjustment *
4818 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4820 int i, len;
4822 len = adjustments.length ();
4823 for (i = 0; i < len; i++)
4825 struct ipa_parm_adjustment *adj;
4827 adj = &adjustments[i];
4828 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4829 return adj;
4832 return NULL;
4835 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4836 parameter which is to be removed because its value is not used, create a new
4837 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4838 original with it and return it. If there is no need to re-map, return NULL.
4839 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4841 static tree
4842 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4843 ipa_parm_adjustment_vec adjustments)
4845 struct ipa_parm_adjustment *adj;
4846 tree decl, repl, new_name;
4848 if (TREE_CODE (old_name) != SSA_NAME)
4849 return NULL;
4851 decl = SSA_NAME_VAR (old_name);
4852 if (decl == NULL_TREE
4853 || TREE_CODE (decl) != PARM_DECL)
4854 return NULL;
4856 adj = get_adjustment_for_base (adjustments, decl);
4857 if (!adj)
4858 return NULL;
4860 repl = get_replaced_param_substitute (adj);
4861 new_name = make_ssa_name (repl, stmt);
4862 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4863 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4865 if (dump_file)
4867 fprintf (dump_file, "replacing an SSA name of a removed param ");
4868 print_generic_expr (dump_file, old_name);
4869 fprintf (dump_file, " with ");
4870 print_generic_expr (dump_file, new_name);
4871 fprintf (dump_file, "\n");
4874 replace_uses_by (old_name, new_name);
4875 return new_name;
4878 /* If the statement STMT contains any expressions that need to be replaced
4879 with a different one as noted by ADJUSTMENTS, do so. Handle any potential type
4880 incompatibilities (GSI is used to accommodate conversion statements and must
4881 point to the statement). Return true iff the statement was modified. */
4883 static bool
4884 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4885 ipa_parm_adjustment_vec adjustments)
4887 tree *lhs_p, *rhs_p;
4888 bool any;
4890 if (!gimple_assign_single_p (stmt))
4891 return false;
4893 rhs_p = gimple_assign_rhs1_ptr (stmt);
4894 lhs_p = gimple_assign_lhs_ptr (stmt);
4896 any = ipa_modify_expr (rhs_p, false, adjustments);
4897 any |= ipa_modify_expr (lhs_p, false, adjustments);
4898 if (any)
4900 tree new_rhs = NULL_TREE;
4902 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4904 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4906 /* V_C_Es of constructors can cause trouble (PR 42714). */
4907 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4908 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4909 else
4910 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4911 NULL);
4913 else
4914 new_rhs = fold_build1_loc (gimple_location (stmt),
4915 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4916 *rhs_p);
4918 else if (REFERENCE_CLASS_P (*rhs_p)
4919 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4920 && !is_gimple_reg (*lhs_p))
4921 /* This can happen when an assignment in between two single field
4922 structures is turned into an assignment in between two pointers to
4923 scalars (PR 42237). */
4924 new_rhs = *rhs_p;
4926 if (new_rhs)
4928 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4929 true, GSI_SAME_STMT);
4931 gimple_assign_set_rhs_from_tree (gsi, tmp);
4934 return true;
4937 return false;
4940 /* Traverse the function body and perform all modifications as described in
4941 ADJUSTMENTS. Return true iff the CFG has been changed. */
4943 bool
4944 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4946 bool cfg_changed = false;
4947 basic_block bb;
4949 FOR_EACH_BB_FN (bb, cfun)
4951 gimple_stmt_iterator gsi;
4953 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4955 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4956 tree new_lhs, old_lhs = gimple_phi_result (phi);
4957 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4958 if (new_lhs)
4960 gimple_phi_set_result (phi, new_lhs);
4961 release_ssa_name (old_lhs);
4965 gsi = gsi_start_bb (bb);
4966 while (!gsi_end_p (gsi))
4968 gimple *stmt = gsi_stmt (gsi);
4969 bool modified = false;
4970 tree *t;
4971 unsigned i;
4973 switch (gimple_code (stmt))
4975 case GIMPLE_RETURN:
4976 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4977 if (*t != NULL_TREE)
4978 modified |= ipa_modify_expr (t, true, adjustments);
4979 break;
4981 case GIMPLE_ASSIGN:
4982 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4983 break;
4985 case GIMPLE_CALL:
4986 /* Operands must be processed before the lhs. */
4987 for (i = 0; i < gimple_call_num_args (stmt); i++)
4989 t = gimple_call_arg_ptr (stmt, i);
4990 modified |= ipa_modify_expr (t, true, adjustments);
4993 if (gimple_call_lhs (stmt))
4995 t = gimple_call_lhs_ptr (stmt);
4996 modified |= ipa_modify_expr (t, false, adjustments);
4998 break;
5000 case GIMPLE_ASM:
5002 gasm *asm_stmt = as_a <gasm *> (stmt);
5003 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5005 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5006 modified |= ipa_modify_expr (t, true, adjustments);
5008 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5010 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5011 modified |= ipa_modify_expr (t, false, adjustments);
5014 break;
5016 default:
5017 break;
5020 def_operand_p defp;
5021 ssa_op_iter iter;
5022 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5024 tree old_def = DEF_FROM_PTR (defp);
5025 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5026 adjustments))
5028 SET_DEF (defp, new_def);
5029 release_ssa_name (old_def);
5030 modified = true;
5034 if (modified)
5036 update_stmt (stmt);
5037 if (maybe_clean_eh_stmt (stmt)
5038 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5039 cfg_changed = true;
5041 gsi_next (&gsi);
5045 return cfg_changed;
5048 /* Call gimple_debug_bind_reset_value on all debug statements describing
5049 gimple register parameters that are being removed or replaced. */
5051 static void
5052 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5054 int i, len;
5055 gimple_stmt_iterator *gsip = NULL, gsi;
5057 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5059 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5060 gsip = &gsi;
5062 len = adjustments.length ();
5063 for (i = 0; i < len; i++)
5065 struct ipa_parm_adjustment *adj;
5066 imm_use_iterator ui;
5067 gimple *stmt;
5068 gdebug *def_temp;
5069 tree name, vexpr, copy = NULL_TREE;
5070 use_operand_p use_p;
5072 adj = &adjustments[i];
5073 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5074 continue;
5075 name = ssa_default_def (cfun, adj->base);
5076 vexpr = NULL;
5077 if (name)
5078 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5080 if (gimple_clobber_p (stmt))
5082 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5083 unlink_stmt_vdef (stmt);
5084 gsi_remove (&cgsi, true);
5085 release_defs (stmt);
5086 continue;
5088 /* All other users must have been removed by
5089 ipa_sra_modify_function_body. */
5090 gcc_assert (is_gimple_debug (stmt));
5091 if (vexpr == NULL && gsip != NULL)
5093 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5094 vexpr = make_node (DEBUG_EXPR_DECL);
5095 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5096 NULL);
5097 DECL_ARTIFICIAL (vexpr) = 1;
5098 TREE_TYPE (vexpr) = TREE_TYPE (name);
5099 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5100 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5102 if (vexpr)
5104 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5105 SET_USE (use_p, vexpr);
5107 else
5108 gimple_debug_bind_reset_value (stmt);
5109 update_stmt (stmt);
5111 /* Create a VAR_DECL for debug info purposes. */
5112 if (!DECL_IGNORED_P (adj->base))
5114 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5115 VAR_DECL, DECL_NAME (adj->base),
5116 TREE_TYPE (adj->base));
5117 if (DECL_PT_UID_SET_P (adj->base))
5118 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5119 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5120 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5121 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5122 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5123 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5124 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5125 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5126 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5127 SET_DECL_RTL (copy, 0);
5128 TREE_USED (copy) = 1;
5129 DECL_CONTEXT (copy) = current_function_decl;
5130 add_local_decl (cfun, copy);
5131 DECL_CHAIN (copy) =
5132 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5133 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5135 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5137 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5138 if (vexpr)
5139 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5140 else
5141 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5142 NULL);
5143 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5148 /* Return false if all callers have at least as many actual arguments as there
5149 are formal parameters in the current function and their types match,
5150 otherwise return true. */
5152 static bool
5153 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5154 void *data ATTRIBUTE_UNUSED)
5156 struct cgraph_edge *cs;
5157 for (cs = node->callers; cs; cs = cs->next_caller)
5158 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5159 return true;
5161 return false;
5164 /* Return false if all callers have a vuse attached to their call statement. */
5166 static bool
5167 some_callers_have_no_vuse_p (struct cgraph_node *node,
5168 void *data ATTRIBUTE_UNUSED)
5170 struct cgraph_edge *cs;
5171 for (cs = node->callers; cs; cs = cs->next_caller)
5172 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5173 return true;
5175 return false;
5178 /* Convert all callers of NODE. */
5180 static bool
5181 convert_callers_for_node (struct cgraph_node *node,
5182 void *data)
5184 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5185 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5186 struct cgraph_edge *cs;
5188 for (cs = node->callers; cs; cs = cs->next_caller)
5190 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5192 if (dump_file)
5193 fprintf (dump_file, "Adjusting call %s -> %s\n",
5194 cs->caller->dump_name (), cs->callee->dump_name ());
5196 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5198 pop_cfun ();
5201 for (cs = node->callers; cs; cs = cs->next_caller)
5202 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5203 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5204 compute_fn_summary (cs->caller, true);
5205 BITMAP_FREE (recomputed_callers);
5207 return true;
5210 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5212 static void
5213 convert_callers (struct cgraph_node *node, tree old_decl,
5214 ipa_parm_adjustment_vec adjustments)
5216 basic_block this_block;
5218 node->call_for_symbol_and_aliases (convert_callers_for_node,
5219 &adjustments, false);
5221 if (!encountered_recursive_call)
5222 return;
5224 FOR_EACH_BB_FN (this_block, cfun)
5226 gimple_stmt_iterator gsi;
5228 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5230 gcall *stmt;
5231 tree call_fndecl;
5232 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5233 if (!stmt)
5234 continue;
5235 call_fndecl = gimple_call_fndecl (stmt);
5236 if (call_fndecl == old_decl)
5238 if (dump_file)
5239 fprintf (dump_file, "Adjusting recursive call");
5240 gimple_call_set_fndecl (stmt, node->decl);
5241 ipa_modify_call_arguments (NULL, stmt, adjustments);
5246 return;
5249 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
5250 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
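/* In outline: the function is cloned with an "isra" suffix, the clone's
   formal parameters are rewritten according to ADJUSTMENTS, debug info for
   removed parameters is patched up, and all known callers are redirected
   to the clone with their argument lists adjusted to match.  */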
5252 static bool
5253 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5255 struct cgraph_node *new_node;
5256 bool cfg_changed;
5258 cgraph_edge::rebuild_edges ();
5259 free_dominance_info (CDI_DOMINATORS);
5260 pop_cfun ();
5262 /* This must be done after rebuilding cgraph edges for node above.
5263 Otherwise any recursive calls to node that are recorded in
5264 redirect_callers will be corrupted. */
5265 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5266 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5267 NULL, false, NULL, NULL,
5268 "isra");
5269 redirect_callers.release ();
5271 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5272 ipa_modify_formal_parameters (current_function_decl, adjustments);
5273 cfg_changed = ipa_sra_modify_function_body (adjustments);
5274 sra_ipa_reset_debug_stmts (adjustments);
5275 convert_callers (new_node, node->decl, adjustments);
5276 new_node->make_local ();
5277 return cfg_changed;
5280 /* Means of communication between ipa_sra_check_caller and
5281 ipa_sra_preliminary_function_checks. */
5283 struct ipa_sra_check_caller_data
5285 bool has_callers;
5286 bool bad_arg_alignment;
5287 bool has_thunk;
5290 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5291 ipa_sra_check_caller_data. Also check whether all aggregate arguments in
5292 all known calls are unit aligned and, if not, set the appropriate flag in
5293 DATA too. */
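/* For example, an argument taken from a packed structure, e.g. 'g (x.f)'
   where the field F starts at a bit position that is not a multiple of
   BITS_PER_UNIT, sets bad_arg_alignment and disables IPA-SRA for the
   callee (a rough illustration).  */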
5295 static bool
5296 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5298 if (!node->callers)
5299 return false;
5301 struct ipa_sra_check_caller_data *iscc;
5302 iscc = (struct ipa_sra_check_caller_data *) data;
5303 iscc->has_callers = true;
5305 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5307 if (cs->caller->thunk.thunk_p)
5309 iscc->has_thunk = true;
5310 return true;
5312 gimple *call_stmt = cs->call_stmt;
5313 unsigned count = gimple_call_num_args (call_stmt);
5314 for (unsigned i = 0; i < count; i++)
5316 tree arg = gimple_call_arg (call_stmt, i);
5317 if (is_gimple_reg (arg))
5318 continue;
5320 tree offset;
5321 HOST_WIDE_INT bitsize, bitpos;
5322 machine_mode mode;
5323 int unsignedp, reversep, volatilep = 0;
5324 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5325 &unsignedp, &reversep, &volatilep);
5326 if (bitpos % BITS_PER_UNIT)
5328 iscc->bad_arg_alignment = true;
5329 return true;
5334 return false;
5337 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5338 its attributes, return true otherwise. NODE is the cgraph node of the current
5339 function. */
5341 static bool
5342 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5344 if (!node->can_be_local_p ())
5346 if (dump_file)
5347 fprintf (dump_file, "Function not local to this compilation unit.\n");
5348 return false;
5351 if (!node->local.can_change_signature)
5353 if (dump_file)
5354 fprintf (dump_file, "Function can not change signature.\n");
5355 return false;
5358 if (!tree_versionable_function_p (node->decl))
5360 if (dump_file)
5361 fprintf (dump_file, "Function is not versionable.\n");
5362 return false;
5365 if (!opt_for_fn (node->decl, optimize)
5366 || !opt_for_fn (node->decl, flag_ipa_sra))
5368 if (dump_file)
5369 fprintf (dump_file, "Function not optimized.\n");
5370 return false;
5373 if (DECL_VIRTUAL_P (current_function_decl))
5375 if (dump_file)
5376 fprintf (dump_file, "Function is a virtual method.\n");
5377 return false;
5380 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5381 && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5383 if (dump_file)
5384 fprintf (dump_file, "Function too big to be made truly local.\n");
5385 return false;
5388 if (cfun->stdarg)
5390 if (dump_file)
5391 fprintf (dump_file, "Function uses stdarg. \n");
5392 return false;
5395 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5396 return false;
5398 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5400 if (dump_file)
5401 fprintf (dump_file, "Always inline function will be inlined "
5402 "anyway. \n");
5403 return false;
5406 struct ipa_sra_check_caller_data iscc;
5407 memset (&iscc, 0, sizeof(iscc));
5408 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5409 if (!iscc.has_callers)
5411 if (dump_file)
5412 fprintf (dump_file,
5413 "Function has no callers in this compilation unit.\n");
5414 return false;
5417 if (iscc.bad_arg_alignment)
5419 if (dump_file)
5420 fprintf (dump_file,
5421 "A function call has an argument with non-unit alignment.\n");
5422 return false;
5425 if (iscc.has_thunk)
5427 if (dump_file)
5428 fprintf (dump_file,
5429 "A has thunk.\n");
5430 return false;
5433 return true;
5436 /* Perform early interprocedural SRA. */
5438 static unsigned int
5439 ipa_early_sra (void)
5441 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5442 ipa_parm_adjustment_vec adjustments;
5443 int ret = 0;
5445 if (!ipa_sra_preliminary_function_checks (node))
5446 return 0;
5448 sra_initialize ();
5449 sra_mode = SRA_MODE_EARLY_IPA;
5451 if (!find_param_candidates ())
5453 if (dump_file)
5454 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5455 goto simple_out;
5458 if (node->call_for_symbol_and_aliases
5459 (some_callers_have_mismatched_arguments_p, NULL, true))
5461 if (dump_file)
5462 fprintf (dump_file, "There are callers with insufficient number of "
5463 "arguments or arguments with type mismatches.\n");
5464 goto simple_out;
5467 if (node->call_for_symbol_and_aliases
5468 (some_callers_have_no_vuse_p, NULL, true))
5470 if (dump_file)
5471 fprintf (dump_file, "There are callers with no VUSE attached "
5472 "to a call stmt.\n");
5473 goto simple_out;
5476 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5477 func_param_count
5478 * last_basic_block_for_fn (cfun));
5479 final_bbs = BITMAP_ALLOC (NULL);
5481 scan_function ();
5482 if (encountered_apply_args)
5484 if (dump_file)
5485 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5486 goto out;
5489 if (encountered_unchangable_recursive_call)
5491 if (dump_file)
5492 fprintf (dump_file, "Function calls itself with insufficient "
5493 "number of arguments.\n");
5494 goto out;
5497 adjustments = analyze_all_param_acesses ();
5498 if (!adjustments.exists ())
5499 goto out;
5500 if (dump_file)
5501 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5503 if (modify_function (node, adjustments))
5504 ret = TODO_update_ssa | TODO_cleanup_cfg;
5505 else
5506 ret = TODO_update_ssa;
5507 adjustments.release ();
5509 statistics_counter_event (cfun, "Unused parameters deleted",
5510 sra_stats.deleted_unused_parameters);
5511 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5512 sra_stats.scalar_by_ref_to_by_val);
5513 statistics_counter_event (cfun, "Aggregate parameters broken up",
5514 sra_stats.aggregate_params_reduced);
5515 statistics_counter_event (cfun, "Aggregate parameter components created",
5516 sra_stats.param_reductions_created);
5518 out:
5519 BITMAP_FREE (final_bbs);
5520 free (bb_dereferences);
5521 simple_out:
5522 sra_deinitialize ();
5523 return ret;
5526 namespace {
5528 const pass_data pass_data_early_ipa_sra =
5530 GIMPLE_PASS, /* type */
5531 "eipa_sra", /* name */
5532 OPTGROUP_NONE, /* optinfo_flags */
5533 TV_IPA_SRA, /* tv_id */
5534 0, /* properties_required */
5535 0, /* properties_provided */
5536 0, /* properties_destroyed */
5537 0, /* todo_flags_start */
5538 TODO_dump_symtab, /* todo_flags_finish */
5541 class pass_early_ipa_sra : public gimple_opt_pass
5543 public:
5544 pass_early_ipa_sra (gcc::context *ctxt)
5545 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5548 /* opt_pass methods: */
5549 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5550 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5552 }; // class pass_early_ipa_sra
5554 } // anon namespace
5556 gimple_opt_pass *
5557 make_pass_early_ipa_sra (gcc::context *ctxt)
5559 return new pass_early_ipa_sra (ctxt);