gcc/tree-sra.c
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2017 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the
      access and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according
      to their offset and size and then scanned for partially overlapping
      accesses (i.e. those which overlap but one is not entirely within
      another).  Such an access disqualifies the whole aggregate from being
      scalarized.

      If there is no such inhibiting overlap, a representative access
      structure is chosen for every unique combination of offset and size.
      Afterwards, the pass builds a set of trees from these structures, in
      which children of an access are within their parent (in terms of offset
      and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
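
/* As an illustrative sketch (not part of the pass itself, and using the
   '$'-separated names produced by make_fancy_name below), intraprocedural SRA
   transforms code such as

     struct pair { int x; int y; } p;
     p.x = a;
     p.y = b;
     return p.x + p.y;

   into straight-line scalar code of the form

     int p$x, p$y;
     p$x = a;
     p$y = b;
     return p$x + p$y;

   so the aggregate never needs to live in memory and its components become
   visible to the scalar optimizers.  */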
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component
     reference.  If EXPR isn't a component reference just set `BASE = EXPR',
     `OFFSET = 0', `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* After the access tree has been constructed, this points to the parent of
     the current access, if there is one.  NULL for roots.  */
  struct access *parent;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order?  */
  unsigned reverse : 1;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not
     be scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
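
/* For illustration (a sketch, assuming 32-bit int), given

     struct S { int i; struct { int a; int b; } in; } s;

   and accesses to s.i, s.in and s.in.a, the group representatives would be
   linked as

     s.i      (offset 0,  size 32)    root, next_grp points to s.in
     s.in     (offset 32, size 64)    root
       s.in.a (offset 32, size 32)    first_child of s.in

   with every child lying within its parent in terms of offset and size.  */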
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early IPA SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Statistics gathered during the pass.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed
     an expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", non_addressable = %d, reverse = %d",
           access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root
   in ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses
   rooted in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (access->first_link && !access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not
   allow scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

static bool maybe_add_sra_candidate (tree);
/* Create and insert an access for EXPR.  Return the created access, or NULL
   if it is not possible.  Also scan for uses of the constant pool as we go
   along and add them to the candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
          && !is_gimple_reg_type (TREE_TYPE (expr))
          && dump_file && (dump_flags & TDF_DETAILS))
        {
          /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
             and elements of multidimensional arrays (which are
             multi-element arrays in their own right).  */
          fprintf (dump_file, "Allowing non-reg-type load of part"
                              " of constant-pool entry: ");
          print_generic_expr (dump_file, expr);
        }
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true
   if we are considering a decl from the constant pool.  If it is false, char
   arrays will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
  {
  case RECORD_TYPE:
    for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
      if (TREE_CODE (fld) == FIELD_DECL)
        {
          tree ft = TREE_TYPE (fld);

          if (DECL_BIT_FIELD (fld))
            return false;

          if (!is_gimple_reg_type (ft)
              && !scalarizable_type_p (ft, const_decl))
            return false;
        }

    return true;

  case ARRAY_TYPE:
    {
      HOST_WIDE_INT min_elem_size;
      if (const_decl)
        min_elem_size = 0;
      else
        min_elem_size = BITS_PER_UNIT;

      if (TYPE_DOMAIN (type) == NULL_TREE
          || !tree_fits_shwi_p (TYPE_SIZE (type))
          || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
          || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
          || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
        return false;
      if (tree_to_shwi (TYPE_SIZE (type)) == 0
          && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
        /* Zero-element array, should not prevent scalarization.  */
        ;
      else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
               || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
        /* Variable-length array, do not allow scalarization.  */
        return false;

      tree elem = TREE_TYPE (type);
      if (!is_gimple_reg_type (elem)
          && !scalarizable_type_p (elem, const_decl))
        return false;
      return true;
    }
  default:
    return false;
  }
}
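
/* A few illustrative consequences of the rules above (a sketch, not an
   exhaustive list):

     struct ok { int i; double d[4]; };    scalarizable
     struct bf { int i : 3; };             rejected (bit-field)
     char buf[16];                         rejected unless CONST_DECL is true,
                                           since MIN_ELEM_SIZE is then
                                           BITS_PER_UNIT and char elements are
                                           not strictly bigger than that

   Variable-length arrays are rejected because their TYPE_SIZE does not fit a
   shwi or their domain has no constant maximum.  */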
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression
   for the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            HOST_WIDE_INT pos = offset + int_bit_position (fld);
            tree ft = TREE_TYPE (fld);
            tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

            scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
                            TYPE_REVERSE_STORAGE_ORDER (decl_type),
                            nref, ft);
          }
      break;
    case ARRAY_TYPE:
      {
        tree elemtype = TREE_TYPE (decl_type);
        tree elem_size = TYPE_SIZE (elemtype);
        gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
        HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
        gcc_assert (el_size > 0);

        tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
        gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
        tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
        /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
        if (maxidx)
          {
            gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
            tree domain = TYPE_DOMAIN (decl_type);
            /* MINIDX and MAXIDX are inclusive, and must be interpreted in
               DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
            offset_int idx = wi::to_offset (minidx);
            offset_int max = wi::to_offset (maxidx);
            if (!TYPE_UNSIGNED (domain))
              {
                idx = wi::sext (idx, TYPE_PRECISION (domain));
                max = wi::sext (max, TYPE_PRECISION (domain));
              }
            for (int el_off = offset; idx <= max; ++idx)
              {
                tree nref = build4 (ARRAY_REF, elemtype,
                                    ref,
                                    wide_int_to_tree (domain, idx),
                                    NULL_TREE, NULL_TREE);
                scalarize_elem (base, el_off, el_size,
                                TYPE_REVERSE_STORAGE_ORDER (decl_type),
                                nref, elemtype);
                el_off += el_size;
              }
          }
      }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE
   locate the member, REVERSE gives its storage order, and REF must be the
   reference expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
                tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}
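
/* To sketch the effect of the two functions above: for a variable declared as

     struct { int a; int b[2]; } v;

   completely_scalarize creates artificial accesses roughly equivalent to
   v.a, v.b[0] and v.b[1], each flagged grp_total_scalarization, so that the
   whole aggregate can later be replaced by three scalars.  */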
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  if (storage_order_barrier_p (expr))
    {
      disqualify_base_of_expr (expr, "storage order barrier.");
      return NULL;
    }

  /* We need to dive through V_C_Es in order to get the size of the parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in
     other handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must
   be true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than
         an assign statement and thus cannot be removed even if we had a
         scalar replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB
   and there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Return true if the nature of BASE is such that it contains data even if
   there is no write to it in the function.  */

static bool
comes_initialized_p (tree base)
{
  return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
        lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (storage_order_barrier_p (lhs))
        racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;
      add_link_to_rhs (racc, link);
      add_access_to_work_queue (racc);

      /* Let's delay marking the areas as written until propagation of
         accesses across the link, unless the nature of rhs tells us that its
         data comes from elsewhere.  */
      if (!comes_initialized_p (racc->base))
        lacc->write = false;
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as
   there are formal parameters of the function currently processed by IPA-SRA
   and that their types match.  */

static inline bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}
/* Scan the function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (as_a <greturn *> (stmt));
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
                                               asm_visit_addr);
                if (final_bbs)
                  bitmap_set_bit (final_bbs, bb->index);

                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, true);
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has a smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put any integral type before any non-integral type.  When splicing,
         we make sure that those with insufficient precision and occupying the
         same space are not scalarized.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && !INTEGRAL_TYPE_P (f2->type))
        return -1;
      else if (!INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return 1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type)
               && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
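
/* As an illustration of the ordering above, three same-offset, same-size
   accesses with the types

     a vector of two floats, double, struct { double d; }

   would sort in exactly that order: scalars precede aggregates, and
   complex/vector types precede other scalar types, so the most
   "representative" type ends up first in its group.  */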
/* Append a name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable of an access to
   EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
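
/* For example, an access whose expression is s.data[3].x would get a
   replacement named s$data$3$x by the functions above, with unnamed
   declarations contributing D<uid> components instead.  */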
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, GSI must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  /* Preserve address-space information.  */
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  if (as != TYPE_ADDR_SPACE (exp_type))
    exp_type = build_qualified_type (exp_type,
                                     TYPE_QUALS (exp_type)
                                     | ENCODE_QUAL_ADDR_SPACE (as));

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = least_bit_hwi (misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
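
/* As a sketch of the result, for a record variable rec and a byte-aligned
   OFFSET of 32 bits, a call along the lines of

     build_ref_for_offset (loc, rec, 32, false, integer_type_node, &gsi, false)

   produces a reference of the general shape MEM[(int *)&rec + 4B], with
   alignment, address space and volatility carried over from the base.  */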
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
                                gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return
      build_ref_for_offset (loc, base, offset, model->reverse, model->type,
                            gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has
   to create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
1807 /* Construct a memory reference consisting of component_refs and array_refs to
1808 a part of an aggregate *RES (which is of type TYPE). The requested part
1809 should have type EXP_TYPE at be the given OFFSET. This function might not
1810 succeed, it returns true when it does and only then *RES points to something
1811 meaningful. This function should be used only to build expressions that we
1812 might need to present to user (e.g. in warnings). In all other situations,
1813 build_ref_for_model or build_ref_for_offset should be used instead. */
1815 static bool
1816 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1817 tree exp_type)
1819 while (1)
1821 tree fld;
1822 tree tr_size, index, minidx;
1823 HOST_WIDE_INT el_size;
1825 if (offset == 0 && exp_type
1826 && types_compatible_p (exp_type, type))
1827 return true;
1829 switch (TREE_CODE (type))
1831 case UNION_TYPE:
1832 case QUAL_UNION_TYPE:
1833 case RECORD_TYPE:
1834 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1836 HOST_WIDE_INT pos, size;
1837 tree tr_pos, expr, *expr_ptr;
1839 if (TREE_CODE (fld) != FIELD_DECL)
1840 continue;
1842 tr_pos = bit_position (fld);
1843 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1844 continue;
1845 pos = tree_to_uhwi (tr_pos);
1846 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1847 tr_size = DECL_SIZE (fld);
1848 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1849 continue;
1850 size = tree_to_uhwi (tr_size);
1851 if (size == 0)
1853 if (pos != offset)
1854 continue;
1856 else if (pos > offset || (pos + size) <= offset)
1857 continue;
1859 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1860 NULL_TREE);
1861 expr_ptr = &expr;
1862 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1863 offset - pos, exp_type))
1865 *res = expr;
1866 return true;
1869 return false;
1871 case ARRAY_TYPE:
1872 tr_size = TYPE_SIZE (TREE_TYPE (type));
1873 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1874 return false;
1875 el_size = tree_to_uhwi (tr_size);
1877 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1878 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1879 return false;
1880 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1881 if (!integer_zerop (minidx))
1882 index = int_const_binop (PLUS_EXPR, index, minidx);
1883 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1884 NULL_TREE, NULL_TREE);
1885 offset = offset % el_size;
1886 type = TREE_TYPE (type);
1887 break;
1889 default:
1890 if (offset != 0)
1891 return false;
1893 if (exp_type)
1894 return false;
1895 else
1896 return true;
1901 /* Return true iff TYPE is a stdarg va_list type. */
1903 static inline bool
1904 is_va_list_type (tree type)
1906 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1909 /* Print a message to the dump file explaining why a variable was rejected. */
1911 static void
1912 reject (tree var, const char *msg)
1914 if (dump_file && (dump_flags & TDF_DETAILS))
1916 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1917 print_generic_expr (dump_file, var);
1918 fprintf (dump_file, "\n");
1922 /* Return true if VAR is a candidate for SRA. */
1924 static bool
1925 maybe_add_sra_candidate (tree var)
1927 tree type = TREE_TYPE (var);
1928 const char *msg;
1929 tree_node **slot;
1931 if (!AGGREGATE_TYPE_P (type))
1933 reject (var, "not aggregate");
1934 return false;
1936 /* Allow constant-pool entries (that "need to live in memory")
1937 unless we are doing IPA SRA. */
1938 if (needs_to_live_in_memory (var)
1939 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1941 reject (var, "needs to live in memory");
1942 return false;
1944 if (TREE_THIS_VOLATILE (var))
1946 reject (var, "is volatile");
1947 return false;
1949 if (!COMPLETE_TYPE_P (type))
1951 reject (var, "has incomplete type");
1952 return false;
1954 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1956 reject (var, "type size not fixed");
1957 return false;
1959 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1961 reject (var, "type size is zero");
1962 return false;
1964 if (type_internals_preclude_sra_p (type, &msg))
1966 reject (var, msg);
1967 return false;
1969 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1970 we also want to schedule it rather late. Thus we ignore it in
1971 the early pass. */
1972 (sra_mode == SRA_MODE_EARLY_INTRA
1973 && is_va_list_type (type)))
1975 reject (var, "is va_list");
1976 return false;
1979 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1980 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1981 *slot = var;
1983 if (dump_file && (dump_flags & TDF_DETAILS))
1985 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1986 print_generic_expr (dump_file, var);
1987 fprintf (dump_file, "\n");
1990 return true;
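/* For illustration, with hypothetical declarations:

     struct S { int i; float f; } a;     accepted as a candidate
     volatile struct S b;                rejected: "is volatile"
     int c;                              rejected: "not aggregate"

   The individual decisions can be observed with -fdump-tree-sra-details
   (or -fdump-tree-esra-details for the early pass).  */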
1993 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1994 those declarations whose type is suitable for scalarization. */
1996 static bool
1997 find_var_candidates (void)
1999 tree var, parm;
2000 unsigned int i;
2001 bool ret = false;
2003 for (parm = DECL_ARGUMENTS (current_function_decl);
2004 parm;
2005 parm = DECL_CHAIN (parm))
2006 ret |= maybe_add_sra_candidate (parm);
2008 FOR_EACH_LOCAL_DECL (cfun, i, var)
2010 if (!VAR_P (var))
2011 continue;
2013 ret |= maybe_add_sra_candidate (var);
2016 return ret;
2019 /* Sort all accesses for the given variable, check for partial overlaps and
2020 return NULL if there are any. If there are none, pick a representative for
2021 each combination of offset and size and create a linked list out of them.
2022 Return the pointer to the first representative and make sure it is the first
2023 one in the vector of accesses. */
2025 static struct access *
2026 sort_and_splice_var_accesses (tree var)
2028 int i, j, access_count;
2029 struct access *res, **prev_acc_ptr = &res;
2030 vec<access_p> *access_vec;
2031 bool first = true;
2032 HOST_WIDE_INT low = -1, high = 0;
2034 access_vec = get_base_access_vector (var);
2035 if (!access_vec)
2036 return NULL;
2037 access_count = access_vec->length ();
2039 /* Sort by <OFFSET, SIZE>. */
2040 access_vec->qsort (compare_access_positions);
2042 i = 0;
2043 while (i < access_count)
2045 struct access *access = (*access_vec)[i];
2046 bool grp_write = access->write;
2047 bool grp_read = !access->write;
2048 bool grp_scalar_write = access->write
2049 && is_gimple_reg_type (access->type);
2050 bool grp_scalar_read = !access->write
2051 && is_gimple_reg_type (access->type);
2052 bool grp_assignment_read = access->grp_assignment_read;
2053 bool grp_assignment_write = access->grp_assignment_write;
2054 bool multiple_scalar_reads = false;
2055 bool total_scalarization = access->grp_total_scalarization;
2056 bool grp_partial_lhs = access->grp_partial_lhs;
2057 bool first_scalar = is_gimple_reg_type (access->type);
2058 bool unscalarizable_region = access->grp_unscalarizable_region;
2059 bool bf_non_full_precision
2060 = (INTEGRAL_TYPE_P (access->type)
2061 && TYPE_PRECISION (access->type) != access->size
2062 && TREE_CODE (access->expr) == COMPONENT_REF
2063 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2065 if (first || access->offset >= high)
2067 first = false;
2068 low = access->offset;
2069 high = access->offset + access->size;
2071 else if (access->offset > low && access->offset + access->size > high)
2072 return NULL;
2073 else
2074 gcc_assert (access->offset >= low
2075 && access->offset + access->size <= high);
2077 j = i + 1;
2078 while (j < access_count)
2080 struct access *ac2 = (*access_vec)[j];
2081 if (ac2->offset != access->offset || ac2->size != access->size)
2082 break;
2083 if (ac2->write)
2085 grp_write = true;
2086 grp_scalar_write = (grp_scalar_write
2087 || is_gimple_reg_type (ac2->type));
2089 else
2091 grp_read = true;
2092 if (is_gimple_reg_type (ac2->type))
2094 if (grp_scalar_read)
2095 multiple_scalar_reads = true;
2096 else
2097 grp_scalar_read = true;
2100 grp_assignment_read |= ac2->grp_assignment_read;
2101 grp_assignment_write |= ac2->grp_assignment_write;
2102 grp_partial_lhs |= ac2->grp_partial_lhs;
2103 unscalarizable_region |= ac2->grp_unscalarizable_region;
2104 total_scalarization |= ac2->grp_total_scalarization;
2105 relink_to_new_repr (access, ac2);
2107 /* If there are both aggregate-type and scalar-type accesses with
2108 this combination of size and offset, the comparison function
2109 should have put the scalars first. */
2110 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2111 /* It also prefers integral types to non-integral. However, when the
2112 precision of the selected type does not span the entire area and
2113 should also be used for a non-integer (e.g. float), we must not
2114 let that happen. Normally analyze_access_subtree expands the type
2115 to cover the entire area but for bit-fields it doesn't. */
2116 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2120 fprintf (dump_file, "Cannot scalarize the following access "
2121 "because insufficient precision integer type was "
2122 "selected.\n ");
2123 dump_access (dump_file, access, false);
2125 unscalarizable_region = true;
2127 ac2->group_representative = access;
2128 j++;
2131 i = j;
2133 access->group_representative = access;
2134 access->grp_write = grp_write;
2135 access->grp_read = grp_read;
2136 access->grp_scalar_read = grp_scalar_read;
2137 access->grp_scalar_write = grp_scalar_write;
2138 access->grp_assignment_read = grp_assignment_read;
2139 access->grp_assignment_write = grp_assignment_write;
2140 access->grp_hint = total_scalarization
2141 || (multiple_scalar_reads && !constant_decl_p (var));
2142 access->grp_total_scalarization = total_scalarization;
2143 access->grp_partial_lhs = grp_partial_lhs;
2144 access->grp_unscalarizable_region = unscalarizable_region;
2146 *prev_acc_ptr = access;
2147 prev_acc_ptr = &access->next_grp;
2150 gcc_assert (res == (*access_vec)[0]);
2151 return res;
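/* An illustrative run (hypothetical accesses): suppose the sorted vector
   for a variable contains accesses with <offset, size> pairs

     <0, 32> <0, 32> <32, 32>

   The two <0, 32> entries are merged into one group with the first as its
   representative, and <32, 32> starts a new group.  Had the vector
   contained <0, 64> followed by <32, 64>, the second access (bits 32..96)
   would overlap the first (bits 0..64) only partially and the function
   would return NULL, disqualifying the variable.  */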
2154 /* Create a variable for the given ACCESS which determines the type, name and a
2155 few other properties. Return the variable declaration; callers are expected
2156 to store it in ACCESS->replacement_decl. */
2158 static tree
2159 create_access_replacement (struct access *access)
2161 tree repl;
2163 if (access->grp_to_be_debug_replaced)
2165 repl = create_tmp_var_raw (access->type);
2166 DECL_CONTEXT (repl) = current_function_decl;
2168 else
2169 /* Drop any special alignment on the type if it's not on the main
2170 variant. This avoids issues with weirdo ABIs like AAPCS. */
2171 repl = create_tmp_var (build_qualified_type
2172 (TYPE_MAIN_VARIANT (access->type),
2173 TYPE_QUALS (access->type)), "SR");
2174 if (TREE_CODE (access->type) == COMPLEX_TYPE
2175 || TREE_CODE (access->type) == VECTOR_TYPE)
2177 if (!access->grp_partial_lhs)
2178 DECL_GIMPLE_REG_P (repl) = 1;
2180 else if (access->grp_partial_lhs
2181 && is_gimple_reg_type (access->type))
2182 TREE_ADDRESSABLE (repl) = 1;
2184 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2185 DECL_ARTIFICIAL (repl) = 1;
2186 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2188 if (DECL_NAME (access->base)
2189 && !DECL_IGNORED_P (access->base)
2190 && !DECL_ARTIFICIAL (access->base))
2192 char *pretty_name = make_fancy_name (access->expr);
2193 tree debug_expr = unshare_expr_without_location (access->expr), d;
2194 bool fail = false;
2196 DECL_NAME (repl) = get_identifier (pretty_name);
2197 DECL_NAMELESS (repl) = 1;
2198 obstack_free (&name_obstack, pretty_name);
2200 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2201 as DECL_DEBUG_EXPR isn't considered when looking for still
2202 used SSA_NAMEs and thus they could be freed. All debug info
2203 generation cares about is whether something is constant or variable
2204 and that get_ref_base_and_extent works properly on the
2205 expression. It cannot handle accesses at a non-constant offset
2206 though, so just give up in those cases. */
2207 for (d = debug_expr;
2208 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2209 d = TREE_OPERAND (d, 0))
2210 switch (TREE_CODE (d))
2212 case ARRAY_REF:
2213 case ARRAY_RANGE_REF:
2214 if (TREE_OPERAND (d, 1)
2215 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2216 fail = true;
2217 if (TREE_OPERAND (d, 3)
2218 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2219 fail = true;
2220 /* FALLTHRU */
2221 case COMPONENT_REF:
2222 if (TREE_OPERAND (d, 2)
2223 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2224 fail = true;
2225 break;
2226 case MEM_REF:
2227 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2228 fail = true;
2229 else
2230 d = TREE_OPERAND (d, 0);
2231 break;
2232 default:
2233 break;
2235 if (!fail)
2237 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2238 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2240 if (access->grp_no_warning)
2241 TREE_NO_WARNING (repl) = 1;
2242 else
2243 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2245 else
2246 TREE_NO_WARNING (repl) = 1;
2248 if (dump_file)
2250 if (access->grp_to_be_debug_replaced)
2252 fprintf (dump_file, "Created a debug-only replacement for ");
2253 print_generic_expr (dump_file, access->base);
2254 fprintf (dump_file, " offset: %u, size: %u\n",
2255 (unsigned) access->offset, (unsigned) access->size);
2257 else
2259 fprintf (dump_file, "Created a replacement for ");
2260 print_generic_expr (dump_file, access->base);
2261 fprintf (dump_file, " offset: %u, size: %u: ",
2262 (unsigned) access->offset, (unsigned) access->size);
2263 print_generic_expr (dump_file, repl);
2264 fprintf (dump_file, "\n");
2267 sra_stats.replacements++;
2269 return repl;
2272 /* Return the scalar replacement of ACCESS, which must exist. */
2274 static inline tree
2275 get_access_replacement (struct access *access)
2277 gcc_checking_assert (access->replacement_decl);
2278 return access->replacement_decl;
2282 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2283 linked list along the way. Stop when *ACCESS is NULL or the access it points
2284 to is not "within" the root. Return false iff some accesses partially
2285 overlap. */
2287 static bool
2288 build_access_subtree (struct access **access)
2290 struct access *root = *access, *last_child = NULL;
2291 HOST_WIDE_INT limit = root->offset + root->size;
2293 *access = (*access)->next_grp;
2294 while (*access && (*access)->offset + (*access)->size <= limit)
2296 if (!last_child)
2297 root->first_child = *access;
2298 else
2299 last_child->next_sibling = *access;
2300 last_child = *access;
2301 (*access)->parent = root;
2302 (*access)->grp_write |= root->grp_write;
2304 if (!build_access_subtree (access))
2305 return false;
2308 if (*access && (*access)->offset < limit)
2309 return false;
2311 return true;
2314 /* Build a tree of access representatives, ACCESS is the pointer to the first
2315 one, others are linked in a list by the next_grp field. Return false iff
2316 some accesses partially overlap. */
2318 static bool
2319 build_access_trees (struct access *access)
2321 while (access)
2323 struct access *root = access;
2325 if (!build_access_subtree (&access))
2326 return false;
2327 root->next_grp = access;
2329 return true;
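/* A small illustration (hypothetical representatives): given the group
   representatives, in <offset, size> order,

     <0, 64> <0, 32> <32, 32>

   build_access_trees links <0, 64> as a root whose children are <0, 32>
   and <32, 32>, because both lie entirely within the root.  A
   representative <48, 32> following <0, 64> would make
   build_access_subtree return false, since it straddles the root's limit
   of 64.  */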
2332 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2333 array. */
2335 static bool
2336 expr_with_var_bounded_array_refs_p (tree expr)
2338 while (handled_component_p (expr))
2340 if (TREE_CODE (expr) == ARRAY_REF
2341 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2342 return true;
2343 expr = TREE_OPERAND (expr, 0);
2345 return false;
2348 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2349 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2350 sorts of access flags appropriately along the way, notably propagate the
2351 grp_read, grp_assignment_read, grp_write and grp_assignment_write flags
2352 from PARENT to ROOT.
2354 Creating a replacement for a scalar access is considered beneficial if its
2355 grp_hint is set (this means we are either attempting total scalarization or
2356 there is more than one direct read access) or according to the following
2357 table:
2359 Access written to through a scalar type (once or more times)
2361 | Written to in an assignment statement
2363 | | Access read as scalar _once_
2364 | | |
2365 | | | Read in an assignment statement
2366 | | | |
2367 | | | | Scalarize Comment
2368 -----------------------------------------------------------------------------
2369 0 0 0 0 No access for the scalar
2370 0 0 0 1 No access for the scalar
2371 0 0 1 0 No Single read - won't help
2372 0 0 1 1 No The same case
2373 0 1 0 0 No access for the scalar
2374 0 1 0 1 No access for the scalar
2375 0 1 1 0 Yes s = *g; return s.i;
2376 0 1 1 1 Yes The same case as above
2377 1 0 0 0 No Won't help
2378 1 0 0 1 Yes s.i = 1; *g = s;
2379 1 0 1 0 Yes s.i = 5; g = s.i;
2380 1 0 1 1 Yes The same case as above
2381 1 1 0 0 No Won't help.
2382 1 1 0 1 Yes s.i = 1; *g = s;
2383 1 1 1 0 Yes s = *g; return s.i;
2384 1 1 1 1 Yes Any of the above yeses */
2386 static bool
2387 analyze_access_subtree (struct access *root, struct access *parent,
2388 bool allow_replacements)
2390 struct access *child;
2391 HOST_WIDE_INT limit = root->offset + root->size;
2392 HOST_WIDE_INT covered_to = root->offset;
2393 bool scalar = is_gimple_reg_type (root->type);
2394 bool hole = false, sth_created = false;
2396 if (parent)
2398 if (parent->grp_read)
2399 root->grp_read = 1;
2400 if (parent->grp_assignment_read)
2401 root->grp_assignment_read = 1;
2402 if (parent->grp_write)
2403 root->grp_write = 1;
2404 if (parent->grp_assignment_write)
2405 root->grp_assignment_write = 1;
2406 if (parent->grp_total_scalarization)
2407 root->grp_total_scalarization = 1;
2410 if (root->grp_unscalarizable_region)
2411 allow_replacements = false;
2413 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2414 allow_replacements = false;
2416 for (child = root->first_child; child; child = child->next_sibling)
2418 hole |= covered_to < child->offset;
2419 sth_created |= analyze_access_subtree (child, root,
2420 allow_replacements && !scalar);
2422 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2423 root->grp_total_scalarization &= child->grp_total_scalarization;
2424 if (child->grp_covered)
2425 covered_to += child->size;
2426 else
2427 hole = true;
2430 if (allow_replacements && scalar && !root->first_child
2431 && (root->grp_hint
2432 || ((root->grp_scalar_read || root->grp_assignment_read)
2433 && (root->grp_scalar_write || root->grp_assignment_write))))
2435 /* Always create access replacements that cover the whole access.
2436 For integral types this means the precision has to match.
2437 Avoid assumptions based on the integral type kind, too. */
2438 if (INTEGRAL_TYPE_P (root->type)
2439 && (TREE_CODE (root->type) != INTEGER_TYPE
2440 || TYPE_PRECISION (root->type) != root->size)
2441 /* But leave bitfield accesses alone. */
2442 && (TREE_CODE (root->expr) != COMPONENT_REF
2443 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2445 tree rt = root->type;
2446 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2447 && (root->size % BITS_PER_UNIT) == 0);
2448 root->type = build_nonstandard_integer_type (root->size,
2449 TYPE_UNSIGNED (rt));
2450 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2451 root->offset, root->reverse,
2452 root->type, NULL, false);
2454 if (dump_file && (dump_flags & TDF_DETAILS))
2456 fprintf (dump_file, "Changing the type of a replacement for ");
2457 print_generic_expr (dump_file, root->base);
2458 fprintf (dump_file, " offset: %u, size: %u ",
2459 (unsigned) root->offset, (unsigned) root->size);
2460 fprintf (dump_file, " to an integer.\n");
2464 root->grp_to_be_replaced = 1;
2465 root->replacement_decl = create_access_replacement (root);
2466 sth_created = true;
2467 hole = false;
2469 else
2471 if (allow_replacements
2472 && scalar && !root->first_child
2473 && (root->grp_scalar_write || root->grp_assignment_write)
2474 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2475 DECL_UID (root->base)))
2477 gcc_checking_assert (!root->grp_scalar_read
2478 && !root->grp_assignment_read);
2479 sth_created = true;
2480 if (MAY_HAVE_DEBUG_STMTS)
2482 root->grp_to_be_debug_replaced = 1;
2483 root->replacement_decl = create_access_replacement (root);
2487 if (covered_to < limit)
2488 hole = true;
2489 if (scalar || !allow_replacements)
2490 root->grp_total_scalarization = 0;
2493 if (!hole || root->grp_total_scalarization)
2494 root->grp_covered = 1;
2495 else if (root->grp_write || comes_initialized_p (root->base))
2496 root->grp_unscalarized_data = 1; /* not covered and written to */
2497 return sth_created;
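/* To make the table above concrete (an illustrative sketch, not code from
   this pass): for

     struct S { int i; float f; };
     int g (struct S *p) { struct S s = *p; return s.i; }

   s.i is read as a scalar exactly once and s is written in an assignment
   statement, which is row "0 1 1 0" of the table, so a scalar
   replacement such as SR_i is created and the return eventually reads
   SR_i instead of the aggregate.  */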
2500 /* Analyze all access trees linked by next_grp by means of
2501 analyze_access_subtree. */
2502 static bool
2503 analyze_access_trees (struct access *access)
2505 bool ret = false;
2507 while (access)
2509 if (analyze_access_subtree (access, NULL, true))
2510 ret = true;
2511 access = access->next_grp;
2514 return ret;
2517 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2518 SIZE would conflict with an already existing one. If exactly such a child
2519 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2521 static bool
2522 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2523 HOST_WIDE_INT size, struct access **exact_match)
2525 struct access *child;
2527 for (child = lacc->first_child; child; child = child->next_sibling)
2529 if (child->offset == norm_offset && child->size == size)
2531 *exact_match = child;
2532 return true;
2535 if (child->offset < norm_offset + size
2536 && child->offset + child->size > norm_offset)
2537 return true;
2540 return false;
2543 /* Create a new child access of PARENT, with all properties just like MODEL
2544 except for its offset, with its grp_read false and with its grp_write set
2545 according to SET_GRP_WRITE. Return the new access or NULL if it cannot be
2546 created. Note that this access is created long after all splicing and
2547 sorting, it's not located in any access vector and is automatically a
2548 representative of its group. */
2550 static struct access *
2551 create_artificial_child_access (struct access *parent, struct access *model,
2552 HOST_WIDE_INT new_offset,
2553 bool set_grp_write)
2555 struct access **child;
2556 tree expr = parent->base;
2558 gcc_assert (!model->grp_unscalarizable_region);
2560 struct access *access = access_pool.allocate ();
2561 memset (access, 0, sizeof (struct access));
2562 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2563 model->type))
2565 access->grp_no_warning = true;
2566 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2567 new_offset, model, NULL, false);
2570 access->base = parent->base;
2571 access->expr = expr;
2572 access->offset = new_offset;
2573 access->size = model->size;
2574 access->type = model->type;
2575 access->grp_write = set_grp_write;
2576 access->grp_read = false;
2577 access->reverse = model->reverse;
2579 child = &parent->first_child;
2580 while (*child && (*child)->offset < new_offset)
2581 child = &(*child)->next_sibling;
2583 access->next_sibling = *child;
2584 *child = access;
2586 return access;
2590 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2591 sub-trees as written to. If any of them has not been marked so previously
2592 and has assignment links leading from it, re-enqueue it. */
2594 static void
2595 subtree_mark_written_and_enqueue (struct access *access)
2597 if (access->grp_write)
2598 return;
2599 access->grp_write = true;
2600 add_access_to_work_queue (access);
2602 struct access *child;
2603 for (child = access->first_child; child; child = child->next_sibling)
2604 subtree_mark_written_and_enqueue (child);
2607 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2608 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2609 propagated transitively. Return true if anything changed. Additionally, if
2610 RACC is a scalar access but LACC is not, change the type of the latter, if
2611 possible. */
2613 static bool
2614 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2616 struct access *rchild;
2617 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2618 bool ret = false;
2620 /* If the LHS is still not marked as being written to, we only need to do so
2621 if the RHS at this level actually was. */
2622 if (!lacc->grp_write)
2624 gcc_checking_assert (!comes_initialized_p (racc->base));
2625 if (racc->grp_write)
2627 subtree_mark_written_and_enqueue (lacc);
2628 ret = true;
2632 if (is_gimple_reg_type (lacc->type)
2633 || lacc->grp_unscalarizable_region
2634 || racc->grp_unscalarizable_region)
2636 if (!lacc->grp_write)
2638 ret = true;
2639 subtree_mark_written_and_enqueue (lacc);
2641 return ret;
2644 if (is_gimple_reg_type (racc->type))
2646 if (!lacc->grp_write)
2648 ret = true;
2649 subtree_mark_written_and_enqueue (lacc);
2651 if (!lacc->first_child && !racc->first_child)
2653 tree t = lacc->base;
2655 lacc->type = racc->type;
2656 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2657 lacc->offset, racc->type))
2658 lacc->expr = t;
2659 else
2661 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2662 lacc->base, lacc->offset,
2663 racc, NULL, false);
2664 lacc->grp_no_warning = true;
2667 return ret;
2670 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2672 struct access *new_acc = NULL;
2673 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2675 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2676 &new_acc))
2678 if (new_acc)
2680 if (!new_acc->grp_write && rchild->grp_write)
2682 gcc_assert (!lacc->grp_write);
2683 subtree_mark_written_and_enqueue (new_acc);
2684 ret = true;
2687 rchild->grp_hint = 1;
2688 new_acc->grp_hint |= new_acc->grp_read;
2689 if (rchild->first_child)
2690 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2692 else
2694 if (!lacc->grp_write)
2696 ret = true;
2697 subtree_mark_written_and_enqueue (lacc);
2700 continue;
2703 if (rchild->grp_unscalarizable_region)
2705 if (rchild->grp_write && !lacc->grp_write)
2707 ret = true;
2708 subtree_mark_written_and_enqueue (lacc);
2710 continue;
2713 rchild->grp_hint = 1;
2714 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2715 lacc->grp_write
2716 || rchild->grp_write);
2717 gcc_checking_assert (new_acc);
2718 if (racc->first_child)
2719 propagate_subaccesses_across_link (new_acc, rchild);
2721 add_access_to_work_queue (lacc);
2722 ret = true;
2725 return ret;
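/* An illustrative scenario (hypothetical statements): given candidate
   aggregates a and b and

     b.x = ...;
     b.y = ...;
     a = b;

   the assign link for 'a = b' propagates the children of b's access tree
   (b.x and b.y) into a's tree, creating artificial child accesses for
   a.x and a.y even if the function never mentions them explicitly.
   Write flags travel along so that parts written in b are also
   considered written in a.  */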
2728 /* Propagate all subaccesses across assignment links. */
2730 static void
2731 propagate_all_subaccesses (void)
2733 while (work_queue_head)
2735 struct access *racc = pop_access_from_work_queue ();
2736 struct assign_link *link;
2738 if (racc->group_representative)
2739 racc = racc->group_representative;
2740 gcc_assert (racc->first_link);
2742 for (link = racc->first_link; link; link = link->next)
2744 struct access *lacc = link->lacc;
2746 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2747 continue;
2748 lacc = lacc->group_representative;
2750 bool reque_parents = false;
2751 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2753 if (!lacc->grp_write)
2755 subtree_mark_written_and_enqueue (lacc);
2756 reque_parents = true;
2759 else if (propagate_subaccesses_across_link (lacc, racc))
2760 reque_parents = true;
2762 if (reque_parents)
2765 add_access_to_work_queue (lacc);
2766 lacc = lacc->parent;
2768 while (lacc);
2773 /* Go through all accesses collected throughout the (intraprocedural) analysis
2774 stage, exclude overlapping ones, identify representatives and build trees
2775 out of them, making decisions about scalarization on the way. Return true
2776 iff there are any to-be-scalarized variables after this stage. */
2778 static bool
2779 analyze_all_variable_accesses (void)
2781 int res = 0;
2782 bitmap tmp = BITMAP_ALLOC (NULL);
2783 bitmap_iterator bi;
2784 unsigned i;
2785 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2787 enum compiler_param param = optimize_speed_p
2788 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2789 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2791 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2792 fall back to a target default. */
2793 unsigned HOST_WIDE_INT max_scalarization_size
2794 = global_options_set.x_param_values[param]
2795 ? PARAM_VALUE (param)
2796 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2798 max_scalarization_size *= BITS_PER_UNIT;
2800 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2801 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2802 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2804 tree var = candidate (i);
2806 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2807 constant_decl_p (var)))
2809 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2810 <= max_scalarization_size)
2812 create_total_scalarization_access (var);
2813 completely_scalarize (var, TREE_TYPE (var), 0, var);
2814 statistics_counter_event (cfun,
2815 "Totally-scalarized aggregates", 1);
2816 if (dump_file && (dump_flags & TDF_DETAILS))
2818 fprintf (dump_file, "Will attempt to totally scalarize ");
2819 print_generic_expr (dump_file, var);
2820 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2823 else if (dump_file && (dump_flags & TDF_DETAILS))
2825 fprintf (dump_file, "Too big to totally scalarize: ");
2826 print_generic_expr (dump_file, var);
2827 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2832 bitmap_copy (tmp, candidate_bitmap);
2833 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2835 tree var = candidate (i);
2836 struct access *access;
2838 access = sort_and_splice_var_accesses (var);
2839 if (!access || !build_access_trees (access))
2840 disqualify_candidate (var,
2841 "No or inhibitingly overlapping accesses.");
2844 propagate_all_subaccesses ();
2846 bitmap_copy (tmp, candidate_bitmap);
2847 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2849 tree var = candidate (i);
2850 struct access *access = get_first_repr_for_decl (var);
2852 if (analyze_access_trees (access))
2854 res++;
2855 if (dump_file && (dump_flags & TDF_DETAILS))
2857 fprintf (dump_file, "\nAccess trees for ");
2858 print_generic_expr (dump_file, var);
2859 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2860 dump_access_tree (dump_file, access);
2861 fprintf (dump_file, "\n");
2864 else
2865 disqualify_candidate (var, "No scalar replacements to be created.");
2868 BITMAP_FREE (tmp);
2870 if (res)
2872 statistics_counter_event (cfun, "Scalarized aggregates", res);
2873 return true;
2875 else
2876 return false;
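/* As a usage note (behavior sketched from the code above): the total
   scalarization size limit can be overridden on the command line, e.g.

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 test.c

   which caps totally scalarized aggregates at 64 bytes when optimizing
   for speed; without the param, the limit defaults to the target's move
   ratio times UNITS_PER_WORD.  */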
2879 /* Generate statements copying scalar replacements of accesses within a subtree
2880 into or out of AGG. ACCESS, all its children, siblings and their children
2881 are to be processed. AGG is an aggregate type expression (can be a
2882 declaration but does not have to be; it can for example also be a mem_ref or
2883 a series of handled components). TOP_OFFSET is the offset of the processed
2884 subtree which has to be subtracted from offsets of individual accesses to
2885 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2886 replacements in the interval <start_offset, start_offset + chunk_size>,
2887 otherwise copy all. GSI is a statement iterator used to place the new
2888 statements. WRITE should be true when the statements should write from AGG
2889 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2890 statements will be added after the current statement in GSI; otherwise they
2891 will be added before the statement. */
2893 static void
2894 generate_subtree_copies (struct access *access, tree agg,
2895 HOST_WIDE_INT top_offset,
2896 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2897 gimple_stmt_iterator *gsi, bool write,
2898 bool insert_after, location_t loc)
2900 /* Never write anything into constant pool decls. See PR70602. */
2901 if (!write && constant_decl_p (agg))
2902 return;
2905 if (chunk_size && access->offset >= start_offset + chunk_size)
2906 return;
2908 if (access->grp_to_be_replaced
2909 && (chunk_size == 0
2910 || access->offset + access->size > start_offset))
2912 tree expr, repl = get_access_replacement (access);
2913 gassign *stmt;
2915 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2916 access, gsi, insert_after);
2918 if (write)
2920 if (access->grp_partial_lhs)
2921 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2922 !insert_after,
2923 insert_after ? GSI_NEW_STMT
2924 : GSI_SAME_STMT);
2925 stmt = gimple_build_assign (repl, expr);
2927 else
2929 TREE_NO_WARNING (repl) = 1;
2930 if (access->grp_partial_lhs)
2931 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2932 !insert_after,
2933 insert_after ? GSI_NEW_STMT
2934 : GSI_SAME_STMT);
2935 stmt = gimple_build_assign (expr, repl);
2937 gimple_set_location (stmt, loc);
2939 if (insert_after)
2940 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2941 else
2942 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2943 update_stmt (stmt);
2944 sra_stats.subtree_copies++;
2946 else if (write
2947 && access->grp_to_be_debug_replaced
2948 && (chunk_size == 0
2949 || access->offset + access->size > start_offset))
2951 gdebug *ds;
2952 tree drhs = build_debug_ref_for_model (loc, agg,
2953 access->offset - top_offset,
2954 access);
2955 ds = gimple_build_debug_bind (get_access_replacement (access),
2956 drhs, gsi_stmt (*gsi));
2957 if (insert_after)
2958 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2959 else
2960 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2963 if (access->first_child)
2964 generate_subtree_copies (access->first_child, agg, top_offset,
2965 start_offset, chunk_size, gsi,
2966 write, insert_after, loc);
2968 access = access->next_sibling;
2970 while (access);
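/* For illustration (hypothetical scalarized variable): with replacements
   SR_i and SR_f for the two fields of 'struct S { int i; float f; } s',
   a call with WRITE set to true emits loads of the replacements

     SR_i = s.i;
     SR_f = s.f;

   while WRITE being false emits the stores 's.i = SR_i; s.f = SR_f;'.
   The START_OFFSET/CHUNK_SIZE pair can restrict the copies to a
   sub-interval, as used for bit-field references.  */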
2973 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2974 root of the subtree to be processed. GSI is the statement iterator used
2975 for inserting statements which are added after the current statement if
2976 INSERT_AFTER is true or before it otherwise. */
2978 static void
2979 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2980 bool insert_after, location_t loc)
2983 struct access *child;
2985 if (access->grp_to_be_replaced)
2987 gassign *stmt;
2989 stmt = gimple_build_assign (get_access_replacement (access),
2990 build_zero_cst (access->type));
2991 if (insert_after)
2992 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2993 else
2994 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2995 update_stmt (stmt);
2996 gimple_set_location (stmt, loc);
2998 else if (access->grp_to_be_debug_replaced)
3000 gdebug *ds
3001 = gimple_build_debug_bind (get_access_replacement (access),
3002 build_zero_cst (access->type),
3003 gsi_stmt (*gsi));
3004 if (insert_after)
3005 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3006 else
3007 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3010 for (child = access->first_child; child; child = child->next_sibling)
3011 init_subtree_with_zero (child, gsi, insert_after, loc);
3014 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3015 root of the subtree to be processed. GSI is the statement iterator used
3016 for inserting statements which are added after the current statement if
3017 INSERT_AFTER is true or before it otherwise. */
3019 static void
3020 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3021 bool insert_after, location_t loc)
3024 struct access *child;
3026 if (access->grp_to_be_replaced)
3028 tree rep = get_access_replacement (access);
3029 tree clobber = build_constructor (access->type, NULL);
3030 TREE_THIS_VOLATILE (clobber) = 1;
3031 gimple *stmt = gimple_build_assign (rep, clobber);
3033 if (insert_after)
3034 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3035 else
3036 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3037 update_stmt (stmt);
3038 gimple_set_location (stmt, loc);
3041 for (child = access->first_child; child; child = child->next_sibling)
3042 clobber_subtree (child, gsi, insert_after, loc);
3045 /* Search for an access representative for the given expression EXPR and
3046 return it or NULL if it cannot be found. */
3048 static struct access *
3049 get_access_for_expr (tree expr)
3051 HOST_WIDE_INT offset, size, max_size;
3052 tree base;
3053 bool reverse;
3055 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3056 a different size than the size of its argument and we need the latter
3057 one. */
3058 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3059 expr = TREE_OPERAND (expr, 0);
3061 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
3062 if (max_size == -1 || !DECL_P (base))
3063 return NULL;
3065 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3066 return NULL;
3068 return get_var_base_offset_size_access (base, offset, max_size);
3071 /* Replace the expression EXPR with a scalar replacement if there is one and
3072 generate other statements to do type conversion or subtree copying if
3073 necessary. GSI is used to place newly created statements, WRITE is true if
3074 the expression is being written to (it is on a LHS of a statement or output
3075 in an assembly statement). */
3077 static bool
3078 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3080 location_t loc;
3081 struct access *access;
3082 tree type, bfr, orig_expr;
3084 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3086 bfr = *expr;
3087 expr = &TREE_OPERAND (*expr, 0);
3089 else
3090 bfr = NULL_TREE;
3092 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3093 expr = &TREE_OPERAND (*expr, 0);
3094 access = get_access_for_expr (*expr);
3095 if (!access)
3096 return false;
3097 type = TREE_TYPE (*expr);
3098 orig_expr = *expr;
3100 loc = gimple_location (gsi_stmt (*gsi));
3101 gimple_stmt_iterator alt_gsi = gsi_none ();
3102 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3104 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3105 gsi = &alt_gsi;
3108 if (access->grp_to_be_replaced)
3110 tree repl = get_access_replacement (access);
3111 /* If we replace a non-register typed access simply use the original
3112 access expression to extract the scalar component afterwards.
3113 This happens if scalarizing a function return value or parameter
3114 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3115 gcc.c-torture/compile/20011217-1.c.
3117 We also want to use this when accessing a complex or vector which can
3118 be accessed as a different type too, potentially creating a need for
3119 type conversion (see PR42196) and when scalarized unions are involved
3120 in assembler statements (see PR42398). */
3121 if (!useless_type_conversion_p (type, access->type))
3123 tree ref;
3125 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3127 if (write)
3129 gassign *stmt;
3131 if (access->grp_partial_lhs)
3132 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3133 false, GSI_NEW_STMT);
3134 stmt = gimple_build_assign (repl, ref);
3135 gimple_set_location (stmt, loc);
3136 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3138 else
3140 gassign *stmt;
3142 if (access->grp_partial_lhs)
3143 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3144 true, GSI_SAME_STMT);
3145 stmt = gimple_build_assign (ref, repl);
3146 gimple_set_location (stmt, loc);
3147 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3150 else
3151 *expr = repl;
3152 sra_stats.exprs++;
3154 else if (write && access->grp_to_be_debug_replaced)
3156 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3157 NULL_TREE,
3158 gsi_stmt (*gsi));
3159 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3162 if (access->first_child)
3164 HOST_WIDE_INT start_offset, chunk_size;
3165 if (bfr
3166 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3167 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3169 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3170 start_offset = access->offset
3171 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3173 else
3174 start_offset = chunk_size = 0;
3176 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3177 start_offset, chunk_size, gsi, write, write,
3178 loc);
3180 return true;
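/* An illustrative rewrite (sketch): if s.i has the scalar replacement
   SR_i and the types agree, a use such as

     D.123 = s.i;

   becomes simply 'D.123 = SR_i;'.  When the types differ (e.g. a union
   member accessed through another member's type), the original reference
   is kept and the value is instead synchronized with SR_i through an
   extra load or store built by build_ref_for_model next to the
   statement.  */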
3183 /* Where scalar replacements of the RHS have been written to when a replacement
3184 of the LHS of an assignment cannot be directly loaded from a replacement of
3185 the RHS. */
3186 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3187 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3188 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3190 struct subreplacement_assignment_data
3192 /* Offset of the access representing the lhs of the assignment. */
3193 HOST_WIDE_INT left_offset;
3195 /* LHS and RHS of the original assignment. */
3196 tree assignment_lhs, assignment_rhs;
3198 /* Access representing the rhs of the whole assignment. */
3199 struct access *top_racc;
3201 /* Stmt iterator used for statement insertions after the original assignment.
3202 It points to the main GSI used to traverse a BB during function body
3203 modification. */
3204 gimple_stmt_iterator *new_gsi;
3206 /* Stmt iterator used for statement insertions before the original
3207 assignment. Keeps on pointing to the original statement. */
3208 gimple_stmt_iterator old_gsi;
3210 /* Location of the assignment. */
3211 location_t loc;
3213 /* Keeps the information whether we have needed to refresh replacements of
3214 the LHS and from which side of the assignments this takes place. */
3215 enum unscalarized_data_handling refreshed;
3218 /* Store all replacements in the access tree rooted in TOP_RACC either to their
3219 base aggregate if there are unscalarized data or directly to the LHS of the
3220 original assignment statement otherwise. */
3222 static void
3223 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3225 tree src;
3226 if (sad->top_racc->grp_unscalarized_data)
3228 src = sad->assignment_rhs;
3229 sad->refreshed = SRA_UDH_RIGHT;
3231 else
3233 src = sad->assignment_lhs;
3234 sad->refreshed = SRA_UDH_LEFT;
3236 generate_subtree_copies (sad->top_racc->first_child, src,
3237 sad->top_racc->offset, 0, 0,
3238 &sad->old_gsi, false, false, sad->loc);
3241 /* Try to generate statements to load all sub-replacements in an access subtree
3242 formed by children of LACC from scalar replacements in the SAD->top_racc
3243 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3244 and load the accesses from it. */
3246 static void
3247 load_assign_lhs_subreplacements (struct access *lacc,
3248 struct subreplacement_assignment_data *sad)
3250 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3252 HOST_WIDE_INT offset;
3253 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3255 if (lacc->grp_to_be_replaced)
3257 struct access *racc;
3258 gassign *stmt;
3259 tree rhs;
3261 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3262 if (racc && racc->grp_to_be_replaced)
3264 rhs = get_access_replacement (racc);
3265 if (!useless_type_conversion_p (lacc->type, racc->type))
3266 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3267 lacc->type, rhs);
3269 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3270 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3271 NULL_TREE, true, GSI_SAME_STMT);
3273 else
3275 /* No suitable access on the right hand side, need to load from
3276 the aggregate. See if we have to update it first... */
3277 if (sad->refreshed == SRA_UDH_NONE)
3278 handle_unscalarized_data_in_subtree (sad);
3280 if (sad->refreshed == SRA_UDH_LEFT)
3281 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3282 lacc->offset - sad->left_offset,
3283 lacc, sad->new_gsi, true);
3284 else
3285 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3286 lacc->offset - sad->left_offset,
3287 lacc, sad->new_gsi, true);
3288 if (lacc->grp_partial_lhs)
3289 rhs = force_gimple_operand_gsi (sad->new_gsi,
3290 rhs, true, NULL_TREE,
3291 false, GSI_NEW_STMT);
3294 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3295 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3296 gimple_set_location (stmt, sad->loc);
3297 update_stmt (stmt);
3298 sra_stats.subreplacements++;
3300 else
3302 if (sad->refreshed == SRA_UDH_NONE
3303 && lacc->grp_read && !lacc->grp_covered)
3304 handle_unscalarized_data_in_subtree (sad);
3306 if (lacc && lacc->grp_to_be_debug_replaced)
3308 gdebug *ds;
3309 tree drhs;
3310 struct access *racc = find_access_in_subtree (sad->top_racc,
3311 offset,
3312 lacc->size);
3314 if (racc && racc->grp_to_be_replaced)
3316 if (racc->grp_write || constant_decl_p (racc->base))
3317 drhs = get_access_replacement (racc);
3318 else
3319 drhs = NULL;
3321 else if (sad->refreshed == SRA_UDH_LEFT)
3322 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3323 lacc->offset, lacc);
3324 else if (sad->refreshed == SRA_UDH_RIGHT)
3325 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3326 offset, lacc);
3327 else
3328 drhs = NULL_TREE;
3329 if (drhs
3330 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3331 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3332 lacc->type, drhs);
3333 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3334 drhs, gsi_stmt (sad->old_gsi));
3335 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3339 if (lacc->first_child)
3340 load_assign_lhs_subreplacements (lacc, sad);
3344 /* Result code for SRA assignment modification. */
3345 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3346 SRA_AM_MODIFIED, /* stmt changed but not
3347 removed */
3348 SRA_AM_REMOVED }; /* stmt eliminated */
3350 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3351 to the assignment and GSI is the statement iterator pointing at it. Returns
3352 the same values as sra_modify_assign. */
3354 static enum assignment_mod_result
3355 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3357 tree lhs = gimple_assign_lhs (stmt);
3358 struct access *acc = get_access_for_expr (lhs);
3359 if (!acc)
3360 return SRA_AM_NONE;
3361 location_t loc = gimple_location (stmt);
3363 if (gimple_clobber_p (stmt))
3365 /* Clobber the replacement variable. */
3366 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3367 /* Remove clobbers of fully scalarized variables, they are dead. */
3368 if (acc->grp_covered)
3370 unlink_stmt_vdef (stmt);
3371 gsi_remove (gsi, true);
3372 release_defs (stmt);
3373 return SRA_AM_REMOVED;
3375 else
3376 return SRA_AM_MODIFIED;
3379 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3381 /* I have never seen this code path trigger but if it can happen the
3382 following should handle it gracefully. */
3383 if (access_has_children_p (acc))
3384 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3385 true, true, loc);
3386 return SRA_AM_MODIFIED;
3389 if (acc->grp_covered)
3391 init_subtree_with_zero (acc, gsi, false, loc);
3392 unlink_stmt_vdef (stmt);
3393 gsi_remove (gsi, true);
3394 release_defs (stmt);
3395 return SRA_AM_REMOVED;
3397 else
3399 init_subtree_with_zero (acc, gsi, true, loc);
3400 return SRA_AM_MODIFIED;
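/* For example (an illustrative sketch): a GIMPLE zeroing assignment

     s = {};

   to a fully covered candidate s becomes a series of 'SR_x = 0;'
   initializations of the scalar replacements and the statement itself is
   removed (SRA_AM_REMOVED); if parts of s remain unscalarized, the
   statement stays and the replacements are zero-initialized in addition
   (SRA_AM_MODIFIED).  */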
3404 /* Create and return a new suitable default definition SSA_NAME for RACC which
3405 is an access describing an uninitialized part of an aggregate that is being
3406 loaded. */
3408 static tree
3409 get_repl_default_def_ssa_name (struct access *racc)
3411 gcc_checking_assert (!racc->grp_to_be_replaced
3412 && !racc->grp_to_be_debug_replaced);
3413 if (!racc->replacement_decl)
3414 racc->replacement_decl = create_access_replacement (racc);
3415 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3418 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3419 bit-field field declaration somewhere in it. */
3421 static inline bool
3422 contains_vce_or_bfcref_p (const_tree ref)
3424 while (handled_component_p (ref))
3426 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3427 || (TREE_CODE (ref) == COMPONENT_REF
3428 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3429 return true;
3430 ref = TREE_OPERAND (ref, 0);
3433 return false;
3436 /* Examine both sides of the assignment statement pointed to by STMT, replace
3437 them with a scalar replacement if there is one and generate copying of
3438 replacements if scalarized aggregates have been used in the assignment. GSI
3439 is used to hold generated statements for type conversions and subtree
3440 copying. */
3442 static enum assignment_mod_result
3443 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3445 struct access *lacc, *racc;
3446 tree lhs, rhs;
3447 bool modify_this_stmt = false;
3448 bool force_gimple_rhs = false;
3449 location_t loc;
3450 gimple_stmt_iterator orig_gsi = *gsi;
3452 if (!gimple_assign_single_p (stmt))
3453 return SRA_AM_NONE;
3454 lhs = gimple_assign_lhs (stmt);
3455 rhs = gimple_assign_rhs1 (stmt);
3457 if (TREE_CODE (rhs) == CONSTRUCTOR)
3458 return sra_modify_constructor_assign (stmt, gsi);
3460 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3461 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3462 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3464 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3465 gsi, false);
3466 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3467 gsi, true);
3468 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3471 lacc = get_access_for_expr (lhs);
3472 racc = get_access_for_expr (rhs);
3473 if (!lacc && !racc)
3474 return SRA_AM_NONE;
3475 /* Avoid modifying initializations of constant-pool replacements. */
3476 if (racc && (racc->replacement_decl == lhs))
3477 return SRA_AM_NONE;
3479 loc = gimple_location (stmt);
3480 if (lacc && lacc->grp_to_be_replaced)
3482 lhs = get_access_replacement (lacc);
3483 gimple_assign_set_lhs (stmt, lhs);
3484 modify_this_stmt = true;
3485 if (lacc->grp_partial_lhs)
3486 force_gimple_rhs = true;
3487 sra_stats.exprs++;
3490 if (racc && racc->grp_to_be_replaced)
3492 rhs = get_access_replacement (racc);
3493 modify_this_stmt = true;
3494 if (racc->grp_partial_lhs)
3495 force_gimple_rhs = true;
3496 sra_stats.exprs++;
3498 else if (racc
3499 && !racc->grp_unscalarized_data
3500 && !racc->grp_unscalarizable_region
3501 && TREE_CODE (lhs) == SSA_NAME
3502 && !access_has_replacements_p (racc))
3504 rhs = get_repl_default_def_ssa_name (racc);
3505 modify_this_stmt = true;
3506 sra_stats.exprs++;
3509 if (modify_this_stmt)
3511 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3513 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3514 ??? This should move to fold_stmt which we simply should
3515 call after building a VIEW_CONVERT_EXPR here. */
3516 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3517 && !contains_bitfld_component_ref_p (lhs))
3519 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3520 gimple_assign_set_lhs (stmt, lhs);
3522 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3523 && !contains_vce_or_bfcref_p (rhs))
3524 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3526 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3528 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3529 rhs);
3530 if (is_gimple_reg_type (TREE_TYPE (lhs))
3531 && TREE_CODE (lhs) != SSA_NAME)
3532 force_gimple_rhs = true;
3537 if (lacc && lacc->grp_to_be_debug_replaced)
3539 tree dlhs = get_access_replacement (lacc);
3540 tree drhs = unshare_expr (rhs);
3541 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3543 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3544 && !contains_vce_or_bfcref_p (drhs))
3545 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3546 if (drhs
3547 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3548 TREE_TYPE (drhs)))
3549 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3550 TREE_TYPE (dlhs), drhs);
3552 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3553 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3556 /* From this point on, the function deals with assignments in between
3557 aggregates when at least one has scalar reductions of some of its
3558 components. There are three possible scenarios: 1) both the LHS and RHS have
3559 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3561 In the first case, we would like to load the LHS components from RHS
3562 components whenever possible. If that is not possible, we would like to
3563 read it directly from the RHS (after updating it by storing in it its own
3564 components). If there are some necessary unscalarized data in the LHS,
3565 those will be loaded by the original assignment too. If neither of these
3566 cases happen, the original statement can be removed. Most of this is done
3567 by load_assign_lhs_subreplacements.
3569 In the second case, we would like to store all RHS scalarized components
3570 directly into LHS and if they cover the aggregate completely, remove the
3571 statement too. In the third case, we want the LHS components to be loaded
3572 directly from the RHS (DSE will remove the original statement if it
3573 becomes redundant).
3575 This is a bit complex but manageable when types match and when unions do
3576 not cause confusion in a way that we cannot really load a component of LHS
3577 from the RHS or vice versa (the access representing this level can have
3578 subaccesses that are accessible only through a different union field at a
3579 higher level - different from the one used in the examined expression).
3580 Unions are fun.
3582 Therefore, I specially handle a fourth case, happening when there is a
3583 specific type cast or it is impossible to locate a scalarized subaccess on
3584 the other side of the expression. If that happens, I simply "refresh" the
3585 RHS by storing in it its scalarized components, leave the original statement
3586 there to do the copying and then load the scalar replacements of the LHS.
3587 This is what the first branch does. */
3589 if (modify_this_stmt
3590 || gimple_has_volatile_ops (stmt)
3591 || contains_vce_or_bfcref_p (rhs)
3592 || contains_vce_or_bfcref_p (lhs)
3593 || stmt_ends_bb_p (stmt))
3595 /* No need to copy into a constant-pool, it comes pre-initialized. */
3596 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3597 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3598 gsi, false, false, loc);
3599 if (access_has_children_p (lacc))
3601 gimple_stmt_iterator alt_gsi = gsi_none ();
3602 if (stmt_ends_bb_p (stmt))
3604 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3605 gsi = &alt_gsi;
3607 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3608 gsi, true, true, loc);
3610 sra_stats.separate_lhs_rhs_handling++;
3612 /* This gimplification must be done after generate_subtree_copies,
3613 lest we insert the subtree copies in the middle of the gimplified
3614 sequence. */
3615 if (force_gimple_rhs)
3616 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3617 true, GSI_SAME_STMT);
3618 if (gimple_assign_rhs1 (stmt) != rhs)
3620 modify_this_stmt = true;
3621 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3622 gcc_assert (stmt == gsi_stmt (orig_gsi));
3625 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3627 else
3629 if (access_has_children_p (lacc)
3630 && access_has_children_p (racc)
3631 /* When an access represents an unscalarizable region, it usually
3632 represents accesses with variable offset and thus must not be used
3633 to generate new memory accesses. */
3634 && !lacc->grp_unscalarizable_region
3635 && !racc->grp_unscalarizable_region)
3637 struct subreplacement_assignment_data sad;
3639 sad.left_offset = lacc->offset;
3640 sad.assignment_lhs = lhs;
3641 sad.assignment_rhs = rhs;
3642 sad.top_racc = racc;
3643 sad.old_gsi = *gsi;
3644 sad.new_gsi = gsi;
3645 sad.loc = gimple_location (stmt);
3646 sad.refreshed = SRA_UDH_NONE;
3648 if (lacc->grp_read && !lacc->grp_covered)
3649 handle_unscalarized_data_in_subtree (&sad);
3651 load_assign_lhs_subreplacements (lacc, &sad);
3652 if (sad.refreshed != SRA_UDH_RIGHT)
3654 gsi_next (gsi);
3655 unlink_stmt_vdef (stmt);
3656 gsi_remove (&sad.old_gsi, true);
3657 release_defs (stmt);
3658 sra_stats.deleted++;
3659 return SRA_AM_REMOVED;
3662 else
3664 if (access_has_children_p (racc)
3665 && !racc->grp_unscalarized_data
3666 && TREE_CODE (lhs) != SSA_NAME)
3668 if (dump_file)
3670 fprintf (dump_file, "Removing load: ");
3671 print_gimple_stmt (dump_file, stmt, 0);
3673 generate_subtree_copies (racc->first_child, lhs,
3674 racc->offset, 0, 0, gsi,
3675 false, false, loc);
3676 gcc_assert (stmt == gsi_stmt (*gsi));
3677 unlink_stmt_vdef (stmt);
3678 gsi_remove (gsi, true);
3679 release_defs (stmt);
3680 sra_stats.deleted++;
3681 return SRA_AM_REMOVED;
3683 /* Restore the aggregate RHS from its components so the
3684 prevailing aggregate copy does the right thing. */
3685 if (access_has_children_p (racc))
3686 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3687 gsi, false, false, loc);
3688 /* Re-load the components of the aggregate copy destination.
3689 But use the RHS aggregate to load from to expose more
3690 optimization opportunities. */
3691 if (access_has_children_p (lacc))
3692 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3693 0, 0, gsi, true, true, loc);
3696 return SRA_AM_NONE;
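/* Putting it together (an illustrative sketch with hypothetical names):
   for

     struct S a, b;
     ...
     a = b;

   where both sides are scalarized into SR_a_i/SR_a_f and SR_b_i/SR_b_f,
   load_assign_lhs_subreplacements turns the copy into

     SR_a_i = SR_b_i;
     SR_a_f = SR_b_f;

   and the original statement is removed when nothing had to be refreshed
   from the RHS aggregate (sad.refreshed != SRA_UDH_RIGHT).  */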
3700 /* Set any scalar replacements of values in the constant pool to the initial
3701 value of the constant. (Constant-pool decls like *.LC0 have effectively
3702 been initialized before the program starts, so we must do the same for their
3703 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3704 the function's entry block. */
3706 static void
3707 initialize_constant_pool_replacements (void)
3709 gimple_seq seq = NULL;
3710 gimple_stmt_iterator gsi = gsi_start (seq);
3711 bitmap_iterator bi;
3712 unsigned i;
3714 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3716 tree var = candidate (i);
3717 if (!constant_decl_p (var))
3718 continue;
3719 vec<access_p> *access_vec = get_base_access_vector (var);
3720 if (!access_vec)
3721 continue;
3722 for (unsigned i = 0; i < access_vec->length (); i++)
3724 struct access *access = (*access_vec)[i];
3725 if (!access->replacement_decl)
3726 continue;
3727 gassign *stmt
3728 = gimple_build_assign (get_access_replacement (access),
3729 unshare_expr (access->expr));
3730 if (dump_file && (dump_flags & TDF_DETAILS))
3732 fprintf (dump_file, "Generating constant initializer: ");
3733 print_gimple_stmt (dump_file, stmt, 0);
3734 fprintf (dump_file, "\n");
3736 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3737 update_stmt (stmt);
3741 seq = gsi_seq (gsi);
3742 if (seq)
3743 gsi_insert_seq_on_edge_immediate (
3744 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3747 /* Traverse the function body and perform all modifications as decided in
3748 analyze_all_variable_accesses. Return true iff the CFG has been
3749 changed. */
3751 static bool
3752 sra_modify_function_body (void)
3754 bool cfg_changed = false;
3755 basic_block bb;
3757 initialize_constant_pool_replacements ();
3759 FOR_EACH_BB_FN (bb, cfun)
3761 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3762 while (!gsi_end_p (gsi))
3764 gimple *stmt = gsi_stmt (gsi);
3765 enum assignment_mod_result assign_result;
3766 bool modified = false, deleted = false;
3767 tree *t;
3768 unsigned i;
3770 switch (gimple_code (stmt))
3772 case GIMPLE_RETURN:
3773 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3774 if (*t != NULL_TREE)
3775 modified |= sra_modify_expr (t, &gsi, false);
3776 break;
3778 case GIMPLE_ASSIGN:
3779 assign_result = sra_modify_assign (stmt, &gsi);
3780 modified |= assign_result == SRA_AM_MODIFIED;
3781 deleted = assign_result == SRA_AM_REMOVED;
3782 break;
3784 case GIMPLE_CALL:
3785 /* Operands must be processed before the lhs. */
3786 for (i = 0; i < gimple_call_num_args (stmt); i++)
3788 t = gimple_call_arg_ptr (stmt, i);
3789 modified |= sra_modify_expr (t, &gsi, false);
3792 if (gimple_call_lhs (stmt))
3794 t = gimple_call_lhs_ptr (stmt);
3795 modified |= sra_modify_expr (t, &gsi, true);
3797 break;
3799 case GIMPLE_ASM:
3801 gasm *asm_stmt = as_a <gasm *> (stmt);
3802 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3804 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3805 modified |= sra_modify_expr (t, &gsi, false);
3807 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3809 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3810 modified |= sra_modify_expr (t, &gsi, true);
3813 break;
3815 default:
3816 break;
3819 if (modified)
3821 update_stmt (stmt);
3822 if (maybe_clean_eh_stmt (stmt)
3823 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3824 cfg_changed = true;
3826 if (!deleted)
3827 gsi_next (&gsi);
3831 gsi_commit_edge_inserts ();
3832 return cfg_changed;
3835 /* Generate statements initializing scalar replacements of parts of function
3836 parameters. */
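/* A minimal sketch of the effect (replacement names hypothetical): given

     struct S { int a; float b; };
     int f (struct S s) { return s.a + (int) s.b; }

   with scalar replacements SR.a and SR.b chosen for s.a and s.b, the
   statements

     SR.a = s.a;
     SR.b = s.b;

   are inserted on the single edge leaving the entry block, so later uses of
   the components read the scalars instead of the aggregate.  */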
3838 static void
3839 initialize_parameter_reductions (void)
3841 gimple_stmt_iterator gsi;
3842 gimple_seq seq = NULL;
3843 tree parm;
3845 gsi = gsi_start (seq);
3846 for (parm = DECL_ARGUMENTS (current_function_decl);
3847 parm;
3848 parm = DECL_CHAIN (parm))
3850 vec<access_p> *access_vec;
3851 struct access *access;
3853 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3854 continue;
3855 access_vec = get_base_access_vector (parm);
3856 if (!access_vec)
3857 continue;
3859 for (access = (*access_vec)[0];
3860 access;
3861 access = access->next_grp)
3862 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3863 EXPR_LOCATION (parm));
3866 seq = gsi_seq (gsi);
3867 if (seq)
3868 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3871 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3872 it reveals there are components of some aggregates to be scalarized, it runs
3873 the required transformations. */
3874 static unsigned int
3875 perform_intra_sra (void)
3877 int ret = 0;
3878 sra_initialize ();
3880 if (!find_var_candidates ())
3881 goto out;
3883 if (!scan_function ())
3884 goto out;
3886 if (!analyze_all_variable_accesses ())
3887 goto out;
3889 if (sra_modify_function_body ())
3890 ret = TODO_update_ssa | TODO_cleanup_cfg;
3891 else
3892 ret = TODO_update_ssa;
3893 initialize_parameter_reductions ();
3895 statistics_counter_event (cfun, "Scalar replacements created",
3896 sra_stats.replacements);
3897 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3898 statistics_counter_event (cfun, "Subtree copy stmts",
3899 sra_stats.subtree_copies);
3900 statistics_counter_event (cfun, "Subreplacement stmts",
3901 sra_stats.subreplacements);
3902 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3903 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3904 sra_stats.separate_lhs_rhs_handling);
3906 out:
3907 sra_deinitialize ();
3908 return ret;
3911 /* Perform early intraprocedural SRA. */
3912 static unsigned int
3913 early_intra_sra (void)
3915 sra_mode = SRA_MODE_EARLY_INTRA;
3916 return perform_intra_sra ();
3919 /* Perform "late" intraprocedural SRA. */
3920 static unsigned int
3921 late_intra_sra (void)
3923 sra_mode = SRA_MODE_INTRA;
3924 return perform_intra_sra ();
3928 static bool
3929 gate_intra_sra (void)
3931 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3935 namespace {
3937 const pass_data pass_data_sra_early =
3939 GIMPLE_PASS, /* type */
3940 "esra", /* name */
3941 OPTGROUP_NONE, /* optinfo_flags */
3942 TV_TREE_SRA, /* tv_id */
3943 ( PROP_cfg | PROP_ssa ), /* properties_required */
3944 0, /* properties_provided */
3945 0, /* properties_destroyed */
3946 0, /* todo_flags_start */
3947 TODO_update_ssa, /* todo_flags_finish */
3950 class pass_sra_early : public gimple_opt_pass
3952 public:
3953 pass_sra_early (gcc::context *ctxt)
3954 : gimple_opt_pass (pass_data_sra_early, ctxt)
3957 /* opt_pass methods: */
3958 virtual bool gate (function *) { return gate_intra_sra (); }
3959 virtual unsigned int execute (function *) { return early_intra_sra (); }
3961 }; // class pass_sra_early
3963 } // anon namespace
3965 gimple_opt_pass *
3966 make_pass_sra_early (gcc::context *ctxt)
3968 return new pass_sra_early (ctxt);
3971 namespace {
3973 const pass_data pass_data_sra =
3975 GIMPLE_PASS, /* type */
3976 "sra", /* name */
3977 OPTGROUP_NONE, /* optinfo_flags */
3978 TV_TREE_SRA, /* tv_id */
3979 ( PROP_cfg | PROP_ssa ), /* properties_required */
3980 0, /* properties_provided */
3981 0, /* properties_destroyed */
3982 TODO_update_address_taken, /* todo_flags_start */
3983 TODO_update_ssa, /* todo_flags_finish */
3986 class pass_sra : public gimple_opt_pass
3988 public:
3989 pass_sra (gcc::context *ctxt)
3990 : gimple_opt_pass (pass_data_sra, ctxt)
3993 /* opt_pass methods: */
3994 virtual bool gate (function *) { return gate_intra_sra (); }
3995 virtual unsigned int execute (function *) { return late_intra_sra (); }
3997 }; // class pass_sra
3999 } // anon namespace
4001 gimple_opt_pass *
4002 make_pass_sra (gcc::context *ctxt)
4004 return new pass_sra (ctxt);
4008 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
4009 parameter. */
4011 static bool
4012 is_unused_scalar_param (tree parm)
4014 tree name;
4015 return (is_gimple_reg (parm)
4016 && (!(name = ssa_default_def (cfun, parm))
4017 || has_zero_uses (name)));
4020 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
4021 examine whether there are any direct or otherwise infeasible ones. If so,
4022 return true; otherwise return false.  PARM must be a gimple register with a
4023 non-NULL default definition. */
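/* For illustration (GIMPLE sketched, SSA names hypothetical): for
   void f (int *p), a plain load such as

     tmp_1 = *p;

   i.e. a MEM_REF of p with zero offset, a compatible type and no
   volatility, is a valid use, whereas

     q_2 = p + 4;        <- arithmetic on the pointer value
     global_ptr = p;     <- the pointer escapes

   use the pointer itself directly and cause true to be returned.  */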
4025 static bool
4026 ptr_parm_has_direct_uses (tree parm)
4028 imm_use_iterator ui;
4029 gimple *stmt;
4030 tree name = ssa_default_def (cfun, parm);
4031 bool ret = false;
4033 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4035 int uses_ok = 0;
4036 use_operand_p use_p;
4038 if (is_gimple_debug (stmt))
4039 continue;
4041 /* Valid uses include dereferences on the lhs and the rhs. */
4042 if (gimple_has_lhs (stmt))
4044 tree lhs = gimple_get_lhs (stmt);
4045 while (handled_component_p (lhs))
4046 lhs = TREE_OPERAND (lhs, 0);
4047 if (TREE_CODE (lhs) == MEM_REF
4048 && TREE_OPERAND (lhs, 0) == name
4049 && integer_zerop (TREE_OPERAND (lhs, 1))
4050 && types_compatible_p (TREE_TYPE (lhs),
4051 TREE_TYPE (TREE_TYPE (name)))
4052 && !TREE_THIS_VOLATILE (lhs))
4053 uses_ok++;
4055 if (gimple_assign_single_p (stmt))
4057 tree rhs = gimple_assign_rhs1 (stmt);
4058 while (handled_component_p (rhs))
4059 rhs = TREE_OPERAND (rhs, 0);
4060 if (TREE_CODE (rhs) == MEM_REF
4061 && TREE_OPERAND (rhs, 0) == name
4062 && integer_zerop (TREE_OPERAND (rhs, 1))
4063 && types_compatible_p (TREE_TYPE (rhs),
4064 TREE_TYPE (TREE_TYPE (name)))
4065 && !TREE_THIS_VOLATILE (rhs))
4066 uses_ok++;
4068 else if (is_gimple_call (stmt))
4070 unsigned i;
4071 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4073 tree arg = gimple_call_arg (stmt, i);
4074 while (handled_component_p (arg))
4075 arg = TREE_OPERAND (arg, 0);
4076 if (TREE_CODE (arg) == MEM_REF
4077 && TREE_OPERAND (arg, 0) == name
4078 && integer_zerop (TREE_OPERAND (arg, 1))
4079 && types_compatible_p (TREE_TYPE (arg),
4080 TREE_TYPE (TREE_TYPE (name)))
4081 && !TREE_THIS_VOLATILE (arg))
4082 uses_ok++;
4086 /* If the number of valid uses does not match the number of
4087 uses in this stmt, there is an unhandled use.  */
4088 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4089 --uses_ok;
4091 if (uses_ok != 0)
4092 ret = true;
4094 if (ret)
4095 BREAK_FROM_IMM_USE_STMT (ui);
4098 return ret;
4101 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4102 them in candidate_bitmap. Note that these do not necessarily include
4103 parameters which are unused and thus can be removed.  Return true iff any
4104 such candidate has been found. */
4106 static bool
4107 find_param_candidates (void)
4109 tree parm;
4110 int count = 0;
4111 bool ret = false;
4112 const char *msg;
4114 for (parm = DECL_ARGUMENTS (current_function_decl);
4115 parm;
4116 parm = DECL_CHAIN (parm))
4118 tree type = TREE_TYPE (parm);
4119 tree_node **slot;
4121 count++;
4123 if (TREE_THIS_VOLATILE (parm)
4124 || TREE_ADDRESSABLE (parm)
4125 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4126 continue;
4128 if (is_unused_scalar_param (parm))
4130 ret = true;
4131 continue;
4134 if (POINTER_TYPE_P (type))
4136 type = TREE_TYPE (type);
4138 if (TREE_CODE (type) == FUNCTION_TYPE
4139 || TYPE_VOLATILE (type)
4140 || (TREE_CODE (type) == ARRAY_TYPE
4141 && TYPE_NONALIASED_COMPONENT (type))
4142 || !is_gimple_reg (parm)
4143 || is_va_list_type (type)
4144 || ptr_parm_has_direct_uses (parm))
4145 continue;
4147 else if (!AGGREGATE_TYPE_P (type))
4148 continue;
4150 if (!COMPLETE_TYPE_P (type)
4151 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4152 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4153 || (AGGREGATE_TYPE_P (type)
4154 && type_internals_preclude_sra_p (type, &msg)))
4155 continue;
4157 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4158 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4159 *slot = parm;
4161 ret = true;
4162 if (dump_file && (dump_flags & TDF_DETAILS))
4164 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4165 print_generic_expr (dump_file, parm);
4166 fprintf (dump_file, "\n");
4170 func_param_count = count;
4171 return ret;
4174 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4175 maybe_modified. */
4177 static bool
4178 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4179 void *data)
4181 struct access *repr = (struct access *) data;
4183 repr->grp_maybe_modified = 1;
4184 return true;
4187 /* Analyze what representatives (in linked lists accessible from
4188 REPRESENTATIVES) can be modified by side effects of statements in the
4189 current function. */
4191 static void
4192 analyze_modified_params (vec<access_p> representatives)
4194 int i;
4196 for (i = 0; i < func_param_count; i++)
4198 struct access *repr;
4200 for (repr = representatives[i];
4201 repr;
4202 repr = repr->next_grp)
4204 struct access *access;
4205 bitmap visited;
4206 ao_ref ar;
4208 if (no_accesses_p (repr))
4209 continue;
4210 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4211 || repr->grp_maybe_modified)
4212 continue;
4214 ao_ref_init (&ar, repr->expr);
4215 visited = BITMAP_ALLOC (NULL);
4216 for (access = repr; access; access = access->next_sibling)
4218 /* All accesses are read ones, otherwise grp_maybe_modified would
4219 be trivially set. */
4220 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4221 mark_maybe_modified, repr, &visited);
4222 if (repr->grp_maybe_modified)
4223 break;
4225 BITMAP_FREE (visited);
4230 /* Propagate distances in bb_dereferences in the direction opposite to the
4231 control flow edges, in each step storing the maximum of the current value
4232 and the minimum of all successors.  These steps are repeated until the table
4233 stabilizes.  Note that BBs which might terminate the function (according to
4234 the final_bbs bitmap) are never updated in this way.  */
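/* In other words, for every parameter I and every block B not in final_bbs,
   the worklist below iterates the update

     deref[B][I] = MAX (deref[B][I], MIN over successors S of deref[S][I])

   to a fixed point: a dereference distance propagates backwards into B only
   when every path leaving B is known to dereference at least that much.  */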
4236 static void
4237 propagate_dereference_distances (void)
4239 basic_block bb;
4241 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4242 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4243 FOR_EACH_BB_FN (bb, cfun)
4245 queue.quick_push (bb);
4246 bb->aux = bb;
4249 while (!queue.is_empty ())
4251 edge_iterator ei;
4252 edge e;
4253 bool change = false;
4254 int i;
4256 bb = queue.pop ();
4257 bb->aux = NULL;
4259 if (bitmap_bit_p (final_bbs, bb->index))
4260 continue;
4262 for (i = 0; i < func_param_count; i++)
4264 int idx = bb->index * func_param_count + i;
4265 bool first = true;
4266 HOST_WIDE_INT inh = 0;
4268 FOR_EACH_EDGE (e, ei, bb->succs)
4270 int succ_idx = e->dest->index * func_param_count + i;
4272 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4273 continue;
4275 if (first)
4277 first = false;
4278 inh = bb_dereferences [succ_idx];
4280 else if (bb_dereferences [succ_idx] < inh)
4281 inh = bb_dereferences [succ_idx];
4284 if (!first && bb_dereferences[idx] < inh)
4286 bb_dereferences[idx] = inh;
4287 change = true;
4291 if (change && !bitmap_bit_p (final_bbs, bb->index))
4292 FOR_EACH_EDGE (e, ei, bb->preds)
4294 if (e->src->aux)
4295 continue;
4297 e->src->aux = e->src;
4298 queue.quick_push (e->src);
4303 /* Dump a dereferences TABLE with heading STR to file F. */
4305 static void
4306 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4308 basic_block bb;
4310 fprintf (f, "%s", str);
4311 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4312 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4314 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4315 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4317 int i;
4318 for (i = 0; i < func_param_count; i++)
4320 int idx = bb->index * func_param_count + i;
4321 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4324 fprintf (f, "\n");
4326 fprintf (f, "\n");
4329 /* Determine which (parts of) parameters passed by reference that are not
4330 assigned to are not certainly dereferenced in this function and for which
4331 the dereferencing therefore cannot be safely moved to the caller without
4332 potentially introducing a segfault.  Mark such REPRESENTATIVES as
4333 grp_not_necessarilly_dereferenced.
4335 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4336 part, is calculated for each pointer parameter rather than a simple boolean,
4337 so as to handle cases when only a fraction of the whole
4338 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4339 an example).
4341 The maximum dereference distances for each pointer parameter and BB are
4342 already stored in bb_dereferences.  This routine simply propagates these
4343 values upwards by propagate_dereference_distances and then compares the
4344 distances of individual parameters in the ENTRY BB to the equivalent
4345 distances of each representative of a (fraction of a) parameter. */
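/* A sketch of the situation being guarded against:

     int f (int *p, int flag)
     {
       if (flag)
         return *p;
       return 0;
     }

   Here *p is not dereferenced on every path through f, so moving the
   dereference to the callers would make them load *p even when flag is 0,
   which could fault for a caller that passes an invalid pointer together
   with a zero flag.  */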
4347 static void
4348 analyze_caller_dereference_legality (vec<access_p> representatives)
4350 int i;
4352 if (dump_file && (dump_flags & TDF_DETAILS))
4353 dump_dereferences_table (dump_file,
4354 "Dereference table before propagation:\n",
4355 bb_dereferences);
4357 propagate_dereference_distances ();
4359 if (dump_file && (dump_flags & TDF_DETAILS))
4360 dump_dereferences_table (dump_file,
4361 "Dereference table after propagation:\n",
4362 bb_dereferences);
4364 for (i = 0; i < func_param_count; i++)
4366 struct access *repr = representatives[i];
4367 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4369 if (!repr || no_accesses_p (repr))
4370 continue;
4374 if ((repr->offset + repr->size) > bb_dereferences[idx])
4375 repr->grp_not_necessarilly_dereferenced = 1;
4376 repr = repr->next_grp;
4378 while (repr);
4382 /* Return the representative access for the parameter declaration PARM if it is
4383 a scalar passed by reference which is not written to and the pointer value
4384 is not used directly. Thus, if it is legal to dereference it in the caller
4385 and we can rule out modifications through aliases, such parameter should be
4386 turned into one passed by value. Return NULL otherwise. */
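/* Sketch of the transformation this enables (new parameter name
   hypothetical, and assuming the modification analysis elsewhere also
   succeeds):

     int f (int *p) { return *p + 1; }

   becomes

     int f (int p_val) { return p_val + 1; }

   with every caller adjusted to dereference the pointer and pass *p by
   value.  */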
4388 static struct access *
4389 unmodified_by_ref_scalar_representative (tree parm)
4391 int i, access_count;
4392 struct access *repr;
4393 vec<access_p> *access_vec;
4395 access_vec = get_base_access_vector (parm);
4396 gcc_assert (access_vec);
4397 repr = (*access_vec)[0];
4398 if (repr->write)
4399 return NULL;
4400 repr->group_representative = repr;
4402 access_count = access_vec->length ();
4403 for (i = 1; i < access_count; i++)
4405 struct access *access = (*access_vec)[i];
4406 if (access->write)
4407 return NULL;
4408 access->group_representative = repr;
4409 access->next_sibling = repr->next_sibling;
4410 repr->next_sibling = access;
4413 repr->grp_read = 1;
4414 repr->grp_scalar_ptr = 1;
4415 return repr;
4418 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4419 associated with. REQ_ALIGN is the minimum required alignment. */
4421 static bool
4422 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4424 unsigned int exp_align;
4425 /* Avoid issues such as the second simple testcase in PR 42025.  The problem
4426 is an incompatible assignment in a call statement (and possibly even in asm
4427 statements). This can be relaxed by using a new temporary but only for
4428 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4429 intraprocedural SRA we deal with this by keeping the old aggregate around,
4430 something we cannot do in IPA-SRA.) */
4431 if (access->write
4432 && (is_gimple_call (access->stmt)
4433 || gimple_code (access->stmt) == GIMPLE_ASM))
4434 return true;
4436 exp_align = get_object_alignment (access->expr);
4437 if (exp_align < req_align)
4438 return true;
4440 return false;
4444 /* Sort collected accesses for parameter PARM, identify representatives for
4445 each accessed region and link them together.  Return NULL if there are
4446 different but overlapping accesses, return the special pointer value meaning
4447 there are no accesses for this parameter if that is the case, and return the
4448 first representative otherwise.  Set *RO_GRP if there is a group of accesses
4449 with only read (i.e. no write) accesses.  */
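/* For instance (offsets and sizes in bits, values hypothetical): accesses
   (offset 0, size 32) and (32, 32) are disjoint and yield two
   representatives, and repeated (0, 32) accesses are grouped under a single
   one; but (0, 64) combined with either (32, 32) or (0, 32) trips the
   all-or-nothing rule below and makes this function return NULL.  */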
4451 static struct access *
4452 splice_param_accesses (tree parm, bool *ro_grp)
4454 int i, j, access_count, group_count;
4455 int agg_size, total_size = 0;
4456 struct access *access, *res, **prev_acc_ptr = &res;
4457 vec<access_p> *access_vec;
4459 access_vec = get_base_access_vector (parm);
4460 if (!access_vec)
4461 return &no_accesses_representant;
4462 access_count = access_vec->length ();
4464 access_vec->qsort (compare_access_positions);
4466 i = 0;
4467 total_size = 0;
4468 group_count = 0;
4469 while (i < access_count)
4471 bool modification;
4472 tree a1_alias_type;
4473 access = (*access_vec)[i];
4474 modification = access->write;
4475 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4476 return NULL;
4477 a1_alias_type = reference_alias_ptr_type (access->expr);
4479 /* Access is about to become group representative unless we find some
4480 nasty overlap which would preclude us from breaking this parameter
4481 apart. */
4483 j = i + 1;
4484 while (j < access_count)
4486 struct access *ac2 = (*access_vec)[j];
4487 if (ac2->offset != access->offset)
4489 /* All or nothing law for parameters. */
4490 if (access->offset + access->size > ac2->offset)
4491 return NULL;
4492 else
4493 break;
4495 else if (ac2->size != access->size)
4496 return NULL;
4498 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4499 || (ac2->type != access->type
4500 && (TREE_ADDRESSABLE (ac2->type)
4501 || TREE_ADDRESSABLE (access->type)))
4502 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4503 return NULL;
4505 modification |= ac2->write;
4506 ac2->group_representative = access;
4507 ac2->next_sibling = access->next_sibling;
4508 access->next_sibling = ac2;
4509 j++;
4512 group_count++;
4513 access->grp_maybe_modified = modification;
4514 if (!modification)
4515 *ro_grp = true;
4516 *prev_acc_ptr = access;
4517 prev_acc_ptr = &access->next_grp;
4518 total_size += access->size;
4519 i = j;
4522 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4523 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4524 else
4525 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4526 if (total_size >= agg_size)
4527 return NULL;
4529 gcc_assert (group_count > 0);
4530 return res;
4533 /* Decide whether parameters with representative accesses given by REPR should
4534 be reduced into components. */
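/* A worked sketch of the size heuristic below (numbers hypothetical, in
   bits): a 64-bit pointer parameter to a 128-bit struct from which two
   unmodified, certainly-dereferenced 32-bit fields are read gives
   cur_parm_size = 64, agg_size = 128 and total_size = 32 + 32 = 64.  With
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR at its default of 2, parm_size_limit is
   2 * 64 = 128; total_size is below both bounds, so the parameter is split
   into two components.  */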
4536 static int
4537 decide_one_param_reduction (struct access *repr)
4539 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4540 bool by_ref;
4541 tree parm;
4543 parm = repr->base;
4544 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4545 gcc_assert (cur_parm_size > 0);
4547 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4549 by_ref = true;
4550 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4552 else
4554 by_ref = false;
4555 agg_size = cur_parm_size;
4558 if (dump_file)
4560 struct access *acc;
4561 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4562 print_generic_expr (dump_file, parm);
4563 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4564 for (acc = repr; acc; acc = acc->next_grp)
4565 dump_access (dump_file, acc, true);
4568 total_size = 0;
4569 new_param_count = 0;
4571 for (; repr; repr = repr->next_grp)
4573 gcc_assert (parm == repr->base);
4575 /* Taking the address of a non-addressable field is verboten. */
4576 if (by_ref && repr->non_addressable)
4577 return 0;
4579 /* Do not decompose a non-BLKmode param in a way that would
4580 create BLKmode params. Especially for by-reference passing
4581 (thus, pointer-type param) this is hardly worthwhile. */
4582 if (DECL_MODE (parm) != BLKmode
4583 && TYPE_MODE (repr->type) == BLKmode)
4584 return 0;
4586 if (!by_ref || (!repr->grp_maybe_modified
4587 && !repr->grp_not_necessarilly_dereferenced))
4588 total_size += repr->size;
4589 else
4590 total_size += cur_parm_size;
4592 new_param_count++;
4595 gcc_assert (new_param_count > 0);
4597 if (optimize_function_for_size_p (cfun))
4598 parm_size_limit = cur_parm_size;
4599 else
4600 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4601 * cur_parm_size);
4603 if (total_size < agg_size
4604 && total_size <= parm_size_limit)
4606 if (dump_file)
4607 fprintf (dump_file, " ....will be split into %i components\n",
4608 new_param_count);
4609 return new_param_count;
4611 else
4612 return 0;
4615 /* The order of the following enums is important; we need to do extra work for
4616 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4617 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4618 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4620 /* Identify representatives of all accesses to all candidate parameters for
4621 IPA-SRA. Return result based on what representatives have been found. */
4623 static enum ipa_splicing_result
4624 splice_all_param_accesses (vec<access_p> &representatives)
4626 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4627 tree parm;
4628 struct access *repr;
4630 representatives.create (func_param_count);
4632 for (parm = DECL_ARGUMENTS (current_function_decl);
4633 parm;
4634 parm = DECL_CHAIN (parm))
4636 if (is_unused_scalar_param (parm))
4638 representatives.quick_push (&no_accesses_representant);
4639 if (result == NO_GOOD_ACCESS)
4640 result = UNUSED_PARAMS;
4642 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4643 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4644 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4646 repr = unmodified_by_ref_scalar_representative (parm);
4647 representatives.quick_push (repr);
4648 if (repr)
4649 result = UNMODIF_BY_REF_ACCESSES;
4651 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4653 bool ro_grp = false;
4654 repr = splice_param_accesses (parm, &ro_grp);
4655 representatives.quick_push (repr);
4657 if (repr && !no_accesses_p (repr))
4659 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4661 if (ro_grp)
4662 result = UNMODIF_BY_REF_ACCESSES;
4663 else if (result < MODIF_BY_REF_ACCESSES)
4664 result = MODIF_BY_REF_ACCESSES;
4666 else if (result < BY_VAL_ACCESSES)
4667 result = BY_VAL_ACCESSES;
4669 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4670 result = UNUSED_PARAMS;
4672 else
4673 representatives.quick_push (NULL);
4676 if (result == NO_GOOD_ACCESS)
4678 representatives.release ();
4679 return NO_GOOD_ACCESS;
4682 return result;
4685 /* Return the index of BASE in PARMS. Abort if it is not found. */
4687 static inline int
4688 get_param_index (tree base, vec<tree> parms)
4690 int i, len;
4692 len = parms.length ();
4693 for (i = 0; i < len; i++)
4694 if (parms[i] == base)
4695 return i;
4696 gcc_unreachable ();
4699 /* Convert the decisions made at the representative level into compact
4700 parameter adjustments.  REPRESENTATIVES are pointers to the first
4701 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the expected
4702 final number of adjustments. */
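/* Sketch of the produced vector (layout hypothetical): for
   f (int unused, struct S *s) where only s->a survives the analysis, the
   vector would hold an IPA_PARM_OP_REMOVE entry for the first parameter and
   one new-component entry for the second (base_index 1, with the offset and
   type of field a); a parameter left untouched would instead contribute an
   IPA_PARM_OP_COPY entry.  */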
4704 static ipa_parm_adjustment_vec
4705 turn_representatives_into_adjustments (vec<access_p> representatives,
4706 int adjustments_count)
4708 vec<tree> parms;
4709 ipa_parm_adjustment_vec adjustments;
4710 tree parm;
4711 int i;
4713 gcc_assert (adjustments_count > 0);
4714 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4715 adjustments.create (adjustments_count);
4716 parm = DECL_ARGUMENTS (current_function_decl);
4717 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4719 struct access *repr = representatives[i];
4721 if (!repr || no_accesses_p (repr))
4723 struct ipa_parm_adjustment adj;
4725 memset (&adj, 0, sizeof (adj));
4726 adj.base_index = get_param_index (parm, parms);
4727 adj.base = parm;
4728 if (!repr)
4729 adj.op = IPA_PARM_OP_COPY;
4730 else
4731 adj.op = IPA_PARM_OP_REMOVE;
4732 adj.arg_prefix = "ISRA";
4733 adjustments.quick_push (adj);
4735 else
4737 struct ipa_parm_adjustment adj;
4738 int index = get_param_index (parm, parms);
4740 for (; repr; repr = repr->next_grp)
4742 memset (&adj, 0, sizeof (adj));
4743 gcc_assert (repr->base == parm);
4744 adj.base_index = index;
4745 adj.base = repr->base;
4746 adj.type = repr->type;
4747 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4748 adj.offset = repr->offset;
4749 adj.reverse = repr->reverse;
4750 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4751 && (repr->grp_maybe_modified
4752 || repr->grp_not_necessarilly_dereferenced));
4753 adj.arg_prefix = "ISRA";
4754 adjustments.quick_push (adj);
4758 parms.release ();
4759 return adjustments;
4762 /* Analyze the collected accesses and produce a plan for what to do with the
4763 parameters in the form of adjustments, NULL meaning nothing. */
4765 static ipa_parm_adjustment_vec
4766 analyze_all_param_acesses (void)
4768 enum ipa_splicing_result repr_state;
4769 bool proceed = false;
4770 int i, adjustments_count = 0;
4771 vec<access_p> representatives;
4772 ipa_parm_adjustment_vec adjustments;
4774 repr_state = splice_all_param_accesses (representatives);
4775 if (repr_state == NO_GOOD_ACCESS)
4776 return ipa_parm_adjustment_vec ();
4778 /* If there are any parameters passed by reference which are not modified
4779 directly, we need to check whether they can be modified indirectly. */
4780 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4782 analyze_caller_dereference_legality (representatives);
4783 analyze_modified_params (representatives);
4786 for (i = 0; i < func_param_count; i++)
4788 struct access *repr = representatives[i];
4790 if (repr && !no_accesses_p (repr))
4792 if (repr->grp_scalar_ptr)
4794 adjustments_count++;
4795 if (repr->grp_not_necessarilly_dereferenced
4796 || repr->grp_maybe_modified)
4797 representatives[i] = NULL;
4798 else
4800 proceed = true;
4801 sra_stats.scalar_by_ref_to_by_val++;
4804 else
4806 int new_components = decide_one_param_reduction (repr);
4808 if (new_components == 0)
4810 representatives[i] = NULL;
4811 adjustments_count++;
4813 else
4815 adjustments_count += new_components;
4816 sra_stats.aggregate_params_reduced++;
4817 sra_stats.param_reductions_created += new_components;
4818 proceed = true;
4822 else
4824 if (no_accesses_p (repr))
4826 proceed = true;
4827 sra_stats.deleted_unused_parameters++;
4829 adjustments_count++;
4833 if (!proceed && dump_file)
4834 fprintf (dump_file, "NOT proceeding to change params.\n");
4836 if (proceed)
4837 adjustments = turn_representatives_into_adjustments (representatives,
4838 adjustments_count);
4839 else
4840 adjustments = ipa_parm_adjustment_vec ();
4842 representatives.release ();
4843 return adjustments;
4846 /* If a parameter replacement identified by ADJ does not yet exist in the form
4847 of a declaration, create and record it; otherwise return the previously
4848 created one.  */
4850 static tree
4851 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4853 tree repl;
4854 if (!adj->new_ssa_base)
4856 char *pretty_name = make_fancy_name (adj->base);
4858 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4859 DECL_NAME (repl) = get_identifier (pretty_name);
4860 DECL_NAMELESS (repl) = 1;
4861 obstack_free (&name_obstack, pretty_name);
4863 adj->new_ssa_base = repl;
4865 else
4866 repl = adj->new_ssa_base;
4867 return repl;
4870 /* Find the first adjustment for a particular parameter BASE in a vector of
4871 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4872 adjustment. */
4874 static struct ipa_parm_adjustment *
4875 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4877 int i, len;
4879 len = adjustments.length ();
4880 for (i = 0; i < len; i++)
4882 struct ipa_parm_adjustment *adj;
4884 adj = &adjustments[i];
4885 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4886 return adj;
4889 return NULL;
4892 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4893 parameter which is to be removed because its value is not used, create a new
4894 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4895 original with it and return it. If there is no need to re-map, return NULL.
4896 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4898 static tree
4899 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4900 ipa_parm_adjustment_vec adjustments)
4902 struct ipa_parm_adjustment *adj;
4903 tree decl, repl, new_name;
4905 if (TREE_CODE (old_name) != SSA_NAME)
4906 return NULL;
4908 decl = SSA_NAME_VAR (old_name);
4909 if (decl == NULL_TREE
4910 || TREE_CODE (decl) != PARM_DECL)
4911 return NULL;
4913 adj = get_adjustment_for_base (adjustments, decl);
4914 if (!adj)
4915 return NULL;
4917 repl = get_replaced_param_substitute (adj);
4918 new_name = make_ssa_name (repl, stmt);
4919 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4920 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4922 if (dump_file)
4924 fprintf (dump_file, "replacing an SSA name of a removed param ");
4925 print_generic_expr (dump_file, old_name);
4926 fprintf (dump_file, " with ");
4927 print_generic_expr (dump_file, new_name);
4928 fprintf (dump_file, "\n");
4931 replace_uses_by (old_name, new_name);
4932 return new_name;
4935 /* If the statement STMT contains any expressions that need to be replaced
4936 with a different one as noted by ADJUSTMENTS, do so.  Handle any potential type
4937 incompatibilities (GSI is used to accommodate conversion statements and must
4938 point to the statement). Return true iff the statement was modified. */
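/* Illustrative case (types and SSA names hypothetical): if parameter
   replacement turns an assignment into "i_1 = f_2" with an int LHS and a
   float RHS, the code below rewrites the RHS as

     i_1 = VIEW_CONVERT_EXPR<int>(f_2);

   gimplifying the new operand via force_gimple_operand_gsi when needed, so
   the statement stays type-correct.  */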
4940 static bool
4941 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4942 ipa_parm_adjustment_vec adjustments)
4944 tree *lhs_p, *rhs_p;
4945 bool any;
4947 if (!gimple_assign_single_p (stmt))
4948 return false;
4950 rhs_p = gimple_assign_rhs1_ptr (stmt);
4951 lhs_p = gimple_assign_lhs_ptr (stmt);
4953 any = ipa_modify_expr (rhs_p, false, adjustments);
4954 any |= ipa_modify_expr (lhs_p, false, adjustments);
4955 if (any)
4957 tree new_rhs = NULL_TREE;
4959 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4961 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4963 /* V_C_Es of constructors can cause trouble (PR 42714). */
4964 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4965 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4966 else
4967 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4968 NULL);
4970 else
4971 new_rhs = fold_build1_loc (gimple_location (stmt),
4972 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4973 *rhs_p);
4975 else if (REFERENCE_CLASS_P (*rhs_p)
4976 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4977 && !is_gimple_reg (*lhs_p))
4978 /* This can happen when an assignment between two single field
4979 structures is turned into an assignment between two pointers to
4980 scalars (PR 42237). */
4981 new_rhs = *rhs_p;
4983 if (new_rhs)
4985 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4986 true, GSI_SAME_STMT);
4988 gimple_assign_set_rhs_from_tree (gsi, tmp);
4991 return true;
4994 return false;
4997 /* Traverse the function body and perform all modifications as described in
4998 ADJUSTMENTS. Return true iff the CFG has been changed. */
5000 bool
5001 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
5003 bool cfg_changed = false;
5004 basic_block bb;
5006 FOR_EACH_BB_FN (bb, cfun)
5008 gimple_stmt_iterator gsi;
5010 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5012 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
5013 tree new_lhs, old_lhs = gimple_phi_result (phi);
5014 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
5015 if (new_lhs)
5017 gimple_phi_set_result (phi, new_lhs);
5018 release_ssa_name (old_lhs);
5022 gsi = gsi_start_bb (bb);
5023 while (!gsi_end_p (gsi))
5025 gimple *stmt = gsi_stmt (gsi);
5026 bool modified = false;
5027 tree *t;
5028 unsigned i;
5030 switch (gimple_code (stmt))
5032 case GIMPLE_RETURN:
5033 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
5034 if (*t != NULL_TREE)
5035 modified |= ipa_modify_expr (t, true, adjustments);
5036 break;
5038 case GIMPLE_ASSIGN:
5039 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
5040 break;
5042 case GIMPLE_CALL:
5043 /* Operands must be processed before the lhs. */
5044 for (i = 0; i < gimple_call_num_args (stmt); i++)
5046 t = gimple_call_arg_ptr (stmt, i);
5047 modified |= ipa_modify_expr (t, true, adjustments);
5050 if (gimple_call_lhs (stmt))
5052 t = gimple_call_lhs_ptr (stmt);
5053 modified |= ipa_modify_expr (t, false, adjustments);
5055 break;
5057 case GIMPLE_ASM:
5059 gasm *asm_stmt = as_a <gasm *> (stmt);
5060 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5062 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5063 modified |= ipa_modify_expr (t, true, adjustments);
5065 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5067 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5068 modified |= ipa_modify_expr (t, false, adjustments);
5071 break;
5073 default:
5074 break;
5077 def_operand_p defp;
5078 ssa_op_iter iter;
5079 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5081 tree old_def = DEF_FROM_PTR (defp);
5082 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5083 adjustments))
5085 SET_DEF (defp, new_def);
5086 release_ssa_name (old_def);
5087 modified = true;
5091 if (modified)
5093 update_stmt (stmt);
5094 if (maybe_clean_eh_stmt (stmt)
5095 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5096 cfg_changed = true;
5098 gsi_next (&gsi);
5102 return cfg_changed;
5105 /* Call gimple_debug_bind_reset_value on all debug statements describing
5106 gimple register parameters that are being removed or replaced. */
5108 static void
5109 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5111 int i, len;
5112 gimple_stmt_iterator *gsip = NULL, gsi;
5114 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5116 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5117 gsip = &gsi;
5119 len = adjustments.length ();
5120 for (i = 0; i < len; i++)
5122 struct ipa_parm_adjustment *adj;
5123 imm_use_iterator ui;
5124 gimple *stmt;
5125 gdebug *def_temp;
5126 tree name, vexpr, copy = NULL_TREE;
5127 use_operand_p use_p;
5129 adj = &adjustments[i];
5130 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5131 continue;
5132 name = ssa_default_def (cfun, adj->base);
5133 vexpr = NULL;
5134 if (name)
5135 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5137 if (gimple_clobber_p (stmt))
5139 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5140 unlink_stmt_vdef (stmt);
5141 gsi_remove (&cgsi, true);
5142 release_defs (stmt);
5143 continue;
5145 /* All other users must have been removed by
5146 ipa_sra_modify_function_body. */
5147 gcc_assert (is_gimple_debug (stmt));
5148 if (vexpr == NULL && gsip != NULL)
5150 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5151 vexpr = make_node (DEBUG_EXPR_DECL);
5152 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5153 NULL);
5154 DECL_ARTIFICIAL (vexpr) = 1;
5155 TREE_TYPE (vexpr) = TREE_TYPE (name);
5156 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5157 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5159 if (vexpr)
5161 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5162 SET_USE (use_p, vexpr);
5164 else
5165 gimple_debug_bind_reset_value (stmt);
5166 update_stmt (stmt);
5168 /* Create a VAR_DECL for debug info purposes. */
5169 if (!DECL_IGNORED_P (adj->base))
5171 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5172 VAR_DECL, DECL_NAME (adj->base),
5173 TREE_TYPE (adj->base));
5174 if (DECL_PT_UID_SET_P (adj->base))
5175 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5176 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5177 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5178 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5179 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5180 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5181 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5182 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5183 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5184 SET_DECL_RTL (copy, 0);
5185 TREE_USED (copy) = 1;
5186 DECL_CONTEXT (copy) = current_function_decl;
5187 add_local_decl (cfun, copy);
5188 DECL_CHAIN (copy) =
5189 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5190 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5192 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5194 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5195 if (vexpr)
5196 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5197 else
5198 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5199 NULL);
5200 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5205 /* Return false if all callers have at least as many actual arguments as there
5206 are formal parameters in the current function and their types
5207 match. */
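/* E.g. (illustrative, old-style C): given a K&R-style declaration

     extern int f ();
     ... f (1); ...      <- f is in fact defined with two parameters

   the call statement does not match the callee's formal parameter list,
   which callsite_arguments_match_p rejects and this function therefore
   reports.  */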
5209 static bool
5210 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5211 void *data ATTRIBUTE_UNUSED)
5213 struct cgraph_edge *cs;
5214 for (cs = node->callers; cs; cs = cs->next_caller)
5215 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5216 return true;
5218 return false;
5221 /* Return false if all callers have a VUSE attached to their call statement.  */
5223 static bool
5224 some_callers_have_no_vuse_p (struct cgraph_node *node,
5225 void *data ATTRIBUTE_UNUSED)
5227 struct cgraph_edge *cs;
5228 for (cs = node->callers; cs; cs = cs->next_caller)
5229 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5230 return true;
5232 return false;
5235 /* Convert all callers of NODE. */
5237 static bool
5238 convert_callers_for_node (struct cgraph_node *node,
5239 void *data)
5241 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5242 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5243 struct cgraph_edge *cs;
5245 for (cs = node->callers; cs; cs = cs->next_caller)
5247 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5249 if (dump_file)
5250 fprintf (dump_file, "Adjusting call %s -> %s\n",
5251 cs->caller->dump_name (), cs->callee->dump_name ());
5253 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5255 pop_cfun ();
5258 for (cs = node->callers; cs; cs = cs->next_caller)
5259 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5260 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5261 compute_fn_summary (cs->caller, true);
5262 BITMAP_FREE (recomputed_callers);
5264 return true;
5267 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5269 static void
5270 convert_callers (struct cgraph_node *node, tree old_decl,
5271 ipa_parm_adjustment_vec adjustments)
5273 basic_block this_block;
5275 node->call_for_symbol_and_aliases (convert_callers_for_node,
5276 &adjustments, false);
5278 if (!encountered_recursive_call)
5279 return;
5281 FOR_EACH_BB_FN (this_block, cfun)
5283 gimple_stmt_iterator gsi;
5285 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5287 gcall *stmt;
5288 tree call_fndecl;
5289 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5290 if (!stmt)
5291 continue;
5292 call_fndecl = gimple_call_fndecl (stmt);
5293 if (call_fndecl == old_decl)
5295 if (dump_file)
5296 fprintf (dump_file, "Adjusting recursive call");
5297 gimple_call_set_fndecl (stmt, node->decl);
5298 ipa_modify_call_arguments (NULL, stmt, adjustments);
5303 return;
5306 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
5307 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5309 static bool
5310 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5312 struct cgraph_node *new_node;
5313 bool cfg_changed;
5315 cgraph_edge::rebuild_edges ();
5316 free_dominance_info (CDI_DOMINATORS);
5317 pop_cfun ();
5319 /* This must be done after rebuilding cgraph edges for node above.
5320 Otherwise any recursive calls to node that are recorded in
5321 redirect_callers will be corrupted. */
5322 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5323 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5324 NULL, false, NULL, NULL,
5325 "isra");
5326 redirect_callers.release ();
5328 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5329 ipa_modify_formal_parameters (current_function_decl, adjustments);
5330 cfg_changed = ipa_sra_modify_function_body (adjustments);
5331 sra_ipa_reset_debug_stmts (adjustments);
5332 convert_callers (new_node, node->decl, adjustments);
5333 new_node->make_local ();
5334 return cfg_changed;
5337 /* Means of communication between ipa_sra_check_caller and
5338 ipa_sra_preliminary_function_checks. */
5340 struct ipa_sra_check_caller_data
5342 bool has_callers;
5343 bool bad_arg_alignment;
5344 bool has_thunk;
5347 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5348 ipa_sra_check_caller_data.  Also check whether all aggregate arguments in all
5349 known calls are unit aligned and, if not, set the appropriate flag in DATA
5350 too.  */
5352 static bool
5353 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5355 if (!node->callers)
5356 return false;
5358 struct ipa_sra_check_caller_data *iscc;
5359 iscc = (struct ipa_sra_check_caller_data *) data;
5360 iscc->has_callers = true;
5362 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5364 if (cs->caller->thunk.thunk_p)
5366 iscc->has_thunk = true;
5367 return true;
5369 gimple *call_stmt = cs->call_stmt;
5370 unsigned count = gimple_call_num_args (call_stmt);
5371 for (unsigned i = 0; i < count; i++)
5373 tree arg = gimple_call_arg (call_stmt, i);
5374 if (is_gimple_reg (arg))
5375 continue;
5377 tree offset;
5378 HOST_WIDE_INT bitsize, bitpos;
5379 machine_mode mode;
5380 int unsignedp, reversep, volatilep = 0;
5381 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5382 &unsignedp, &reversep, &volatilep);
5383 if (bitpos % BITS_PER_UNIT)
5385 iscc->bad_arg_alignment = true;
5386 return true;
5391 return false;
5394 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5395 its attributes; return true otherwise.  NODE is the cgraph node of the current
5396 function. */
5398 static bool
5399 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5401 if (!node->can_be_local_p ())
5403 if (dump_file)
5404 fprintf (dump_file, "Function not local to this compilation unit.\n");
5405 return false;
5408 if (!node->local.can_change_signature)
5410 if (dump_file)
5411 fprintf (dump_file, "Function can not change signature.\n");
5412 return false;
5415 if (!tree_versionable_function_p (node->decl))
5417 if (dump_file)
5418 fprintf (dump_file, "Function is not versionable.\n");
5419 return false;
5422 if (!opt_for_fn (node->decl, optimize)
5423 || !opt_for_fn (node->decl, flag_ipa_sra))
5425 if (dump_file)
5426 fprintf (dump_file, "Function not optimized.\n");
5427 return false;
5430 if (DECL_VIRTUAL_P (current_function_decl))
5432 if (dump_file)
5433 fprintf (dump_file, "Function is a virtual method.\n");
5434 return false;
5437 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5438 && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5440 if (dump_file)
5441 fprintf (dump_file, "Function too big to be made truly local.\n");
5442 return false;
5445 if (cfun->stdarg)
5447 if (dump_file)
5448 fprintf (dump_file, "Function uses stdarg.\n");
5449 return false;
5452 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5453 return false;
5455 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5457 if (dump_file)
5458 fprintf (dump_file, "Always inline function will be inlined "
5459 "anyway. \n");
5460 return false;
5463 struct ipa_sra_check_caller_data iscc;
5464 memset (&iscc, 0, sizeof(iscc));
5465 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5466 if (!iscc.has_callers)
5468 if (dump_file)
5469 fprintf (dump_file,
5470 "Function has no callers in this compilation unit.\n");
5471 return false;
5474 if (iscc.bad_arg_alignment)
5476 if (dump_file)
5477 fprintf (dump_file,
5478 "A function call has an argument with non-unit alignment.\n");
5479 return false;
5482 if (iscc.has_thunk)
5484 if (dump_file)
5485 fprintf (dump_file,
5486 "A has thunk.\n");
5487 return false;
5490 return true;
5493 /* Perform early interprocedural SRA. */
5495 static unsigned int
5496 ipa_early_sra (void)
5498 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5499 ipa_parm_adjustment_vec adjustments;
5500 int ret = 0;
5502 if (!ipa_sra_preliminary_function_checks (node))
5503 return 0;
5505 sra_initialize ();
5506 sra_mode = SRA_MODE_EARLY_IPA;
5508 if (!find_param_candidates ())
5510 if (dump_file)
5511 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5512 goto simple_out;
5515 if (node->call_for_symbol_and_aliases
5516 (some_callers_have_mismatched_arguments_p, NULL, true))
5518 if (dump_file)
5519 fprintf (dump_file, "There are callers with insufficient number of "
5520 "arguments or arguments with type mismatches.\n");
5521 goto simple_out;
5524 if (node->call_for_symbol_and_aliases
5525 (some_callers_have_no_vuse_p, NULL, true))
5527 if (dump_file)
5528 fprintf (dump_file, "There are callers with no VUSE attached "
5529 "to a call stmt.\n");
5530 goto simple_out;
5533 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5534 func_param_count
5535 * last_basic_block_for_fn (cfun));
5536 final_bbs = BITMAP_ALLOC (NULL);
5538 scan_function ();
5539 if (encountered_apply_args)
5541 if (dump_file)
5542 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5543 goto out;
5546 if (encountered_unchangable_recursive_call)
5548 if (dump_file)
5549 fprintf (dump_file, "Function calls itself with insufficient "
5550 "number of arguments.\n");
5551 goto out;
5554 adjustments = analyze_all_param_acesses ();
5555 if (!adjustments.exists ())
5556 goto out;
5557 if (dump_file)
5558 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5560 if (modify_function (node, adjustments))
5561 ret = TODO_update_ssa | TODO_cleanup_cfg;
5562 else
5563 ret = TODO_update_ssa;
5564 adjustments.release ();
5566 statistics_counter_event (cfun, "Unused parameters deleted",
5567 sra_stats.deleted_unused_parameters);
5568 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5569 sra_stats.scalar_by_ref_to_by_val);
5570 statistics_counter_event (cfun, "Aggregate parameters broken up",
5571 sra_stats.aggregate_params_reduced);
5572 statistics_counter_event (cfun, "Aggregate parameter components created",
5573 sra_stats.param_reductions_created);
5575 out:
5576 BITMAP_FREE (final_bbs);
5577 free (bb_dereferences);
5578 simple_out:
5579 sra_deinitialize ();
5580 return ret;
5583 namespace {
5585 const pass_data pass_data_early_ipa_sra =
5587 GIMPLE_PASS, /* type */
5588 "eipa_sra", /* name */
5589 OPTGROUP_NONE, /* optinfo_flags */
5590 TV_IPA_SRA, /* tv_id */
5591 0, /* properties_required */
5592 0, /* properties_provided */
5593 0, /* properties_destroyed */
5594 0, /* todo_flags_start */
5595 TODO_dump_symtab, /* todo_flags_finish */
5598 class pass_early_ipa_sra : public gimple_opt_pass
5600 public:
5601 pass_early_ipa_sra (gcc::context *ctxt)
5602 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5605 /* opt_pass methods: */
5606 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5607 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5609 }; // class pass_early_ipa_sra
5611 } // anon namespace
5613 gimple_opt_pass *
5614 make_pass_early_ipa_sra (gcc::context *ctxt)
5616 return new pass_early_ipa_sra (ctxt);