1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2015 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
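/* As an illustrative sketch (editorial addition, not from any testcase),
   intraprocedural SRA turns code like

     struct pair { int a; int b; };
     int f (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   into the equivalent of

     int f (void)
     {
       int p$a = 1;
       int p$b = 2;
       return p$a + p$b;
     }

   where p$a and p$b are the scalar replacement variables created by
   stage 4.  */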
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-inline.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
108 /* Enumeration of all aggregate reductions we can do. */
109 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
110 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 SRA_MODE_INTRA }; /* late intraprocedural SRA */
113 /* Global variable describing which aggregate reduction we are performing at
114 the moment. */
115 static enum sra_mode sra_mode;
117 struct assign_link;
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater or equal to offset of the root and offset+size is smaller or equal
129 to offset+size of the root. Children of an access are sorted by offset.
131 Note that accesses to parts of vector and complex number types are always
132 represented by an access to the whole complex number or vector. It is a
133 duty of the modifying functions to replace them appropriately. */
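/* Illustration (editorial): for an aggregate with representative accesses at
   (offset, size) pairs (0, 64), (0, 32) and (32, 32), the access tree is

     (0, 64)
      +- (0, 32)
      +- (32, 32)

   where (0, 32) is reached through first_child of (0, 64) and (32, 32)
   through next_sibling of (0, 32).  The exact numbers depend on the types
   involved; this is only a sketch.  */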
135 struct access
137 /* Values returned by `get_ref_base_and_extent' for each component reference.
138 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
151 /* The statement this access belongs to. */
152 gimple *stmt;
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. In IPA-SRA this is a pointer to the next access
167 belonging to the same group (having the same representative). */
168 struct access *next_sibling;
170 /* Pointers to the first and last element in the linked list of assign
171 links. */
172 struct assign_link *first_link, *last_link;
174 /* Pointer to the next access in the work queue. */
175 struct access *next_queued;
177 /* Replacement variable for this access "region." Never to be accessed
178 directly, always only by the means of get_access_replacement() and only
179 when grp_to_be_replaced flag is set. */
180 tree replacement_decl;
182 /* Is this access an access to a non-addressable field? */
183 unsigned non_addressable : 1;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
189 /* Is this particular access a write access? */
189 unsigned write : 1;
191 /* Is this access currently in the work queue? */
192 unsigned grp_queued : 1;
194 /* Does this group contain a write access? This flag is propagated down the
195 access tree. */
196 unsigned grp_write : 1;
198 /* Does this group contain a read access? This flag is propagated down the
199 access tree. */
200 unsigned grp_read : 1;
202 /* Does this group contain a read access that comes from an assignment
203 statement? This flag is propagated down the access tree. */
204 unsigned grp_assignment_read : 1;
206 /* Does this group contain a write access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_write : 1;
210 /* Does this group contain a read access through a scalar type? This flag is
211 not propagated in the access tree in any direction. */
212 unsigned grp_scalar_read : 1;
214 /* Does this group contain a write access through a scalar type? This flag
215 is not propagated in the access tree in any direction. */
216 unsigned grp_scalar_write : 1;
218 /* Is this access an artificial one created to scalarize some record
219 entirely? */
220 unsigned grp_total_scalarization : 1;
222 /* Other passes of the analysis use this bit to make function
223 analyze_access_subtree create scalar replacements for this group if
224 possible. */
225 unsigned grp_hint : 1;
227 /* Is the subtree rooted in this access fully covered by scalar
228 replacements? */
229 unsigned grp_covered : 1;
231 /* If set to true, this access and all below it in an access tree must not be
232 scalarized. */
233 unsigned grp_unscalarizable_region : 1;
235 /* Whether data have been written to parts of the aggregate covered by this
236 access which is not to be scalarized. This flag is propagated up in the
237 access tree. */
238 unsigned grp_unscalarized_data : 1;
240 /* Does this access and/or group contain a write access through a
241 BIT_FIELD_REF? */
242 unsigned grp_partial_lhs : 1;
244 /* Set when a scalar replacement should be created for this variable. */
245 unsigned grp_to_be_replaced : 1;
247 /* Set when we want a replacement for the sole purpose of having it in
248 generated debug statements. */
249 unsigned grp_to_be_debug_replaced : 1;
251 /* Should TREE_NO_WARNING of a replacement be set? */
252 unsigned grp_no_warning : 1;
254 /* Is it possible that the group refers to data which might be (directly or
255 otherwise) modified? */
256 unsigned grp_maybe_modified : 1;
258 /* Set when this is a representative of a pointer to scalar (i.e. by
259 reference) parameter which we consider for turning into a plain scalar
260 (i.e. a by value parameter). */
261 unsigned grp_scalar_ptr : 1;
263 /* Set when we discover that this pointer is not safe to dereference in the
264 caller. */
265 unsigned grp_not_necessarilly_dereferenced : 1;
268 typedef struct access *access_p;
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs as long as they don't
276 conflict with what is already there. */
277 struct assign_link
279 struct access *lacc, *racc;
280 struct assign_link *next;
283 /* Alloc pool for allocating assign link structures. */
284 static object_allocator<assign_link> assign_link_pool ("SRA links");
286 /* Base (tree) -> Vector (vec<access_p> *) map. */
287 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
289 /* Candidate hash table helpers. */
291 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
293 static inline hashval_t hash (const tree_node *);
294 static inline bool equal (const tree_node *, const tree_node *);
297 /* Hash a tree in a uid_decl_map. */
299 inline hashval_t
300 uid_decl_hasher::hash (const tree_node *item)
302 return item->decl_minimal.uid;
305 /* Return true if the DECL_UIDs of both trees are equal. */
307 inline bool
308 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
310 return (a->decl_minimal.uid == b->decl_minimal.uid);
313 /* Set of candidates. */
314 static bitmap candidate_bitmap;
315 static hash_table<uid_decl_hasher> *candidates;
317 /* For a candidate UID return the candidate's decl. */
319 static inline tree
320 candidate (unsigned uid)
322 tree_node t;
323 t.decl_minimal.uid = uid;
324 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
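/* Editorial note: the lookup above relies on uid_decl_hasher inspecting only
   decl_minimal.uid, so a mostly uninitialized tree_node allocated on the
   stack with just that field set is a valid hash key.  */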
327 /* Bitmap of candidates which we should try to entirely scalarize away and
328 those which cannot be (because they are, and need to be, used as a whole). */
329 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
331 /* Obstack for creation of fancy names. */
332 static struct obstack name_obstack;
334 /* Head of a linked list of accesses that need to have their subaccesses
335 propagated to their assignment counterparts. */
336 static struct access *work_queue_head;
338 /* Number of parameters of the analyzed function when doing early ipa SRA. */
339 static int func_param_count;
341 /* scan_function sets the following to true if it encounters a call to
342 __builtin_apply_args. */
343 static bool encountered_apply_args;
345 /* Set by scan_function when it finds a recursive call. */
346 static bool encountered_recursive_call;
348 /* Set by scan_function when it finds a recursive call with fewer actual
349 arguments than formal parameters. */
350 static bool encountered_unchangable_recursive_call;
352 /* This is a table in which for each basic block and parameter there is a
353 distance (offset + size) in that parameter which is dereferenced and
354 accessed in that BB. */
355 static HOST_WIDE_INT *bb_dereferences;
356 /* Bitmap of BBs that can cause the function to "stop" progressing by
357 returning, throwing externally, looping infinitely or calling a function
358 which might abort, etc. */
359 static bitmap final_bbs;
361 /* Representative of no accesses at all. */
362 static struct access no_accesses_representant;
364 /* Predicate to test the special value. */
366 static inline bool
367 no_accesses_p (struct access *access)
369 return access == &no_accesses_representant;
372 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
373 representative fields are dumped, otherwise those which only describe the
374 individual access are. */
376 static struct
378 /* Number of processed aggregates is readily available in
379 analyze_all_variable_accesses and so is not stored here. */
381 /* Number of created scalar replacements. */
382 int replacements;
384 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
385 expression. */
386 int exprs;
388 /* Number of statements created by generate_subtree_copies. */
389 int subtree_copies;
391 /* Number of statements created by load_assign_lhs_subreplacements. */
392 int subreplacements;
394 /* Number of times sra_modify_assign has deleted a statement. */
395 int deleted;
397 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
398 RHS separately due to type conversions or nonexistent matching
399 references. */
400 int separate_lhs_rhs_handling;
402 /* Number of parameters that were removed because they were unused. */
403 int deleted_unused_parameters;
405 /* Number of scalars passed as parameters by reference that have been
406 converted to be passed by value. */
407 int scalar_by_ref_to_by_val;
409 /* Number of aggregate parameters that were replaced by one or more of their
410 components. */
411 int aggregate_params_reduced;
413 /* Number of components created when splitting aggregate parameters. */
414 int param_reductions_created;
415 } sra_stats;
417 static void
418 dump_access (FILE *f, struct access *access, bool grp)
420 fprintf (f, "access { ");
421 fprintf (f, "base = (%d)'", DECL_UID (access->base));
422 print_generic_expr (f, access->base, 0);
423 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
424 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
425 fprintf (f, ", expr = ");
426 print_generic_expr (f, access->expr, 0);
427 fprintf (f, ", type = ");
428 print_generic_expr (f, access->type, 0);
429 fprintf (f, ", non_addressable = %d, reverse = %d",
430 access->non_addressable, access->reverse);
431 if (grp)
432 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
433 "grp_assignment_write = %d, grp_scalar_read = %d, "
434 "grp_scalar_write = %d, grp_total_scalarization = %d, "
435 "grp_hint = %d, grp_covered = %d, "
436 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
437 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
438 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
439 "grp_not_necessarilly_dereferenced = %d\n",
440 access->grp_read, access->grp_write, access->grp_assignment_read,
441 access->grp_assignment_write, access->grp_scalar_read,
442 access->grp_scalar_write, access->grp_total_scalarization,
443 access->grp_hint, access->grp_covered,
444 access->grp_unscalarizable_region, access->grp_unscalarized_data,
445 access->grp_partial_lhs, access->grp_to_be_replaced,
446 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
447 access->grp_not_necessarilly_dereferenced);
448 else
449 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
450 "grp_partial_lhs = %d\n",
451 access->write, access->grp_total_scalarization,
452 access->grp_partial_lhs);
455 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
457 static void
458 dump_access_tree_1 (FILE *f, struct access *access, int level)
462 int i;
464 for (i = 0; i < level; i++)
465 fputs ("* ", f);
467 dump_access (f, access, true);
469 if (access->first_child)
470 dump_access_tree_1 (f, access->first_child, level + 1);
472 access = access->next_sibling;
474 while (access);
477 /* Dump all access trees for a variable, given the pointer to the first root in
478 ACCESS. */
480 static void
481 dump_access_tree (FILE *f, struct access *access)
483 for (; access; access = access->next_grp)
484 dump_access_tree_1 (f, access, 0);
487 /* Return true iff ACC is non-NULL and has subaccesses. */
489 static inline bool
490 access_has_children_p (struct access *acc)
492 return acc && acc->first_child;
495 /* Return true iff ACC is (partly) covered by at least one replacement. */
497 static bool
498 access_has_replacements_p (struct access *acc)
500 struct access *child;
501 if (acc->grp_to_be_replaced)
502 return true;
503 for (child = acc->first_child; child; child = child->next_sibling)
504 if (access_has_replacements_p (child))
505 return true;
506 return false;
509 /* Return a vector of pointers to accesses for the variable given in BASE or
510 NULL if there is none. */
512 static vec<access_p> *
513 get_base_access_vector (tree base)
515 return base_access_vec->get (base);
518 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
519 in ACCESS. Return NULL if it cannot be found. */
521 static struct access *
522 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
523 HOST_WIDE_INT size)
525 while (access && (access->offset != offset || access->size != size))
527 struct access *child = access->first_child;
529 while (child && (child->offset + child->size <= offset))
530 child = child->next_sibling;
531 access = child;
534 return access;
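/* Editorial example: searching the tree sketched earlier for offset 32 and
   size 32 starts at the root (0, 64), which does not match; the inner loop
   skips the child (0, 32) because 0 + 32 <= 32 and descends to its sibling
   (32, 32), which matches and is returned.  */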
537 /* Return the first group representative for DECL or NULL if none exists. */
539 static struct access *
540 get_first_repr_for_decl (tree base)
542 vec<access_p> *access_vec;
544 access_vec = get_base_access_vector (base);
545 if (!access_vec)
546 return NULL;
548 return (*access_vec)[0];
551 /* Find an access representative for the variable BASE and given OFFSET and
552 SIZE. Requires that access trees have already been built. Return NULL if
553 it cannot be found. */
555 static struct access *
556 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
557 HOST_WIDE_INT size)
559 struct access *access;
561 access = get_first_repr_for_decl (base);
562 while (access && (access->offset + access->size <= offset))
563 access = access->next_grp;
564 if (!access)
565 return NULL;
567 return find_access_in_subtree (access, offset, size);
570 /* Add LINK to the linked list of assign links of RACC. */
571 static void
572 add_link_to_rhs (struct access *racc, struct assign_link *link)
574 gcc_assert (link->racc == racc);
576 if (!racc->first_link)
578 gcc_assert (!racc->last_link);
579 racc->first_link = link;
581 else
582 racc->last_link->next = link;
584 racc->last_link = link;
585 link->next = NULL;
588 /* Move all link structures in their linked list in OLD_RACC to the linked list
589 in NEW_RACC. */
590 static void
591 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
593 if (!old_racc->first_link)
595 gcc_assert (!old_racc->last_link);
596 return;
599 if (new_racc->first_link)
601 gcc_assert (!new_racc->last_link->next);
602 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
604 new_racc->last_link->next = old_racc->first_link;
605 new_racc->last_link = old_racc->last_link;
607 else
609 gcc_assert (!new_racc->last_link);
611 new_racc->first_link = old_racc->first_link;
612 new_racc->last_link = old_racc->last_link;
614 old_racc->first_link = old_racc->last_link = NULL;
617 /* Add ACCESS to the work queue (which is actually a stack). */
619 static void
620 add_access_to_work_queue (struct access *access)
622 if (!access->grp_queued)
624 gcc_assert (!access->next_queued);
625 access->next_queued = work_queue_head;
626 access->grp_queued = 1;
627 work_queue_head = access;
631 /* Pop an access from the work queue, and return it, assuming there is one. */
633 static struct access *
634 pop_access_from_work_queue (void)
636 struct access *access = work_queue_head;
638 work_queue_head = access->next_queued;
639 access->next_queued = NULL;
640 access->grp_queued = 0;
641 return access;
645 /* Allocate necessary structures. */
647 static void
648 sra_initialize (void)
650 candidate_bitmap = BITMAP_ALLOC (NULL);
651 candidates = new hash_table<uid_decl_hasher>
652 (vec_safe_length (cfun->local_decls) / 2);
653 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
654 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
655 gcc_obstack_init (&name_obstack);
656 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
657 memset (&sra_stats, 0, sizeof (sra_stats));
658 encountered_apply_args = false;
659 encountered_recursive_call = false;
660 encountered_unchangable_recursive_call = false;
663 /* Deallocate all general structures. */
665 static void
666 sra_deinitialize (void)
668 BITMAP_FREE (candidate_bitmap);
669 delete candidates;
670 candidates = NULL;
671 BITMAP_FREE (should_scalarize_away_bitmap);
672 BITMAP_FREE (cannot_scalarize_away_bitmap);
673 access_pool.release ();
674 assign_link_pool.release ();
675 obstack_free (&name_obstack, NULL);
677 /* TODO: hash_map does not support traits that can release
678 the value type of the hash_map. */
679 for (hash_map<tree, auto_vec<access_p> >::iterator it =
680 base_access_vec->begin (); it != base_access_vec->end (); ++it)
681 (*it).second.release ();
683 delete base_access_vec;
686 /* Remove DECL from candidates for SRA and write REASON to the dump file if
687 there is one. */
688 static void
689 disqualify_candidate (tree decl, const char *reason)
691 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
692 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
694 if (dump_file && (dump_flags & TDF_DETAILS))
696 fprintf (dump_file, "! Disqualifying ");
697 print_generic_expr (dump_file, decl, 0);
698 fprintf (dump_file, " - %s\n", reason);
702 /* Return true iff the type contains a field or an element which does not allow
703 scalarization. */
705 static bool
706 type_internals_preclude_sra_p (tree type, const char **msg)
708 tree fld;
709 tree et;
711 switch (TREE_CODE (type))
713 case RECORD_TYPE:
714 case UNION_TYPE:
715 case QUAL_UNION_TYPE:
716 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
717 if (TREE_CODE (fld) == FIELD_DECL)
719 tree ft = TREE_TYPE (fld);
721 if (TREE_THIS_VOLATILE (fld))
723 *msg = "volatile structure field";
724 return true;
726 if (!DECL_FIELD_OFFSET (fld))
728 *msg = "no structure field offset";
729 return true;
731 if (!DECL_SIZE (fld))
733 *msg = "zero structure field size";
734 return true;
736 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
738 *msg = "structure field offset not fixed";
739 return true;
741 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
743 *msg = "structure field size not fixed";
744 return true;
746 if (!tree_fits_shwi_p (bit_position (fld)))
748 *msg = "structure field size too big";
749 return true;
751 if (AGGREGATE_TYPE_P (ft)
752 && int_bit_position (fld) % BITS_PER_UNIT != 0)
754 *msg = "structure field is bit field";
755 return true;
758 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
759 return true;
762 return false;
764 case ARRAY_TYPE:
765 et = TREE_TYPE (type);
767 if (TYPE_VOLATILE (et))
769 *msg = "element type is volatile";
770 return true;
773 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
774 return true;
776 return false;
778 default:
779 return false;
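/* A hypothetical example (editorial) of a type rejected above:

     struct s { volatile int x; };

   fails with "volatile structure field", whereas a plain record of
   fixed-size scalar fields passes all of the checks.  */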
783 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
784 base variable if it is. Return T if it is not an SSA_NAME. */
786 static tree
787 get_ssa_base_param (tree t)
789 if (TREE_CODE (t) == SSA_NAME)
791 if (SSA_NAME_IS_DEFAULT_DEF (t))
792 return SSA_NAME_VAR (t);
793 else
794 return NULL_TREE;
796 return t;
799 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
800 belongs to, unless the BB has already been marked as potentially
801 final. */
803 static void
804 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
806 basic_block bb = gimple_bb (stmt);
807 int idx, parm_index = 0;
808 tree parm;
810 if (bitmap_bit_p (final_bbs, bb->index))
811 return;
813 for (parm = DECL_ARGUMENTS (current_function_decl);
814 parm && parm != base;
815 parm = DECL_CHAIN (parm))
816 parm_index++;
818 gcc_assert (parm_index < func_param_count);
820 idx = bb->index * func_param_count + parm_index;
821 if (bb_dereferences[idx] < dist)
822 bb_dereferences[idx] = dist;
825 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
826 the three fields. Also add it to the vector of accesses corresponding to
827 the base. Finally, return the new access. */
829 static struct access *
830 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
832 struct access *access = access_pool.allocate ();
834 memset (access, 0, sizeof (struct access));
835 access->base = base;
836 access->offset = offset;
837 access->size = size;
839 base_access_vec->get_or_insert (base).safe_push (access);
841 return access;
844 /* Create and insert access for EXPR. Return created access, or NULL if it is
845 not possible. */
847 static struct access *
848 create_access (tree expr, gimple *stmt, bool write)
850 struct access *access;
851 HOST_WIDE_INT offset, size, max_size;
852 tree base = expr;
853 bool reverse, ptr, unscalarizable_region = false;
855 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
857 if (sra_mode == SRA_MODE_EARLY_IPA
858 && TREE_CODE (base) == MEM_REF)
860 base = get_ssa_base_param (TREE_OPERAND (base, 0));
861 if (!base)
862 return NULL;
863 ptr = true;
865 else
866 ptr = false;
868 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
869 return NULL;
871 if (sra_mode == SRA_MODE_EARLY_IPA)
873 if (size < 0 || size != max_size)
875 disqualify_candidate (base, "Encountered a variable sized access.");
876 return NULL;
878 if (TREE_CODE (expr) == COMPONENT_REF
879 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
881 disqualify_candidate (base, "Encountered a bit-field access.");
882 return NULL;
884 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
886 if (ptr)
887 mark_parm_dereference (base, offset + size, stmt);
889 else
891 if (size != max_size)
893 size = max_size;
894 unscalarizable_region = true;
896 if (size < 0)
898 disqualify_candidate (base, "Encountered an unconstrained access.");
899 return NULL;
903 access = create_access_1 (base, offset, size);
904 access->expr = expr;
905 access->type = TREE_TYPE (expr);
906 access->write = write;
907 access->grp_unscalarizable_region = unscalarizable_region;
908 access->stmt = stmt;
909 access->reverse = reverse;
911 if (TREE_CODE (expr) == COMPONENT_REF
912 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
913 access->non_addressable = 1;
915 return access;
919 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
920 ARRAY_TYPE with fields that are either of gimple register types (excluding
921 bit-fields) or (recursively) scalarizable types. */
923 static bool
924 scalarizable_type_p (tree type)
926 gcc_assert (!is_gimple_reg_type (type));
928 switch (TREE_CODE (type))
930 case RECORD_TYPE:
931 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
932 if (TREE_CODE (fld) == FIELD_DECL)
934 tree ft = TREE_TYPE (fld);
936 if (DECL_BIT_FIELD (fld))
937 return false;
939 if (!is_gimple_reg_type (ft)
940 && !scalarizable_type_p (ft))
941 return false;
944 return true;
946 case ARRAY_TYPE:
948 if (TYPE_DOMAIN (type) == NULL_TREE
949 || !tree_fits_shwi_p (TYPE_SIZE (type))
950 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
951 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
952 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
953 return false;
954 if (tree_to_shwi (TYPE_SIZE (type)) == 0
955 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
956 /* Zero-element array, should not prevent scalarization. */
958 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
959 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
960 /* Variable-length array, do not allow scalarization. */
961 return false;
963 tree elem = TREE_TYPE (type);
964 if (!is_gimple_reg_type (elem)
965 && !scalarizable_type_p (elem))
966 return false;
967 return true;
969 default:
970 return false;
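/* Editorial examples: types such as struct { int a; float b; } or
   struct { int a[4]; } satisfy this predicate, while a record containing a
   bit-field or a variable-length array member does not.  */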
974 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
976 /* Create total_scalarization accesses for all scalar fields of a member
977 of type DECL_TYPE conforming to scalarizable_type_p. BASE
978 must be the top-most VAR_DECL representing the variable; within that,
979 OFFSET locates the member and REF must be the memory reference expression for
980 the member. */
982 static void
983 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
985 switch (TREE_CODE (decl_type))
987 case RECORD_TYPE:
988 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
989 if (TREE_CODE (fld) == FIELD_DECL)
991 HOST_WIDE_INT pos = offset + int_bit_position (fld);
992 tree ft = TREE_TYPE (fld);
993 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
995 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
996 TYPE_REVERSE_STORAGE_ORDER (decl_type),
997 nref, ft);
999 break;
1000 case ARRAY_TYPE:
1002 tree elemtype = TREE_TYPE (decl_type);
1003 tree elem_size = TYPE_SIZE (elemtype);
1004 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1005 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1006 gcc_assert (el_size > 0);
1008 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1009 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1010 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1011 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1012 if (maxidx)
1014 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1015 tree domain = TYPE_DOMAIN (decl_type);
1016 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1017 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1018 offset_int idx = wi::to_offset (minidx);
1019 offset_int max = wi::to_offset (maxidx);
1020 if (!TYPE_UNSIGNED (domain))
1022 idx = wi::sext (idx, TYPE_PRECISION (domain));
1023 max = wi::sext (max, TYPE_PRECISION (domain));
1025 for (int el_off = offset; wi::les_p (idx, max); ++idx)
1027 tree nref = build4 (ARRAY_REF, elemtype,
1028 ref,
1029 wide_int_to_tree (domain, idx),
1030 NULL_TREE, NULL_TREE);
1031 scalarize_elem (base, el_off, el_size,
1032 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1033 nref, elemtype);
1034 el_off += el_size;
1038 break;
1039 default:
1040 gcc_unreachable ();
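/* Editorial sketch: completely scalarizing a variable of type
   struct { int a; int b[2]; } creates total_scalarization accesses for a,
   b[0] and b[1] at bit offsets 0, 32 and 64 respectively (assuming a
   32-bit int).  */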
1044 /* Create total_scalarization accesses for a member of type TYPE, which must
1045 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1046 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1047 the member, REVERSE gives its storage order, and REF must be the reference
1048 expression for it. */
1050 static void
1051 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1052 tree ref, tree type)
1054 if (is_gimple_reg_type (type))
1056 struct access *access = create_access_1 (base, pos, size);
1057 access->expr = ref;
1058 access->type = type;
1059 access->grp_total_scalarization = 1;
1060 access->reverse = reverse;
1061 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1063 else
1064 completely_scalarize (base, type, pos, ref);
1067 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1068 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1070 static void
1071 create_total_scalarization_access (tree var)
1073 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1074 struct access *access;
1076 access = create_access_1 (var, 0, size);
1077 access->expr = var;
1078 access->type = TREE_TYPE (var);
1079 access->grp_total_scalarization = 1;
1082 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1084 static inline bool
1085 contains_view_convert_expr_p (const_tree ref)
1087 while (handled_component_p (ref))
1089 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1090 return true;
1091 ref = TREE_OPERAND (ref, 0);
1094 return false;
1097 /* Search the given tree for a declaration by skipping handled components and
1098 exclude it from the candidates. */
1100 static void
1101 disqualify_base_of_expr (tree t, const char *reason)
1103 t = get_base_address (t);
1104 if (sra_mode == SRA_MODE_EARLY_IPA
1105 && TREE_CODE (t) == MEM_REF)
1106 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1108 if (t && DECL_P (t))
1109 disqualify_candidate (t, reason);
1112 /* Scan expression EXPR and create access structures for all accesses to
1113 candidates for scalarization. Return the created access or NULL if none is
1114 created. */
1116 static struct access *
1117 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1119 struct access *ret = NULL;
1120 bool partial_ref;
1122 if (TREE_CODE (expr) == BIT_FIELD_REF
1123 || TREE_CODE (expr) == IMAGPART_EXPR
1124 || TREE_CODE (expr) == REALPART_EXPR)
1126 expr = TREE_OPERAND (expr, 0);
1127 partial_ref = true;
1129 else
1130 partial_ref = false;
1132 /* We need to dive through V_C_Es in order to get the size of the parameter
1133 and not the result type. Ada produces such statements. We are also
1134 capable of handling the topmost V_C_E but not any of those buried in other
1135 handled components. */
1136 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1137 expr = TREE_OPERAND (expr, 0);
1139 if (contains_view_convert_expr_p (expr))
1141 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1142 "component.");
1143 return NULL;
1145 if (TREE_THIS_VOLATILE (expr))
1147 disqualify_base_of_expr (expr, "part of a volatile reference.");
1148 return NULL;
1151 switch (TREE_CODE (expr))
1153 case MEM_REF:
1154 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1155 && sra_mode != SRA_MODE_EARLY_IPA)
1156 return NULL;
1157 /* fall through */
1158 case VAR_DECL:
1159 case PARM_DECL:
1160 case RESULT_DECL:
1161 case COMPONENT_REF:
1162 case ARRAY_REF:
1163 case ARRAY_RANGE_REF:
1164 ret = create_access (expr, stmt, write);
1165 break;
1167 default:
1168 break;
1171 if (write && partial_ref && ret)
1172 ret->grp_partial_lhs = 1;
1174 return ret;
1177 /* Scan expression EXPR and create access structures for all accesses to
1178 candidates for scalarization. Return true if any access has been inserted.
1179 STMT must be the statement from which the expression is taken, WRITE must be
1180 true if the expression is a store and false otherwise. */
1182 static bool
1183 build_access_from_expr (tree expr, gimple *stmt, bool write)
1185 struct access *access;
1187 access = build_access_from_expr_1 (expr, stmt, write);
1188 if (access)
1190 /* This means the aggregate is accessed as a whole in a way other than an
1191 assign statement and thus cannot be removed even if we had a scalar
1192 replacement for everything. */
1193 if (cannot_scalarize_away_bitmap)
1194 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1195 return true;
1197 return false;
1200 /* Return the single non-EH successor edge of BB or NULL if there is none or
1201 more than one. */
1203 static edge
1204 single_non_eh_succ (basic_block bb)
1206 edge e, res = NULL;
1207 edge_iterator ei;
1209 FOR_EACH_EDGE (e, ei, bb->succs)
1210 if (!(e->flags & EDGE_EH))
1212 if (res)
1213 return NULL;
1214 res = e;
1217 return res;
1220 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1221 there is no alternative spot in which to put statements SRA might need to
1222 generate after it. The spot we are looking for is an edge leading to a
1223 single non-EH successor, if it exists and is indeed single. RHS may be
1224 NULL, in that case ignore it. */
1226 static bool
1227 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1229 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1230 && stmt_ends_bb_p (stmt))
1232 if (single_non_eh_succ (gimple_bb (stmt)))
1233 return false;
1235 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1236 if (rhs)
1237 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1238 return true;
1240 return false;
1243 /* Scan expressions occurring in STMT, create access structures for all accesses
1244 to candidates for scalarization and remove those candidates which occur in
1245 statements or expressions that prevent them from being split apart. Return
1246 true if any access has been inserted. */
1248 static bool
1249 build_accesses_from_assign (gimple *stmt)
1251 tree lhs, rhs;
1252 struct access *lacc, *racc;
1254 if (!gimple_assign_single_p (stmt)
1255 /* Scope clobbers don't influence scalarization. */
1256 || gimple_clobber_p (stmt))
1257 return false;
1259 lhs = gimple_assign_lhs (stmt);
1260 rhs = gimple_assign_rhs1 (stmt);
1262 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1263 return false;
1265 racc = build_access_from_expr_1 (rhs, stmt, false);
1266 lacc = build_access_from_expr_1 (lhs, stmt, true);
1268 if (lacc)
1270 lacc->grp_assignment_write = 1;
1271 if (storage_order_barrier_p (rhs))
1272 lacc->grp_unscalarizable_region = 1;
1275 if (racc)
1277 racc->grp_assignment_read = 1;
1278 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1279 && !is_gimple_reg_type (racc->type))
1280 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1281 if (storage_order_barrier_p (lhs))
1282 racc->grp_unscalarizable_region = 1;
1285 if (lacc && racc
1286 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1287 && !lacc->grp_unscalarizable_region
1288 && !racc->grp_unscalarizable_region
1289 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1290 && lacc->size == racc->size
1291 && useless_type_conversion_p (lacc->type, racc->type))
1293 struct assign_link *link;
1295 link = assign_link_pool.allocate ();
1296 memset (link, 0, sizeof (struct assign_link));
1298 link->lacc = lacc;
1299 link->racc = racc;
1301 add_link_to_rhs (racc, link);
1304 return lacc || racc;
1307 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1308 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1310 static bool
1311 asm_visit_addr (gimple *, tree op, tree, void *)
1313 op = get_base_address (op);
1314 if (op
1315 && DECL_P (op))
1316 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1318 return false;
1321 /* Return true iff callsite CALL has at least as many actual arguments as there
1322 are formal parameters of the function currently processed by IPA-SRA and
1323 their types match. */
1325 static inline bool
1326 callsite_arguments_match_p (gimple *call)
1328 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1329 return false;
1331 tree parm;
1332 int i;
1333 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1334 parm;
1335 parm = DECL_CHAIN (parm), i++)
1337 tree arg = gimple_call_arg (call, i);
1338 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1339 return false;
1341 return true;
1344 /* Scan function and look for interesting expressions and create access
1345 structures for them. Return true iff any access is created. */
1347 static bool
1348 scan_function (void)
1350 basic_block bb;
1351 bool ret = false;
1353 FOR_EACH_BB_FN (bb, cfun)
1355 gimple_stmt_iterator gsi;
1356 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1358 gimple *stmt = gsi_stmt (gsi);
1359 tree t;
1360 unsigned i;
1362 if (final_bbs && stmt_can_throw_external (stmt))
1363 bitmap_set_bit (final_bbs, bb->index);
1364 switch (gimple_code (stmt))
1366 case GIMPLE_RETURN:
1367 t = gimple_return_retval (as_a <greturn *> (stmt));
1368 if (t != NULL_TREE)
1369 ret |= build_access_from_expr (t, stmt, false);
1370 if (final_bbs)
1371 bitmap_set_bit (final_bbs, bb->index);
1372 break;
1374 case GIMPLE_ASSIGN:
1375 ret |= build_accesses_from_assign (stmt);
1376 break;
1378 case GIMPLE_CALL:
1379 for (i = 0; i < gimple_call_num_args (stmt); i++)
1380 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1381 stmt, false);
1383 if (sra_mode == SRA_MODE_EARLY_IPA)
1385 tree dest = gimple_call_fndecl (stmt);
1386 int flags = gimple_call_flags (stmt);
1388 if (dest)
1390 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1391 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1392 encountered_apply_args = true;
1393 if (recursive_call_p (current_function_decl, dest))
1395 encountered_recursive_call = true;
1396 if (!callsite_arguments_match_p (stmt))
1397 encountered_unchangable_recursive_call = true;
1401 if (final_bbs
1402 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1403 bitmap_set_bit (final_bbs, bb->index);
1406 t = gimple_call_lhs (stmt);
1407 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1408 ret |= build_access_from_expr (t, stmt, true);
1409 break;
1411 case GIMPLE_ASM:
1413 gasm *asm_stmt = as_a <gasm *> (stmt);
1414 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1415 asm_visit_addr);
1416 if (final_bbs)
1417 bitmap_set_bit (final_bbs, bb->index);
1419 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1421 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1422 ret |= build_access_from_expr (t, asm_stmt, false);
1424 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1426 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1427 ret |= build_access_from_expr (t, asm_stmt, true);
1430 break;
1432 default:
1433 break;
1438 return ret;
1441 /* Helper of the qsort function. The array contains pointers to accesses. An
1442 access is considered smaller than another if it has a smaller offset, or if
1443 the offsets are the same but its size is bigger. */
1445 static int
1446 compare_access_positions (const void *a, const void *b)
1448 const access_p *fp1 = (const access_p *) a;
1449 const access_p *fp2 = (const access_p *) b;
1450 const access_p f1 = *fp1;
1451 const access_p f2 = *fp2;
1453 if (f1->offset != f2->offset)
1454 return f1->offset < f2->offset ? -1 : 1;
1456 if (f1->size == f2->size)
1458 if (f1->type == f2->type)
1459 return 0;
1460 /* Put any non-aggregate type before any aggregate type. */
1461 else if (!is_gimple_reg_type (f1->type)
1462 && is_gimple_reg_type (f2->type))
1463 return 1;
1464 else if (is_gimple_reg_type (f1->type)
1465 && !is_gimple_reg_type (f2->type))
1466 return -1;
1467 /* Put any complex or vector type before any other scalar type. */
1468 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1469 && TREE_CODE (f1->type) != VECTOR_TYPE
1470 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1471 || TREE_CODE (f2->type) == VECTOR_TYPE))
1472 return 1;
1473 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1474 || TREE_CODE (f1->type) == VECTOR_TYPE)
1475 && TREE_CODE (f2->type) != COMPLEX_TYPE
1476 && TREE_CODE (f2->type) != VECTOR_TYPE)
1477 return -1;
1478 /* Put the integral type with the bigger precision first. */
1479 else if (INTEGRAL_TYPE_P (f1->type)
1480 && INTEGRAL_TYPE_P (f2->type))
1481 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1482 /* Put any integral type with non-full precision last. */
1483 else if (INTEGRAL_TYPE_P (f1->type)
1484 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1485 != TYPE_PRECISION (f1->type)))
1486 return 1;
1487 else if (INTEGRAL_TYPE_P (f2->type)
1488 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1489 != TYPE_PRECISION (f2->type)))
1490 return -1;
1491 /* Stabilize the sort. */
1492 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1495 /* We want the bigger accesses first, thus the opposite operator in the next
1496 line: */
1497 return f1->size > f2->size ? -1 : 1;
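/* Editorial example of the resulting order: accesses with (offset, size,
   type) of (0, 64, struct), (0, 32, int) and (32, 32, float) sort as
   (0, 64), (0, 32), (32, 32): smaller offsets come first, bigger sizes come
   first at equal offsets, and at equal offset and size scalar types precede
   aggregate ones.  */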
1501 /* Append a name of the declaration to the name obstack. A helper function for
1502 make_fancy_name. */
1504 static void
1505 make_fancy_decl_name (tree decl)
1507 char buffer[32];
1509 tree name = DECL_NAME (decl);
1510 if (name)
1511 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1512 IDENTIFIER_LENGTH (name));
1513 else
1515 sprintf (buffer, "D%u", DECL_UID (decl));
1516 obstack_grow (&name_obstack, buffer, strlen (buffer));
1520 /* Helper for make_fancy_name. */
1522 static void
1523 make_fancy_name_1 (tree expr)
1525 char buffer[32];
1526 tree index;
1528 if (DECL_P (expr))
1530 make_fancy_decl_name (expr);
1531 return;
1534 switch (TREE_CODE (expr))
1536 case COMPONENT_REF:
1537 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1538 obstack_1grow (&name_obstack, '$');
1539 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1540 break;
1542 case ARRAY_REF:
1543 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1544 obstack_1grow (&name_obstack, '$');
1545 /* Arrays with only one element may not have a constant as their
1546 index. */
1547 index = TREE_OPERAND (expr, 1);
1548 if (TREE_CODE (index) != INTEGER_CST)
1549 break;
1550 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1551 obstack_grow (&name_obstack, buffer, strlen (buffer));
1552 break;
1554 case ADDR_EXPR:
1555 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1556 break;
1558 case MEM_REF:
1559 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1560 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1562 obstack_1grow (&name_obstack, '$');
1563 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1564 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1565 obstack_grow (&name_obstack, buffer, strlen (buffer));
1567 break;
1569 case BIT_FIELD_REF:
1570 case REALPART_EXPR:
1571 case IMAGPART_EXPR:
1572 gcc_unreachable (); /* we treat these as scalars. */
1573 break;
1574 default:
1575 break;
1579 /* Create a human readable name for a replacement variable of EXPR. */
1581 static char *
1582 make_fancy_name (tree expr)
1584 make_fancy_name_1 (expr);
1585 obstack_1grow (&name_obstack, '\0');
1586 return XOBFINISH (&name_obstack, char *);
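/* Editorial example: for the expression a.b[3].c this produces the name
   "a$b$3$c" (assuming all the declarations involved have DECL_NAMEs; a
   nameless declaration contributes a "D<uid>" component instead).  */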
1589 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1590 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1591 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1592 be non-NULL and is used to insert new statements either before or below
1593 the current one as specified by INSERT_AFTER. This function is not capable
1594 of handling bitfields. */
1596 tree
1597 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1598 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1599 bool insert_after)
1601 tree prev_base = base;
1602 tree off;
1603 tree mem_ref;
1604 HOST_WIDE_INT base_offset;
1605 unsigned HOST_WIDE_INT misalign;
1606 unsigned int align;
1608 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1609 get_object_alignment_1 (base, &align, &misalign);
1610 base = get_addr_base_and_unit_offset (base, &base_offset);
1612 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1613 offset such as array[var_index]. */
1614 if (!base)
1616 gassign *stmt;
1617 tree tmp, addr;
1619 gcc_checking_assert (gsi);
1620 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1621 addr = build_fold_addr_expr (unshare_expr (prev_base));
1622 STRIP_USELESS_TYPE_CONVERSION (addr);
1623 stmt = gimple_build_assign (tmp, addr);
1624 gimple_set_location (stmt, loc);
1625 if (insert_after)
1626 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1627 else
1628 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1630 off = build_int_cst (reference_alias_ptr_type (prev_base),
1631 offset / BITS_PER_UNIT);
1632 base = tmp;
1634 else if (TREE_CODE (base) == MEM_REF)
1636 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1637 base_offset + offset / BITS_PER_UNIT);
1638 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1639 base = unshare_expr (TREE_OPERAND (base, 0));
1641 else
1643 off = build_int_cst (reference_alias_ptr_type (base),
1644 base_offset + offset / BITS_PER_UNIT);
1645 base = build_fold_addr_expr (unshare_expr (base));
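  /* A worked example (editorial): with ALIGN == 64 bits, MISALIGN == 0 and
     OFFSET == 32 bits, the statement below yields MISALIGN == 32, and the
     known alignment is lowered to 32 & -32 == 32 bits, i.e. 4 bytes.  */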
1648 misalign = (misalign + offset) & (align - 1);
1649 if (misalign != 0)
1650 align = (misalign & -misalign);
1651 if (align != TYPE_ALIGN (exp_type))
1652 exp_type = build_aligned_type (exp_type, align);
1654 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1655 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1656 if (TREE_THIS_VOLATILE (prev_base))
1657 TREE_THIS_VOLATILE (mem_ref) = 1;
1658 if (TREE_SIDE_EFFECTS (prev_base))
1659 TREE_SIDE_EFFECTS (mem_ref) = 1;
1660 return mem_ref;
1663 /* Construct a memory reference to a part of an aggregate BASE at the given
1664 OFFSET and of the same type as MODEL. In case this is a reference to a
1665 bit-field, the function will replicate the last component_ref of model's
1666 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1667 build_ref_for_offset. */
1669 static tree
1670 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1671 struct access *model, gimple_stmt_iterator *gsi,
1672 bool insert_after)
1674 if (TREE_CODE (model->expr) == COMPONENT_REF
1675 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1677 /* This access represents a bit-field. */
1678 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1680 offset -= int_bit_position (fld);
1681 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1682 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1683 gsi, insert_after);
1684 /* The flag will be set on the record type. */
1685 REF_REVERSE_STORAGE_ORDER (t) = 0;
1686 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1687 NULL_TREE);
1689 else
1690 return
1691 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1692 gsi, insert_after);
1695 /* Attempt to build a memory reference that we could put into a gimple
1696 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1697 create statements and returns NULL instead. This function also ignores
1698 alignment issues and so its results should never end up in non-debug
1699 statements. */
1701 static tree
1702 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1703 struct access *model)
1705 HOST_WIDE_INT base_offset;
1706 tree off;
1708 if (TREE_CODE (model->expr) == COMPONENT_REF
1709 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1710 return NULL_TREE;
1712 base = get_addr_base_and_unit_offset (base, &base_offset);
1713 if (!base)
1714 return NULL_TREE;
1715 if (TREE_CODE (base) == MEM_REF)
1717 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1718 base_offset + offset / BITS_PER_UNIT);
1719 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1720 base = unshare_expr (TREE_OPERAND (base, 0));
1722 else
1724 off = build_int_cst (reference_alias_ptr_type (base),
1725 base_offset + offset / BITS_PER_UNIT);
1726 base = build_fold_addr_expr (unshare_expr (base));
1729 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1732 /* Construct a memory reference consisting of component_refs and array_refs to
1733 a part of an aggregate *RES (which is of type TYPE). The requested part
1734 should have type EXP_TYPE at the given OFFSET. This function might not
1735 succeed, it returns true when it does and only then *RES points to something
1736 meaningful. This function should be used only to build expressions that we
1737 might need to present to the user (e.g. in warnings). In all other situations,
1738 build_ref_for_model or build_ref_for_offset should be used instead. */
1740 static bool
1741 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1742 tree exp_type)
1744 while (1)
1746 tree fld;
1747 tree tr_size, index, minidx;
1748 HOST_WIDE_INT el_size;
1750 if (offset == 0 && exp_type
1751 && types_compatible_p (exp_type, type))
1752 return true;
1754 switch (TREE_CODE (type))
1756 case UNION_TYPE:
1757 case QUAL_UNION_TYPE:
1758 case RECORD_TYPE:
1759 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1761 HOST_WIDE_INT pos, size;
1762 tree tr_pos, expr, *expr_ptr;
1764 if (TREE_CODE (fld) != FIELD_DECL)
1765 continue;
1767 tr_pos = bit_position (fld);
1768 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1769 continue;
1770 pos = tree_to_uhwi (tr_pos);
1771 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1772 tr_size = DECL_SIZE (fld);
1773 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1774 continue;
1775 size = tree_to_uhwi (tr_size);
1776 if (size == 0)
1778 if (pos != offset)
1779 continue;
1781 else if (pos > offset || (pos + size) <= offset)
1782 continue;
1784 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1785 NULL_TREE);
1786 expr_ptr = &expr;
1787 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1788 offset - pos, exp_type))
1790 *res = expr;
1791 return true;
1794 return false;
1796 case ARRAY_TYPE:
1797 tr_size = TYPE_SIZE (TREE_TYPE (type));
1798 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1799 return false;
1800 el_size = tree_to_uhwi (tr_size);
1802 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1803 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1804 return false;
1805 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1806 if (!integer_zerop (minidx))
1807 index = int_const_binop (PLUS_EXPR, index, minidx);
1808 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1809 NULL_TREE, NULL_TREE);
1810 offset = offset % el_size;
1811 type = TREE_TYPE (type);
1812 break;
1814 default:
1815 if (offset != 0)
1816 return false;
1818 if (exp_type)
1819 return false;
1820 else
1821 return true;
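/* Editorial example: for a variable of type struct { int x; int y[4]; },
   OFFSET of 96 bits and EXP_TYPE of int, this builds the expression
   VAR.y[2], assuming a 32-bit int.  */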
1826 /* Return true iff TYPE is a stdarg va_list type. */
1828 static inline bool
1829 is_va_list_type (tree type)
1831 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1834 /* Print a message to the dump file explaining why a variable was rejected. */
1836 static void
1837 reject (tree var, const char *msg)
1839 if (dump_file && (dump_flags & TDF_DETAILS))
1841 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1842 print_generic_expr (dump_file, var, 0);
1843 fprintf (dump_file, "\n");
1847 /* Return true if VAR is a candidate for SRA. */
1849 static bool
1850 maybe_add_sra_candidate (tree var)
1852 tree type = TREE_TYPE (var);
1853 const char *msg;
1854 tree_node **slot;
1856 if (!AGGREGATE_TYPE_P (type))
1858 reject (var, "not aggregate");
1859 return false;
1861 if (needs_to_live_in_memory (var))
1863 reject (var, "needs to live in memory");
1864 return false;
1866 if (TREE_THIS_VOLATILE (var))
1868 reject (var, "is volatile");
1869 return false;
1871 if (!COMPLETE_TYPE_P (type))
1873 reject (var, "has incomplete type");
1874 return false;
1876 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1878 reject (var, "type size not fixed");
1879 return false;
1881 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1883 reject (var, "type size is zero");
1884 return false;
1886 if (type_internals_preclude_sra_p (type, &msg))
1888 reject (var, msg);
1889 return false;
1891 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1892 we also want to schedule it rather late. Thus we ignore it in
1893 the early pass. */
1894 (sra_mode == SRA_MODE_EARLY_INTRA
1895 && is_va_list_type (type)))
1897 reject (var, "is va_list");
1898 return false;
1901 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1902 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1903 *slot = var;
1905 if (dump_file && (dump_flags & TDF_DETAILS))
1907 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1908 print_generic_expr (dump_file, var, 0);
1909 fprintf (dump_file, "\n");
1912 return true;
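/* Illustrative sketch of the checks above (added for exposition, not part of
   the original source; PT, F and the locals are made up):

     struct pt { int x, y; };

     int
     f (int i)
     {
       struct pt a = { i, i + 1 };   complete, fixed-size aggregate
       volatile struct pt v = a;     rejected: "is volatile"
       struct pt *p = &a;            taking A's address makes it need to
                                     live in memory, so A is rejected after all
       return p->x + v.y;
     }

   Only declaration and type properties are checked here; accepted candidates
   can still be disqualified later while the function body is scanned.  */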
1915 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1916 those declarations whose type is suitable for scalarization. */
1918 static bool
1919 find_var_candidates (void)
1921 tree var, parm;
1922 unsigned int i;
1923 bool ret = false;
1925 for (parm = DECL_ARGUMENTS (current_function_decl);
1926 parm;
1927 parm = DECL_CHAIN (parm))
1928 ret |= maybe_add_sra_candidate (parm);
1930 FOR_EACH_LOCAL_DECL (cfun, i, var)
1932 if (TREE_CODE (var) != VAR_DECL)
1933 continue;
1935 ret |= maybe_add_sra_candidate (var);
1938 return ret;
1941 /* Sort all accesses for the given variable, check for partial overlaps and
1942 return NULL if there are any. If there are none, pick a representative for
1943 each combination of offset and size and create a linked list out of them.
1944 Return the pointer to the first representative and make sure it is the first
1945 one in the vector of accesses. */
1947 static struct access *
1948 sort_and_splice_var_accesses (tree var)
1950 int i, j, access_count;
1951 struct access *res, **prev_acc_ptr = &res;
1952 vec<access_p> *access_vec;
1953 bool first = true;
1954 HOST_WIDE_INT low = -1, high = 0;
1956 access_vec = get_base_access_vector (var);
1957 if (!access_vec)
1958 return NULL;
1959 access_count = access_vec->length ();
1961 /* Sort by <OFFSET, SIZE>. */
1962 access_vec->qsort (compare_access_positions);
1964 i = 0;
1965 while (i < access_count)
1967 struct access *access = (*access_vec)[i];
1968 bool grp_write = access->write;
1969 bool grp_read = !access->write;
1970 bool grp_scalar_write = access->write
1971 && is_gimple_reg_type (access->type);
1972 bool grp_scalar_read = !access->write
1973 && is_gimple_reg_type (access->type);
1974 bool grp_assignment_read = access->grp_assignment_read;
1975 bool grp_assignment_write = access->grp_assignment_write;
1976 bool multiple_scalar_reads = false;
1977 bool total_scalarization = access->grp_total_scalarization;
1978 bool grp_partial_lhs = access->grp_partial_lhs;
1979 bool first_scalar = is_gimple_reg_type (access->type);
1980 bool unscalarizable_region = access->grp_unscalarizable_region;
1982 if (first || access->offset >= high)
1984 first = false;
1985 low = access->offset;
1986 high = access->offset + access->size;
1988 else if (access->offset > low && access->offset + access->size > high)
1989 return NULL;
1990 else
1991 gcc_assert (access->offset >= low
1992 && access->offset + access->size <= high);
1994 j = i + 1;
1995 while (j < access_count)
1997 struct access *ac2 = (*access_vec)[j];
1998 if (ac2->offset != access->offset || ac2->size != access->size)
1999 break;
2000 if (ac2->write)
2002 grp_write = true;
2003 grp_scalar_write = (grp_scalar_write
2004 || is_gimple_reg_type (ac2->type));
2006 else
2008 grp_read = true;
2009 if (is_gimple_reg_type (ac2->type))
2011 if (grp_scalar_read)
2012 multiple_scalar_reads = true;
2013 else
2014 grp_scalar_read = true;
2017 grp_assignment_read |= ac2->grp_assignment_read;
2018 grp_assignment_write |= ac2->grp_assignment_write;
2019 grp_partial_lhs |= ac2->grp_partial_lhs;
2020 unscalarizable_region |= ac2->grp_unscalarizable_region;
2021 total_scalarization |= ac2->grp_total_scalarization;
2022 relink_to_new_repr (access, ac2);
2024 /* If there are both aggregate-type and scalar-type accesses with
2025 this combination of size and offset, the comparison function
2026 should have put the scalars first. */
2027 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2028 ac2->group_representative = access;
2029 j++;
2032 i = j;
2034 access->group_representative = access;
2035 access->grp_write = grp_write;
2036 access->grp_read = grp_read;
2037 access->grp_scalar_read = grp_scalar_read;
2038 access->grp_scalar_write = grp_scalar_write;
2039 access->grp_assignment_read = grp_assignment_read;
2040 access->grp_assignment_write = grp_assignment_write;
2041 access->grp_hint = multiple_scalar_reads || total_scalarization;
2042 access->grp_total_scalarization = total_scalarization;
2043 access->grp_partial_lhs = grp_partial_lhs;
2044 access->grp_unscalarizable_region = unscalarizable_region;
2045 if (access->first_link)
2046 add_access_to_work_queue (access);
2048 *prev_acc_ptr = access;
2049 prev_acc_ptr = &access->next_grp;
2052 gcc_assert (res == (*access_vec)[0]);
2053 return res;
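/* Illustrative sketch (added for exposition, not part of the original
   source; the union U and variable X are made up):

     union u
     {
       struct { short a; int b; } __attribute__ ((packed)) s;
       int i;
     } x;

   Here x.s.b occupies bits 16 to 48 and x.i bits 0 to 32.  The two accesses
   overlap without one containing the other, so the check above returns NULL
   and the whole union is disqualified from scalarization.  */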
2056 /* Create a variable for the given ACCESS which determines the type, name and a
2057 few other properties. Return the variable declaration; the callers store it
2058 in ACCESS->replacement_decl. */
2060 static tree
2061 create_access_replacement (struct access *access)
2063 tree repl;
2065 if (access->grp_to_be_debug_replaced)
2067 repl = create_tmp_var_raw (access->type);
2068 DECL_CONTEXT (repl) = current_function_decl;
2070 else
2071 /* Drop any special alignment on the type if it's not on the main
2072 variant. This avoids issues with weirdo ABIs like AAPCS. */
2073 repl = create_tmp_var (build_qualified_type
2074 (TYPE_MAIN_VARIANT (access->type),
2075 TYPE_QUALS (access->type)), "SR");
2076 if (TREE_CODE (access->type) == COMPLEX_TYPE
2077 || TREE_CODE (access->type) == VECTOR_TYPE)
2079 if (!access->grp_partial_lhs)
2080 DECL_GIMPLE_REG_P (repl) = 1;
2082 else if (access->grp_partial_lhs
2083 && is_gimple_reg_type (access->type))
2084 TREE_ADDRESSABLE (repl) = 1;
2086 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2087 DECL_ARTIFICIAL (repl) = 1;
2088 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2090 if (DECL_NAME (access->base)
2091 && !DECL_IGNORED_P (access->base)
2092 && !DECL_ARTIFICIAL (access->base))
2094 char *pretty_name = make_fancy_name (access->expr);
2095 tree debug_expr = unshare_expr_without_location (access->expr), d;
2096 bool fail = false;
2098 DECL_NAME (repl) = get_identifier (pretty_name);
2099 obstack_free (&name_obstack, pretty_name);
2101 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2102 as DECL_DEBUG_EXPR isn't considered when looking for still
2103 used SSA_NAMEs and thus they could be freed. All debug info
2104 generation cares about is whether something is constant or variable
2105 and that get_ref_base_and_extent works properly on the
2106 expression. It cannot handle accesses at a non-constant offset
2107 though, so just give up in those cases. */
2108 for (d = debug_expr;
2109 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2110 d = TREE_OPERAND (d, 0))
2111 switch (TREE_CODE (d))
2113 case ARRAY_REF:
2114 case ARRAY_RANGE_REF:
2115 if (TREE_OPERAND (d, 1)
2116 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2117 fail = true;
2118 if (TREE_OPERAND (d, 3)
2119 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2120 fail = true;
2121 /* FALLTHRU */
2122 case COMPONENT_REF:
2123 if (TREE_OPERAND (d, 2)
2124 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2125 fail = true;
2126 break;
2127 case MEM_REF:
2128 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2129 fail = true;
2130 else
2131 d = TREE_OPERAND (d, 0);
2132 break;
2133 default:
2134 break;
2136 if (!fail)
2138 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2139 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2141 if (access->grp_no_warning)
2142 TREE_NO_WARNING (repl) = 1;
2143 else
2144 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2146 else
2147 TREE_NO_WARNING (repl) = 1;
2149 if (dump_file)
2151 if (access->grp_to_be_debug_replaced)
2153 fprintf (dump_file, "Created a debug-only replacement for ");
2154 print_generic_expr (dump_file, access->base, 0);
2155 fprintf (dump_file, " offset: %u, size: %u\n",
2156 (unsigned) access->offset, (unsigned) access->size);
2158 else
2160 fprintf (dump_file, "Created a replacement for ");
2161 print_generic_expr (dump_file, access->base, 0);
2162 fprintf (dump_file, " offset: %u, size: %u: ",
2163 (unsigned) access->offset, (unsigned) access->size);
2164 print_generic_expr (dump_file, repl, 0);
2165 fprintf (dump_file, "\n");
2168 sra_stats.replacements++;
2170 return repl;
2173 /* Return the scalar replacement of ACCESS, which must exist. */
2175 static inline tree
2176 get_access_replacement (struct access *access)
2178 gcc_checking_assert (access->replacement_decl);
2179 return access->replacement_decl;
2183 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2184 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2185 to by it is not "within" the root. Return false iff some accesses partially
2186 overlap. */
2188 static bool
2189 build_access_subtree (struct access **access)
2191 struct access *root = *access, *last_child = NULL;
2192 HOST_WIDE_INT limit = root->offset + root->size;
2194 *access = (*access)->next_grp;
2195 while (*access && (*access)->offset + (*access)->size <= limit)
2197 if (!last_child)
2198 root->first_child = *access;
2199 else
2200 last_child->next_sibling = *access;
2201 last_child = *access;
2203 if (!build_access_subtree (access))
2204 return false;
2207 if (*access && (*access)->offset < limit)
2208 return false;
2210 return true;
2213 /* Build a tree of access representatives, ACCESS is the pointer to the first
2214 one, others are linked in a list by the next_grp field. Return false iff
2215 some accesses partially overlap. */
2217 static bool
2218 build_access_trees (struct access *access)
2220 while (access)
2222 struct access *root = access;
2224 if (!build_access_subtree (&access))
2225 return false;
2226 root->next_grp = access;
2228 return true;
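/* Illustrative sketch (exposition only; the types are made up and a 32-bit
   int is assumed): for access representatives of

     struct outer { struct inner { int a; int b; } in; int c; };

   with <offset, size> pairs <0, 96>, <0, 64>, <0, 32>, <32, 32> and
   <64, 32>, build_access_trees links them as

     <0, 96>  the whole variable
       <0, 64>  in
         <0, 32>  in.a
         <32, 32>  in.b
       <64, 32>  c

   using the first_child and next_sibling fields; separate roots stay chained
   by next_grp.  */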
2231 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2232 array. */
2234 static bool
2235 expr_with_var_bounded_array_refs_p (tree expr)
2237 while (handled_component_p (expr))
2239 if (TREE_CODE (expr) == ARRAY_REF
2240 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2241 return true;
2242 expr = TREE_OPERAND (expr, 0);
2244 return false;
2247 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2248 they seem beneficial and ALLOW_REPLACEMENTS allows it. Also set all sorts of
2249 access flags appropriately along the way, notably propagate grp_read,
2250 grp_assignment_read, grp_write, grp_assignment_write and
2251 grp_total_scalarization from PARENT to ROOT.
2253 Creating a replacement for a scalar access is considered beneficial if its
2254 grp_hint is set (this means we are either attempting total scalarization or
2255 there is more than one direct read access) or according to the following
2256 table:
2258 Access written to through a scalar type (once or more times)
2260 | Written to in an assignment statement
2262 | | Access read as scalar _once_
2263 | | |
2264 | | | Read in an assignment statement
2265 | | | |
2266 | | | | Scalarize Comment
2267 -----------------------------------------------------------------------------
2268 0 0 0 0 -   No access for the scalar
2269 0 0 0 1 -   No access for the scalar
2270 0 0 1 0 No  Single read - won't help
2271 0 0 1 1 No  The same case
2272 0 1 0 0 -   No access for the scalar
2273 0 1 0 1 -   No access for the scalar
2274 0 1 1 0 Yes s = *g; return s.i;
2275 0 1 1 1 Yes The same case as above
2276 1 0 0 0 No  Won't help
2277 1 0 0 1 Yes s.i = 1; *g = s;
2278 1 0 1 0 Yes s.i = 5; g = s.i;
2279 1 0 1 1 Yes The same case as above
2280 1 1 0 0 No  Won't help.
2281 1 1 0 1 Yes s.i = 1; *g = s;
2282 1 1 1 0 Yes s = *g; return s.i;
2283 1 1 1 1 Yes Any of the above yeses ("-" marks rows with no scalar access) */
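/* A worked reading of one row (exposition only; F, G and the struct are
   made up):

     struct s { int i; };

     int
     f (struct s *g)
     {
       struct s tmp = *g;
       return tmp.i;
     }

   TMP is written to in an assignment statement and its component read as a
   scalar exactly once, i.e. row "0 1 1 0", so a scalar replacement for tmp.i
   is created and the aggregate load can in effect be reduced to reading
   g->i.  */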
2285 static bool
2286 analyze_access_subtree (struct access *root, struct access *parent,
2287 bool allow_replacements)
2289 struct access *child;
2290 HOST_WIDE_INT limit = root->offset + root->size;
2291 HOST_WIDE_INT covered_to = root->offset;
2292 bool scalar = is_gimple_reg_type (root->type);
2293 bool hole = false, sth_created = false;
2295 if (parent)
2297 if (parent->grp_read)
2298 root->grp_read = 1;
2299 if (parent->grp_assignment_read)
2300 root->grp_assignment_read = 1;
2301 if (parent->grp_write)
2302 root->grp_write = 1;
2303 if (parent->grp_assignment_write)
2304 root->grp_assignment_write = 1;
2305 if (parent->grp_total_scalarization)
2306 root->grp_total_scalarization = 1;
2309 if (root->grp_unscalarizable_region)
2310 allow_replacements = false;
2312 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2313 allow_replacements = false;
2315 for (child = root->first_child; child; child = child->next_sibling)
2317 hole |= covered_to < child->offset;
2318 sth_created |= analyze_access_subtree (child, root,
2319 allow_replacements && !scalar);
2321 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2322 root->grp_total_scalarization &= child->grp_total_scalarization;
2323 if (child->grp_covered)
2324 covered_to += child->size;
2325 else
2326 hole = true;
2329 if (allow_replacements && scalar && !root->first_child
2330 && (root->grp_hint
2331 || ((root->grp_scalar_read || root->grp_assignment_read)
2332 && (root->grp_scalar_write || root->grp_assignment_write))))
2334 /* Always create access replacements that cover the whole access.
2335 For integral types this means the precision has to match.
2336 Avoid assumptions based on the integral type kind, too. */
2337 if (INTEGRAL_TYPE_P (root->type)
2338 && (TREE_CODE (root->type) != INTEGER_TYPE
2339 || TYPE_PRECISION (root->type) != root->size)
2340 /* But leave bitfield accesses alone. */
2341 && (TREE_CODE (root->expr) != COMPONENT_REF
2342 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2344 tree rt = root->type;
2345 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2346 && (root->size % BITS_PER_UNIT) == 0);
2347 root->type = build_nonstandard_integer_type (root->size,
2348 TYPE_UNSIGNED (rt));
2349 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2350 root->offset, root->reverse,
2351 root->type, NULL, false);
2353 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, "Changing the type of a replacement for ");
2356 print_generic_expr (dump_file, root->base, 0);
2357 fprintf (dump_file, " offset: %u, size: %u ",
2358 (unsigned) root->offset, (unsigned) root->size);
2359 fprintf (dump_file, " to an integer.\n");
2363 root->grp_to_be_replaced = 1;
2364 root->replacement_decl = create_access_replacement (root);
2365 sth_created = true;
2366 hole = false;
2368 else
2370 if (allow_replacements
2371 && scalar && !root->first_child
2372 && (root->grp_scalar_write || root->grp_assignment_write)
2373 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2374 DECL_UID (root->base)))
2376 gcc_checking_assert (!root->grp_scalar_read
2377 && !root->grp_assignment_read);
2378 sth_created = true;
2379 if (MAY_HAVE_DEBUG_STMTS)
2381 root->grp_to_be_debug_replaced = 1;
2382 root->replacement_decl = create_access_replacement (root);
2386 if (covered_to < limit)
2387 hole = true;
2388 if (scalar)
2389 root->grp_total_scalarization = 0;
2392 if (!hole || root->grp_total_scalarization)
2393 root->grp_covered = 1;
2394 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2395 root->grp_unscalarized_data = 1; /* not covered and written to */
2396 return sth_created;
2399 /* Analyze all access trees linked by next_grp by means of
2400 analyze_access_subtree. */
2401 static bool
2402 analyze_access_trees (struct access *access)
2404 bool ret = false;
2406 while (access)
2408 if (analyze_access_subtree (access, NULL, true))
2409 ret = true;
2410 access = access->next_grp;
2413 return ret;
2416 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2417 with size SIZE would conflict with an already existing one. If exactly such
2418 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2420 static bool
2421 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2422 HOST_WIDE_INT size, struct access **exact_match)
2424 struct access *child;
2426 for (child = lacc->first_child; child; child = child->next_sibling)
2428 if (child->offset == norm_offset && child->size == size)
2430 *exact_match = child;
2431 return true;
2434 if (child->offset < norm_offset + size
2435 && child->offset + child->size > norm_offset)
2436 return true;
2439 return false;
2442 /* Create a new child access of PARENT, with all properties just like MODEL
2443 except for its offset and with its grp_write true and grp_read false.
2444 Return the new access or NULL if it cannot be created. Note that this access
2445 is created long after all splicing and sorting, so it is not located in any
2446 access vector and is automatically a representative of its group. */
2448 static struct access *
2449 create_artificial_child_access (struct access *parent, struct access *model,
2450 HOST_WIDE_INT new_offset)
2452 struct access **child;
2453 tree expr = parent->base;
2455 gcc_assert (!model->grp_unscalarizable_region);
2457 struct access *access = access_pool.allocate ();
2458 memset (access, 0, sizeof (struct access));
2459 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2460 model->type))
2462 access->grp_no_warning = true;
2463 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2464 new_offset, model, NULL, false);
2467 access->base = parent->base;
2468 access->expr = expr;
2469 access->offset = new_offset;
2470 access->size = model->size;
2471 access->type = model->type;
2472 access->grp_write = true;
2473 access->grp_read = false;
2474 access->reverse = model->reverse;
2476 child = &parent->first_child;
2477 while (*child && (*child)->offset < new_offset)
2478 child = &(*child)->next_sibling;
2480 access->next_sibling = *child;
2481 *child = access;
2483 return access;
2487 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2488 true if any new subaccess was created. Additionally, if RACC is a scalar
2489 access but LACC is not, change the type of the latter, if possible. */
2491 static bool
2492 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2494 struct access *rchild;
2495 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2496 bool ret = false;
2498 if (is_gimple_reg_type (lacc->type)
2499 || lacc->grp_unscalarizable_region
2500 || racc->grp_unscalarizable_region)
2501 return false;
2503 if (is_gimple_reg_type (racc->type))
2505 if (!lacc->first_child && !racc->first_child)
2507 tree t = lacc->base;
2509 lacc->type = racc->type;
2510 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2511 lacc->offset, racc->type))
2512 lacc->expr = t;
2513 else
2515 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2516 lacc->base, lacc->offset,
2517 racc, NULL, false);
2518 lacc->grp_no_warning = true;
2521 return false;
2524 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2526 struct access *new_acc = NULL;
2527 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2529 if (rchild->grp_unscalarizable_region)
2530 continue;
2532 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2533 &new_acc))
2535 if (new_acc)
2537 rchild->grp_hint = 1;
2538 new_acc->grp_hint |= new_acc->grp_read;
2539 if (rchild->first_child)
2540 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2542 continue;
2545 rchild->grp_hint = 1;
2546 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2547 if (new_acc)
2549 ret = true;
2550 if (racc->first_child)
2551 propagate_subaccesses_across_link (new_acc, rchild);
2555 return ret;
2558 /* Propagate all subaccesses across assignment links. */
2560 static void
2561 propagate_all_subaccesses (void)
2563 while (work_queue_head)
2565 struct access *racc = pop_access_from_work_queue ();
2566 struct assign_link *link;
2568 gcc_assert (racc->first_link);
2570 for (link = racc->first_link; link; link = link->next)
2572 struct access *lacc = link->lacc;
2574 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2575 continue;
2576 lacc = lacc->group_representative;
2577 if (propagate_subaccesses_across_link (lacc, racc)
2578 && lacc->first_link)
2579 add_access_to_work_queue (lacc);
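/* Illustrative sketch (exposition only; the struct and variables are made
   up): given

     struct s { int i; float f; } a, b;
     ...
     b.i = 5;
     a = b;

   the scalar access <0, 32> on B is propagated across the assign link of
   "a = b" to A, creating an artificial child access there, so the aggregate
   copy can later be rewritten as  a$i = b$i  instead of going through memory
   (the "$" naming is only illustrative).  */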
2584 /* Go through all accesses collected throughout the (intraprocedural) analysis
2585 stage, exclude overlapping ones, identify representatives and build trees
2586 out of them, making decisions about scalarization on the way. Return true
2587 iff there are any to-be-scalarized variables after this stage. */
2589 static bool
2590 analyze_all_variable_accesses (void)
2592 int res = 0;
2593 bitmap tmp = BITMAP_ALLOC (NULL);
2594 bitmap_iterator bi;
2595 unsigned i;
2596 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2598 enum compiler_param param = optimize_speed_p
2599 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2600 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2602 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2603 fall back to a target default. */
2604 unsigned HOST_WIDE_INT max_scalarization_size
2605 = global_options_set.x_param_values[param]
2606 ? PARAM_VALUE (param)
2607 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2609 max_scalarization_size *= BITS_PER_UNIT;
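/* (Expository note, not part of the original source: the two parameters
   correspond to the user-visible options

     --param sra-max-scalarization-size-Ospeed=N
     --param sra-max-scalarization-size-Osize=N

   where N is given in bytes; the multiplication above converts the limit to
   bits.) */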
2611 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2612 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2613 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2615 tree var = candidate (i);
2617 if (TREE_CODE (var) == VAR_DECL
2618 && scalarizable_type_p (TREE_TYPE (var)))
2620 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2621 <= max_scalarization_size)
2623 create_total_scalarization_access (var);
2624 completely_scalarize (var, TREE_TYPE (var), 0, var);
2625 if (dump_file && (dump_flags & TDF_DETAILS))
2627 fprintf (dump_file, "Will attempt to totally scalarize ");
2628 print_generic_expr (dump_file, var, 0);
2629 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2632 else if (dump_file && (dump_flags & TDF_DETAILS))
2634 fprintf (dump_file, "Too big to totally scalarize: ");
2635 print_generic_expr (dump_file, var, 0);
2636 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2641 bitmap_copy (tmp, candidate_bitmap);
2642 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2644 tree var = candidate (i);
2645 struct access *access;
2647 access = sort_and_splice_var_accesses (var);
2648 if (!access || !build_access_trees (access))
2649 disqualify_candidate (var,
2650 "No or inhibitingly overlapping accesses.");
2653 propagate_all_subaccesses ();
2655 bitmap_copy (tmp, candidate_bitmap);
2656 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2658 tree var = candidate (i);
2659 struct access *access = get_first_repr_for_decl (var);
2661 if (analyze_access_trees (access))
2663 res++;
2664 if (dump_file && (dump_flags & TDF_DETAILS))
2666 fprintf (dump_file, "\nAccess trees for ");
2667 print_generic_expr (dump_file, var, 0);
2668 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2669 dump_access_tree (dump_file, access);
2670 fprintf (dump_file, "\n");
2673 else
2674 disqualify_candidate (var, "No scalar replacements to be created.");
2677 BITMAP_FREE (tmp);
2679 if (res)
2681 statistics_counter_event (cfun, "Scalarized aggregates", res);
2682 return true;
2684 else
2685 return false;
2688 /* Generate statements copying scalar replacements of accesses within a subtree
2689 into or out of AGG. ACCESS, all its children, siblings and their children
2690 are to be processed. AGG is an aggregate type expression (can be a
2691 declaration but does not have to be; it can for example also be a MEM_REF or
2692 a series of handled components). TOP_OFFSET is the offset of the processed
2693 subtree which has to be subtracted from offsets of individual accesses to
2694 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2695 replacements in the interval <start_offset, start_offset + chunk_size>,
2696 otherwise copy all. GSI is a statement iterator used to place the new
2697 statements. WRITE should be true when the statements should write from AGG
2698 to the replacement and false if vice versa. if INSERT_AFTER is true, new
2699 statements will be added after the current statement in GSI, they will be
2700 added before the statement otherwise. */
2702 static void
2703 generate_subtree_copies (struct access *access, tree agg,
2704 HOST_WIDE_INT top_offset,
2705 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2706 gimple_stmt_iterator *gsi, bool write,
2707 bool insert_after, location_t loc)
2711 if (chunk_size && access->offset >= start_offset + chunk_size)
2712 return;
2714 if (access->grp_to_be_replaced
2715 && (chunk_size == 0
2716 || access->offset + access->size > start_offset))
2718 tree expr, repl = get_access_replacement (access);
2719 gassign *stmt;
2721 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2722 access, gsi, insert_after);
2724 if (write)
2726 if (access->grp_partial_lhs)
2727 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2728 !insert_after,
2729 insert_after ? GSI_NEW_STMT
2730 : GSI_SAME_STMT);
2731 stmt = gimple_build_assign (repl, expr);
2733 else
2735 TREE_NO_WARNING (repl) = 1;
2736 if (access->grp_partial_lhs)
2737 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2738 !insert_after,
2739 insert_after ? GSI_NEW_STMT
2740 : GSI_SAME_STMT);
2741 stmt = gimple_build_assign (expr, repl);
2743 gimple_set_location (stmt, loc);
2745 if (insert_after)
2746 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2747 else
2748 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2749 update_stmt (stmt);
2750 sra_stats.subtree_copies++;
2752 else if (write
2753 && access->grp_to_be_debug_replaced
2754 && (chunk_size == 0
2755 || access->offset + access->size > start_offset))
2757 gdebug *ds;
2758 tree drhs = build_debug_ref_for_model (loc, agg,
2759 access->offset - top_offset,
2760 access);
2761 ds = gimple_build_debug_bind (get_access_replacement (access),
2762 drhs, gsi_stmt (*gsi));
2763 if (insert_after)
2764 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2765 else
2766 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2769 if (access->first_child)
2770 generate_subtree_copies (access->first_child, agg, top_offset,
2771 start_offset, chunk_size, gsi,
2772 write, insert_after, loc);
2774 access = access->next_sibling;
2776 while (access);
2779 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2780 the root of the subtree to be processed. GSI is the statement iterator used
2781 for inserting statements which are added after the current statement if
2782 INSERT_AFTER is true or before it otherwise. */
2784 static void
2785 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2786 bool insert_after, location_t loc)
2789 struct access *child;
2791 if (access->grp_to_be_replaced)
2793 gassign *stmt;
2795 stmt = gimple_build_assign (get_access_replacement (access),
2796 build_zero_cst (access->type));
2797 if (insert_after)
2798 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2799 else
2800 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2801 update_stmt (stmt);
2802 gimple_set_location (stmt, loc);
2804 else if (access->grp_to_be_debug_replaced)
2806 gdebug *ds
2807 = gimple_build_debug_bind (get_access_replacement (access),
2808 build_zero_cst (access->type),
2809 gsi_stmt (*gsi));
2810 if (insert_after)
2811 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2812 else
2813 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2816 for (child = access->first_child; child; child = child->next_sibling)
2817 init_subtree_with_zero (child, gsi, insert_after, loc);
2820 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2821 root of the subtree to be processed. GSI is the statement iterator used
2822 for inserting statements which are added after the current statement if
2823 INSERT_AFTER is true or before it otherwise. */
2825 static void
2826 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2827 bool insert_after, location_t loc)
2830 struct access *child;
2832 if (access->grp_to_be_replaced)
2834 tree rep = get_access_replacement (access);
2835 tree clobber = build_constructor (access->type, NULL);
2836 TREE_THIS_VOLATILE (clobber) = 1;
2837 gimple *stmt = gimple_build_assign (rep, clobber);
2839 if (insert_after)
2840 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2841 else
2842 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2843 update_stmt (stmt);
2844 gimple_set_location (stmt, loc);
2847 for (child = access->first_child; child; child = child->next_sibling)
2848 clobber_subtree (child, gsi, insert_after, loc);
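/* (Expository note: an empty CONSTRUCTOR with TREE_THIS_VOLATILE set, as
   built above, is the GIMPLE representation of a  var = {CLOBBER}
   end-of-life marker, so the scalar replacements are marked dead exactly
   where the original aggregate was clobbered.) */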
2851 /* Search for an access representative for the given expression EXPR and
2852 return it or NULL if it cannot be found. */
2854 static struct access *
2855 get_access_for_expr (tree expr)
2857 HOST_WIDE_INT offset, size, max_size;
2858 tree base;
2859 bool reverse;
2861 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2862 a different size than the size of its argument and we need the latter
2863 one. */
2864 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2865 expr = TREE_OPERAND (expr, 0);
2867 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2868 if (max_size == -1 || !DECL_P (base))
2869 return NULL;
2871 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2872 return NULL;
2874 return get_var_base_offset_size_access (base, offset, max_size);
2877 /* Replace the expression EXPR with a scalar replacement if there is one and
2878 generate other statements to do type conversion or subtree copying if
2879 necessary. GSI is used to place newly created statements; WRITE is true if
2880 the expression is being written to (it is on the LHS of a statement or an
2881 output in an assembly statement). */
2883 static bool
2884 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2886 location_t loc;
2887 struct access *access;
2888 tree type, bfr, orig_expr;
2890 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2892 bfr = *expr;
2893 expr = &TREE_OPERAND (*expr, 0);
2895 else
2896 bfr = NULL_TREE;
2898 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2899 expr = &TREE_OPERAND (*expr, 0);
2900 access = get_access_for_expr (*expr);
2901 if (!access)
2902 return false;
2903 type = TREE_TYPE (*expr);
2904 orig_expr = *expr;
2906 loc = gimple_location (gsi_stmt (*gsi));
2907 gimple_stmt_iterator alt_gsi = gsi_none ();
2908 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2910 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2911 gsi = &alt_gsi;
2914 if (access->grp_to_be_replaced)
2916 tree repl = get_access_replacement (access);
2917 /* If we replace a non-register typed access simply use the original
2918 access expression to extract the scalar component afterwards.
2919 This happens if scalarizing a function return value or parameter
2920 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2921 gcc.c-torture/compile/20011217-1.c.
2923 We also want to use this when accessing a complex or vector which can
2924 be accessed as a different type too, potentially creating a need for
2925 type conversion (see PR42196) and when scalarized unions are involved
2926 in assembler statements (see PR42398). */
2927 if (!useless_type_conversion_p (type, access->type))
2929 tree ref;
2931 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2933 if (write)
2935 gassign *stmt;
2937 if (access->grp_partial_lhs)
2938 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2939 false, GSI_NEW_STMT);
2940 stmt = gimple_build_assign (repl, ref);
2941 gimple_set_location (stmt, loc);
2942 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2944 else
2946 gassign *stmt;
2948 if (access->grp_partial_lhs)
2949 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2950 true, GSI_SAME_STMT);
2951 stmt = gimple_build_assign (ref, repl);
2952 gimple_set_location (stmt, loc);
2953 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2956 else
2957 *expr = repl;
2958 sra_stats.exprs++;
2960 else if (write && access->grp_to_be_debug_replaced)
2962 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2963 NULL_TREE,
2964 gsi_stmt (*gsi));
2965 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2968 if (access->first_child)
2970 HOST_WIDE_INT start_offset, chunk_size;
2971 if (bfr
2972 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2973 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2975 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2976 start_offset = access->offset
2977 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2979 else
2980 start_offset = chunk_size = 0;
2982 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2983 start_offset, chunk_size, gsi, write, write,
2984 loc);
2986 return true;
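/* Illustrative sketch (exposition only; the struct and variable are made
   up): for

     struct s { _Complex float c; } x;
     ...
     ... = __real__ x.c;

   the REALPART_EXPR is stripped above so that the access lookup sees x.c;
   if that access has a scalar replacement, the operand is rewritten and the
   use effectively becomes  __real__ x$c  (naming only illustrative).  */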
2989 /* Where scalar replacements of the RHS have been written to when a replacement
2990 of the LHS of an assignment cannot be directly loaded from a replacement of
2991 the RHS. */
2992 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2993 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2994 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2996 struct subreplacement_assignment_data
2998 /* Offset of the access representing the lhs of the assignment. */
2999 HOST_WIDE_INT left_offset;
3001 /* LHS and RHS of the original assignment. */
3002 tree assignment_lhs, assignment_rhs;
3004 /* Access representing the rhs of the whole assignment. */
3005 struct access *top_racc;
3007 /* Stmt iterator used for statement insertions after the original assignment.
3008 It points to the main GSI used to traverse a BB during function body
3009 modification. */
3010 gimple_stmt_iterator *new_gsi;
3012 /* Stmt iterator used for statement insertions before the original
3013 assignment. Keeps on pointing to the original statement. */
3014 gimple_stmt_iterator old_gsi;
3016 /* Location of the assignment. */
3017 location_t loc;
3019 /* Keeps the information about whether we have needed to refresh replacements
3020 of the LHS and from which side of the assignment this takes place. */
3021 enum unscalarized_data_handling refreshed;
3024 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3025 their base aggregate if there are unscalarized data or directly to the LHS
3026 of the statement pointed to by SAD->old_gsi otherwise. */
3028 static void
3029 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3031 tree src;
3032 if (sad->top_racc->grp_unscalarized_data)
3034 src = sad->assignment_rhs;
3035 sad->refreshed = SRA_UDH_RIGHT;
3037 else
3039 src = sad->assignment_lhs;
3040 sad->refreshed = SRA_UDH_LEFT;
3042 generate_subtree_copies (sad->top_racc->first_child, src,
3043 sad->top_racc->offset, 0, 0,
3044 &sad->old_gsi, false, false, sad->loc);
3047 /* Try to generate statements to load all sub-replacements in an access subtree
3048 formed by children of LACC from scalar replacements in the SAD->top_racc
3049 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3050 and load the accesses from it. */
3052 static void
3053 load_assign_lhs_subreplacements (struct access *lacc,
3054 struct subreplacement_assignment_data *sad)
3056 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3058 HOST_WIDE_INT offset;
3059 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3061 if (lacc->grp_to_be_replaced)
3063 struct access *racc;
3064 gassign *stmt;
3065 tree rhs;
3067 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3068 if (racc && racc->grp_to_be_replaced)
3070 rhs = get_access_replacement (racc);
3071 if (!useless_type_conversion_p (lacc->type, racc->type))
3072 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3073 lacc->type, rhs);
3075 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3076 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3077 NULL_TREE, true, GSI_SAME_STMT);
3079 else
3081 /* No suitable access on the right hand side, need to load from
3082 the aggregate. See if we have to update it first... */
3083 if (sad->refreshed == SRA_UDH_NONE)
3084 handle_unscalarized_data_in_subtree (sad);
3086 if (sad->refreshed == SRA_UDH_LEFT)
3087 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3088 lacc->offset - sad->left_offset,
3089 lacc, sad->new_gsi, true);
3090 else
3091 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3092 lacc->offset - sad->left_offset,
3093 lacc, sad->new_gsi, true);
3094 if (lacc->grp_partial_lhs)
3095 rhs = force_gimple_operand_gsi (sad->new_gsi,
3096 rhs, true, NULL_TREE,
3097 false, GSI_NEW_STMT);
3100 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3101 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3102 gimple_set_location (stmt, sad->loc);
3103 update_stmt (stmt);
3104 sra_stats.subreplacements++;
3106 else
3108 if (sad->refreshed == SRA_UDH_NONE
3109 && lacc->grp_read && !lacc->grp_covered)
3110 handle_unscalarized_data_in_subtree (sad);
3112 if (lacc && lacc->grp_to_be_debug_replaced)
3114 gdebug *ds;
3115 tree drhs;
3116 struct access *racc = find_access_in_subtree (sad->top_racc,
3117 offset,
3118 lacc->size);
3120 if (racc && racc->grp_to_be_replaced)
3122 if (racc->grp_write)
3123 drhs = get_access_replacement (racc);
3124 else
3125 drhs = NULL;
3127 else if (sad->refreshed == SRA_UDH_LEFT)
3128 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3129 lacc->offset, lacc);
3130 else if (sad->refreshed == SRA_UDH_RIGHT)
3131 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3132 offset, lacc);
3133 else
3134 drhs = NULL_TREE;
3135 if (drhs
3136 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3137 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3138 lacc->type, drhs);
3139 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3140 drhs, gsi_stmt (sad->old_gsi));
3141 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3145 if (lacc->first_child)
3146 load_assign_lhs_subreplacements (lacc, sad);
3150 /* Result code for SRA assignment modification. */
3151 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3152 SRA_AM_MODIFIED, /* stmt changed but not
3153 removed */
3154 SRA_AM_REMOVED }; /* stmt eliminated */
3156 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3157 to the assignment and GSI is the statement iterator pointing at it. Returns
3158 the same values as sra_modify_assign. */
3160 static enum assignment_mod_result
3161 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3163 tree lhs = gimple_assign_lhs (stmt);
3164 struct access *acc = get_access_for_expr (lhs);
3165 if (!acc)
3166 return SRA_AM_NONE;
3167 location_t loc = gimple_location (stmt);
3169 if (gimple_clobber_p (stmt))
3171 /* Clobber the replacement variable. */
3172 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3173 /* Remove clobbers of fully scalarized variables, they are dead. */
3174 if (acc->grp_covered)
3176 unlink_stmt_vdef (stmt);
3177 gsi_remove (gsi, true);
3178 release_defs (stmt);
3179 return SRA_AM_REMOVED;
3181 else
3182 return SRA_AM_MODIFIED;
3185 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3187 /* I have never seen this code path trigger but if it can happen the
3188 following should handle it gracefully. */
3189 if (access_has_children_p (acc))
3190 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3191 true, true, loc);
3192 return SRA_AM_MODIFIED;
3195 if (acc->grp_covered)
3197 init_subtree_with_zero (acc, gsi, false, loc);
3198 unlink_stmt_vdef (stmt);
3199 gsi_remove (gsi, true);
3200 release_defs (stmt);
3201 return SRA_AM_REMOVED;
3203 else
3205 init_subtree_with_zero (acc, gsi, true, loc);
3206 return SRA_AM_MODIFIED;
3210 /* Create and return a new suitable default definition SSA_NAME for RACC which
3211 is an access describing an uninitialized part of an aggregate that is being
3212 loaded. */
3214 static tree
3215 get_repl_default_def_ssa_name (struct access *racc)
3217 gcc_checking_assert (!racc->grp_to_be_replaced
3218 && !racc->grp_to_be_debug_replaced);
3219 if (!racc->replacement_decl)
3220 racc->replacement_decl = create_access_replacement (racc);
3221 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3224 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3225 bit-field field declaration somewhere in it. */
3227 static inline bool
3228 contains_vce_or_bfcref_p (const_tree ref)
3230 while (handled_component_p (ref))
3232 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3233 || (TREE_CODE (ref) == COMPONENT_REF
3234 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3235 return true;
3236 ref = TREE_OPERAND (ref, 0);
3239 return false;
3242 /* Examine both sides of the assignment statement pointed to by STMT, replace
3243 them with a scalar replacement if there is one and generate copying of
3244 replacements if scalarized aggregates have been used in the assignment. GSI
3245 is used to hold generated statements for type conversions and subtree
3246 copying. */
3248 static enum assignment_mod_result
3249 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3251 struct access *lacc, *racc;
3252 tree lhs, rhs;
3253 bool modify_this_stmt = false;
3254 bool force_gimple_rhs = false;
3255 location_t loc;
3256 gimple_stmt_iterator orig_gsi = *gsi;
3258 if (!gimple_assign_single_p (stmt))
3259 return SRA_AM_NONE;
3260 lhs = gimple_assign_lhs (stmt);
3261 rhs = gimple_assign_rhs1 (stmt);
3263 if (TREE_CODE (rhs) == CONSTRUCTOR)
3264 return sra_modify_constructor_assign (stmt, gsi);
3266 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3267 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3268 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3270 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3271 gsi, false);
3272 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3273 gsi, true);
3274 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3277 lacc = get_access_for_expr (lhs);
3278 racc = get_access_for_expr (rhs);
3279 if (!lacc && !racc)
3280 return SRA_AM_NONE;
3282 loc = gimple_location (stmt);
3283 if (lacc && lacc->grp_to_be_replaced)
3285 lhs = get_access_replacement (lacc);
3286 gimple_assign_set_lhs (stmt, lhs);
3287 modify_this_stmt = true;
3288 if (lacc->grp_partial_lhs)
3289 force_gimple_rhs = true;
3290 sra_stats.exprs++;
3293 if (racc && racc->grp_to_be_replaced)
3295 rhs = get_access_replacement (racc);
3296 modify_this_stmt = true;
3297 if (racc->grp_partial_lhs)
3298 force_gimple_rhs = true;
3299 sra_stats.exprs++;
3301 else if (racc
3302 && !racc->grp_unscalarized_data
3303 && TREE_CODE (lhs) == SSA_NAME
3304 && !access_has_replacements_p (racc))
3306 rhs = get_repl_default_def_ssa_name (racc);
3307 modify_this_stmt = true;
3308 sra_stats.exprs++;
3311 if (modify_this_stmt)
3313 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3315 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3316 ??? This should move to fold_stmt which we simply should
3317 call after building a VIEW_CONVERT_EXPR here. */
3318 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3319 && !contains_bitfld_component_ref_p (lhs))
3321 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3322 gimple_assign_set_lhs (stmt, lhs);
3324 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3325 && !contains_vce_or_bfcref_p (rhs))
3326 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3328 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3330 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3331 rhs);
3332 if (is_gimple_reg_type (TREE_TYPE (lhs))
3333 && TREE_CODE (lhs) != SSA_NAME)
3334 force_gimple_rhs = true;
3339 if (lacc && lacc->grp_to_be_debug_replaced)
3341 tree dlhs = get_access_replacement (lacc);
3342 tree drhs = unshare_expr (rhs);
3343 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3345 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3346 && !contains_vce_or_bfcref_p (drhs))
3347 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3348 if (drhs
3349 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3350 TREE_TYPE (drhs)))
3351 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3352 TREE_TYPE (dlhs), drhs);
3354 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3355 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3358 /* From this point on, the function deals with assignments in between
3359 aggregates when at least one has scalar reductions of some of its
3360 components. There are three possible scenarios: 1) both the LHS and the RHS
3361 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3363 In the first case, we would like to load the LHS components from RHS
3364 components whenever possible. If that is not possible, we would like to
3365 read it directly from the RHS (after updating it by storing in it its own
3366 components). If there are some necessary unscalarized data in the LHS,
3367 those will be loaded by the original assignment too. If neither of these
3368 cases happen, the original statement can be removed. Most of this is done
3369 by load_assign_lhs_subreplacements.
3371 In the second case, we would like to store all RHS scalarized components
3372 directly into LHS and if they cover the aggregate completely, remove the
3373 statement too. In the third case, we want the LHS components to be loaded
3374 directly from the RHS (DSE will remove the original statement if it
3375 becomes redundant).
3377 This is a bit complex but manageable when types match and when unions do
3378 not cause confusion in a way that we cannot really load a component of LHS
3379 from the RHS or vice versa (the access representing this level can have
3380 subaccesses that are accessible only through a different union field at a
3381 higher level - different from the one used in the examined expression).
3382 Unions are fun.
3384 Therefore, I specially handle a fourth case, happening when there is a
3385 specific type cast or it is impossible to locate a scalarized subaccess on
3386 the other side of the expression. If that happens, I simply "refresh" the
3387 RHS by storing into it its scalarized components, leave the original statement
3388 there to do the copying and then load the scalar replacements of the LHS.
3389 This is what the first branch does. */
3391 if (modify_this_stmt
3392 || gimple_has_volatile_ops (stmt)
3393 || contains_vce_or_bfcref_p (rhs)
3394 || contains_vce_or_bfcref_p (lhs)
3395 || stmt_ends_bb_p (stmt))
3397 if (access_has_children_p (racc))
3398 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3399 gsi, false, false, loc);
3400 if (access_has_children_p (lacc))
3402 gimple_stmt_iterator alt_gsi = gsi_none ();
3403 if (stmt_ends_bb_p (stmt))
3405 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3406 gsi = &alt_gsi;
3408 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3409 gsi, true, true, loc);
3411 sra_stats.separate_lhs_rhs_handling++;
3413 /* This gimplification must be done after generate_subtree_copies,
3414 lest we insert the subtree copies in the middle of the gimplified
3415 sequence. */
3416 if (force_gimple_rhs)
3417 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3418 true, GSI_SAME_STMT);
3419 if (gimple_assign_rhs1 (stmt) != rhs)
3421 modify_this_stmt = true;
3422 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3423 gcc_assert (stmt == gsi_stmt (orig_gsi));
3426 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3428 else
3430 if (access_has_children_p (lacc)
3431 && access_has_children_p (racc)
3432 /* When an access represents an unscalarizable region, it usually
3433 represents accesses with variable offset and thus must not be used
3434 to generate new memory accesses. */
3435 && !lacc->grp_unscalarizable_region
3436 && !racc->grp_unscalarizable_region)
3438 struct subreplacement_assignment_data sad;
3440 sad.left_offset = lacc->offset;
3441 sad.assignment_lhs = lhs;
3442 sad.assignment_rhs = rhs;
3443 sad.top_racc = racc;
3444 sad.old_gsi = *gsi;
3445 sad.new_gsi = gsi;
3446 sad.loc = gimple_location (stmt);
3447 sad.refreshed = SRA_UDH_NONE;
3449 if (lacc->grp_read && !lacc->grp_covered)
3450 handle_unscalarized_data_in_subtree (&sad);
3452 load_assign_lhs_subreplacements (lacc, &sad);
3453 if (sad.refreshed != SRA_UDH_RIGHT)
3455 gsi_next (gsi);
3456 unlink_stmt_vdef (stmt);
3457 gsi_remove (&sad.old_gsi, true);
3458 release_defs (stmt);
3459 sra_stats.deleted++;
3460 return SRA_AM_REMOVED;
3463 else
3465 if (access_has_children_p (racc)
3466 && !racc->grp_unscalarized_data)
3468 if (dump_file)
3470 fprintf (dump_file, "Removing load: ");
3471 print_gimple_stmt (dump_file, stmt, 0, 0);
3473 generate_subtree_copies (racc->first_child, lhs,
3474 racc->offset, 0, 0, gsi,
3475 false, false, loc);
3476 gcc_assert (stmt == gsi_stmt (*gsi));
3477 unlink_stmt_vdef (stmt);
3478 gsi_remove (gsi, true);
3479 release_defs (stmt);
3480 sra_stats.deleted++;
3481 return SRA_AM_REMOVED;
3483 /* Restore the aggregate RHS from its components so the
3484 prevailing aggregate copy does the right thing. */
3485 if (access_has_children_p (racc))
3486 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3487 gsi, false, false, loc);
3488 /* Re-load the components of the aggregate copy destination.
3489 But use the RHS aggregate to load from to expose more
3490 optimization opportunities. */
3491 if (access_has_children_p (lacc))
3492 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3493 0, 0, gsi, true, true, loc);
3496 return SRA_AM_NONE;
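/* Illustrative sketch of the common case (exposition only; names made up):
   an aggregate copy

     d = s;

   where both D and S are fully scalarized is replaced by direct copies of
   the replacements, e.g.

     d$i = s$i;
     d$f = s$f;

   and since no unscalarized data remain, the original statement is removed
   and counted in sra_stats.deleted.  */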
3500 /* Traverse the function body and carry out all modifications as decided in
3501 analyze_all_variable_accesses. Return true iff the CFG has been
3502 changed. */
3504 static bool
3505 sra_modify_function_body (void)
3507 bool cfg_changed = false;
3508 basic_block bb;
3510 FOR_EACH_BB_FN (bb, cfun)
3512 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3513 while (!gsi_end_p (gsi))
3515 gimple *stmt = gsi_stmt (gsi);
3516 enum assignment_mod_result assign_result;
3517 bool modified = false, deleted = false;
3518 tree *t;
3519 unsigned i;
3521 switch (gimple_code (stmt))
3523 case GIMPLE_RETURN:
3524 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3525 if (*t != NULL_TREE)
3526 modified |= sra_modify_expr (t, &gsi, false);
3527 break;
3529 case GIMPLE_ASSIGN:
3530 assign_result = sra_modify_assign (stmt, &gsi);
3531 modified |= assign_result == SRA_AM_MODIFIED;
3532 deleted = assign_result == SRA_AM_REMOVED;
3533 break;
3535 case GIMPLE_CALL:
3536 /* Operands must be processed before the lhs. */
3537 for (i = 0; i < gimple_call_num_args (stmt); i++)
3539 t = gimple_call_arg_ptr (stmt, i);
3540 modified |= sra_modify_expr (t, &gsi, false);
3543 if (gimple_call_lhs (stmt))
3545 t = gimple_call_lhs_ptr (stmt);
3546 modified |= sra_modify_expr (t, &gsi, true);
3548 break;
3550 case GIMPLE_ASM:
3552 gasm *asm_stmt = as_a <gasm *> (stmt);
3553 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3555 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3556 modified |= sra_modify_expr (t, &gsi, false);
3558 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3560 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3561 modified |= sra_modify_expr (t, &gsi, true);
3564 break;
3566 default:
3567 break;
3570 if (modified)
3572 update_stmt (stmt);
3573 if (maybe_clean_eh_stmt (stmt)
3574 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3575 cfg_changed = true;
3577 if (!deleted)
3578 gsi_next (&gsi);
3582 gsi_commit_edge_inserts ();
3583 return cfg_changed;
3586 /* Generate statements initializing scalar replacements of parts of function
3587 parameters. */
3589 static void
3590 initialize_parameter_reductions (void)
3592 gimple_stmt_iterator gsi;
3593 gimple_seq seq = NULL;
3594 tree parm;
3596 gsi = gsi_start (seq);
3597 for (parm = DECL_ARGUMENTS (current_function_decl);
3598 parm;
3599 parm = DECL_CHAIN (parm))
3601 vec<access_p> *access_vec;
3602 struct access *access;
3604 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3605 continue;
3606 access_vec = get_base_access_vector (parm);
3607 if (!access_vec)
3608 continue;
3610 for (access = (*access_vec)[0];
3611 access;
3612 access = access->next_grp)
3613 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3614 EXPR_LOCATION (parm));
3617 seq = gsi_seq (gsi);
3618 if (seq)
3619 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
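/* Illustrative sketch (exposition only; names made up): for

     int f (struct s p) { ... }

   with P scalarized, this inserts statements like

     p$i = p.i;
     p$f = p.f;

   on the single successor edge of the entry block, so the replacements
   start out holding the incoming parameter values.  */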
3622 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3623 it reveals there are components of some aggregates to be scalarized, it runs
3624 the required transformations. */
3625 static unsigned int
3626 perform_intra_sra (void)
3628 int ret = 0;
3629 sra_initialize ();
3631 if (!find_var_candidates ())
3632 goto out;
3634 if (!scan_function ())
3635 goto out;
3637 if (!analyze_all_variable_accesses ())
3638 goto out;
3640 if (sra_modify_function_body ())
3641 ret = TODO_update_ssa | TODO_cleanup_cfg;
3642 else
3643 ret = TODO_update_ssa;
3644 initialize_parameter_reductions ();
3646 statistics_counter_event (cfun, "Scalar replacements created",
3647 sra_stats.replacements);
3648 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3649 statistics_counter_event (cfun, "Subtree copy stmts",
3650 sra_stats.subtree_copies);
3651 statistics_counter_event (cfun, "Subreplacement stmts",
3652 sra_stats.subreplacements);
3653 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3654 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3655 sra_stats.separate_lhs_rhs_handling);
3657 out:
3658 sra_deinitialize ();
3659 return ret;
3662 /* Perform early intraprocedural SRA. */
3663 static unsigned int
3664 early_intra_sra (void)
3666 sra_mode = SRA_MODE_EARLY_INTRA;
3667 return perform_intra_sra ();
3670 /* Perform "late" intraprocedural SRA. */
3671 static unsigned int
3672 late_intra_sra (void)
3674 sra_mode = SRA_MODE_INTRA;
3675 return perform_intra_sra ();
3679 static bool
3680 gate_intra_sra (void)
3682 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3686 namespace {
3688 const pass_data pass_data_sra_early =
3690 GIMPLE_PASS, /* type */
3691 "esra", /* name */
3692 OPTGROUP_NONE, /* optinfo_flags */
3693 TV_TREE_SRA, /* tv_id */
3694 ( PROP_cfg | PROP_ssa ), /* properties_required */
3695 0, /* properties_provided */
3696 0, /* properties_destroyed */
3697 0, /* todo_flags_start */
3698 TODO_update_ssa, /* todo_flags_finish */
3701 class pass_sra_early : public gimple_opt_pass
3703 public:
3704 pass_sra_early (gcc::context *ctxt)
3705 : gimple_opt_pass (pass_data_sra_early, ctxt)
3708 /* opt_pass methods: */
3709 virtual bool gate (function *) { return gate_intra_sra (); }
3710 virtual unsigned int execute (function *) { return early_intra_sra (); }
3712 }; // class pass_sra_early
3714 } // anon namespace
3716 gimple_opt_pass *
3717 make_pass_sra_early (gcc::context *ctxt)
3719 return new pass_sra_early (ctxt);
3722 namespace {
3724 const pass_data pass_data_sra =
3726 GIMPLE_PASS, /* type */
3727 "sra", /* name */
3728 OPTGROUP_NONE, /* optinfo_flags */
3729 TV_TREE_SRA, /* tv_id */
3730 ( PROP_cfg | PROP_ssa ), /* properties_required */
3731 0, /* properties_provided */
3732 0, /* properties_destroyed */
3733 TODO_update_address_taken, /* todo_flags_start */
3734 TODO_update_ssa, /* todo_flags_finish */
3737 class pass_sra : public gimple_opt_pass
3739 public:
3740 pass_sra (gcc::context *ctxt)
3741 : gimple_opt_pass (pass_data_sra, ctxt)
3744 /* opt_pass methods: */
3745 virtual bool gate (function *) { return gate_intra_sra (); }
3746 virtual unsigned int execute (function *) { return late_intra_sra (); }
3748 }; // class pass_sra
3750 } // anon namespace
3752 gimple_opt_pass *
3753 make_pass_sra (gcc::context *ctxt)
3755 return new pass_sra (ctxt);
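/* (Expository usage note: the work of both passes can be inspected with the
   standard per-pass dump options derived from the pass names above, e.g.

     gcc -O2 -fdump-tree-esra-details -fdump-tree-sra-details file.c

   which record the "Candidate", "Rejected" and "Created a replacement"
   messages printed throughout this file.) */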
3759 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3760 parameter. */
3762 static bool
3763 is_unused_scalar_param (tree parm)
3765 tree name;
3766 return (is_gimple_reg (parm)
3767 && (!(name = ssa_default_def (cfun, parm))
3768 || has_zero_uses (name)));
3771 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3772 examine whether there are any direct or otherwise infeasible ones. If so,
3773 return true, otherwise return false. PARM must be a gimple register with a
3774 non-NULL default definition. */
3776 static bool
3777 ptr_parm_has_direct_uses (tree parm)
3779 imm_use_iterator ui;
3780 gimple *stmt;
3781 tree name = ssa_default_def (cfun, parm);
3782 bool ret = false;
3784 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3786 int uses_ok = 0;
3787 use_operand_p use_p;
3789 if (is_gimple_debug (stmt))
3790 continue;
3792 /* Valid uses include dereferences on the lhs and the rhs. */
3793 if (gimple_has_lhs (stmt))
3795 tree lhs = gimple_get_lhs (stmt);
3796 while (handled_component_p (lhs))
3797 lhs = TREE_OPERAND (lhs, 0);
3798 if (TREE_CODE (lhs) == MEM_REF
3799 && TREE_OPERAND (lhs, 0) == name
3800 && integer_zerop (TREE_OPERAND (lhs, 1))
3801 && types_compatible_p (TREE_TYPE (lhs),
3802 TREE_TYPE (TREE_TYPE (name)))
3803 && !TREE_THIS_VOLATILE (lhs))
3804 uses_ok++;
3806 if (gimple_assign_single_p (stmt))
3808 tree rhs = gimple_assign_rhs1 (stmt);
3809 while (handled_component_p (rhs))
3810 rhs = TREE_OPERAND (rhs, 0);
3811 if (TREE_CODE (rhs) == MEM_REF
3812 && TREE_OPERAND (rhs, 0) == name
3813 && integer_zerop (TREE_OPERAND (rhs, 1))
3814 && types_compatible_p (TREE_TYPE (rhs),
3815 TREE_TYPE (TREE_TYPE (name)))
3816 && !TREE_THIS_VOLATILE (rhs))
3817 uses_ok++;
3819 else if (is_gimple_call (stmt))
3821 unsigned i;
3822 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3824 tree arg = gimple_call_arg (stmt, i);
3825 while (handled_component_p (arg))
3826 arg = TREE_OPERAND (arg, 0);
3827 if (TREE_CODE (arg) == MEM_REF
3828 && TREE_OPERAND (arg, 0) == name
3829 && integer_zerop (TREE_OPERAND (arg, 1))
3830 && types_compatible_p (TREE_TYPE (arg),
3831 TREE_TYPE (TREE_TYPE (name)))
3832 && !TREE_THIS_VOLATILE (arg))
3833 uses_ok++;
3837 /* If the number of valid uses does not match the number of
3838 uses in this stmt, there is an unhandled use. */
3839 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3840 --uses_ok;
3842 if (uses_ok != 0)
3843 ret = true;
3845 if (ret)
3846 BREAK_FROM_IMM_USE_STMT (ui);
3849 return ret;
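/* A schematic illustration of the distinction made above (use_ptr is a
   hypothetical function, not part of GCC):

     static int
     bar (int *p)
     {
       use_ptr (p);     the pointer itself escapes - a direct use
       return *p;       a plain compatible dereference - a valid use
     }

   The call that passes P itself would be expected to make this predicate
   return true, whereas the load from *P alone would not.  */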
3852 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3853 them in candidate_bitmap. Note that these do not necessarily include
3854 parameters which are unused and thus can be removed. Return true iff any
3855 such candidate has been found. */
3857 static bool
3858 find_param_candidates (void)
3860 tree parm;
3861 int count = 0;
3862 bool ret = false;
3863 const char *msg;
3865 for (parm = DECL_ARGUMENTS (current_function_decl);
3866 parm;
3867 parm = DECL_CHAIN (parm))
3869 tree type = TREE_TYPE (parm);
3870 tree_node **slot;
3872 count++;
3874 if (TREE_THIS_VOLATILE (parm)
3875 || TREE_ADDRESSABLE (parm)
3876 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3877 continue;
3879 if (is_unused_scalar_param (parm))
3881 ret = true;
3882 continue;
3885 if (POINTER_TYPE_P (type))
3887 type = TREE_TYPE (type);
3889 if (TREE_CODE (type) == FUNCTION_TYPE
3890 || TYPE_VOLATILE (type)
3891 || (TREE_CODE (type) == ARRAY_TYPE
3892 && TYPE_NONALIASED_COMPONENT (type))
3893 || !is_gimple_reg (parm)
3894 || is_va_list_type (type)
3895 || ptr_parm_has_direct_uses (parm))
3896 continue;
3898 else if (!AGGREGATE_TYPE_P (type))
3899 continue;
3901 if (!COMPLETE_TYPE_P (type)
3902 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3903 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3904 || (AGGREGATE_TYPE_P (type)
3905 && type_internals_preclude_sra_p (type, &msg)))
3906 continue;
3908 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3909 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3910 *slot = parm;
3912 ret = true;
3913 if (dump_file && (dump_flags & TDF_DETAILS))
3915 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3916 print_generic_expr (dump_file, parm, 0);
3917 fprintf (dump_file, "\n");
3921 func_param_count = count;
3922 return ret;
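/* For instance (an illustrative sketch, not from the sources): given

     struct S { int a, b; };
     static int baz (struct S s, volatile int v, int *p);

   S is a complete, non-empty aggregate and thus a candidate, V is
   rejected because it is volatile, and P qualifies only if the
   pointed-to data is accessed exclusively through the valid
   dereferences checked by ptr_parm_has_direct_uses above.  */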
3925 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3926 maybe_modified. */
3928 static bool
3929 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3930 void *data)
3932 struct access *repr = (struct access *) data;
3934 repr->grp_maybe_modified = 1;
3935 return true;
3938 /* Analyze what representatives (in linked lists accessible from
3939 REPRESENTATIVES) can be modified by side effects of statements in the
3940 current function. */
3942 static void
3943 analyze_modified_params (vec<access_p> representatives)
3945 int i;
3947 for (i = 0; i < func_param_count; i++)
3949 struct access *repr;
3951 for (repr = representatives[i];
3952 repr;
3953 repr = repr->next_grp)
3955 struct access *access;
3956 bitmap visited;
3957 ao_ref ar;
3959 if (no_accesses_p (repr))
3960 continue;
3961 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3962 || repr->grp_maybe_modified)
3963 continue;
3965 ao_ref_init (&ar, repr->expr);
3966 visited = BITMAP_ALLOC (NULL);
3967 for (access = repr; access; access = access->next_sibling)
3969 /* All accesses are read ones; otherwise grp_maybe_modified would
3970 be trivially set. */
3971 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3972 mark_maybe_modified, repr, &visited);
3973 if (repr->grp_maybe_modified)
3974 break;
3976 BITMAP_FREE (visited);
3981 /* Propagate distances in bb_dereferences in the direction opposite to the
3982 control flow edges, in each step storing the maximum of the current value
3983 and the minimum over all successors. These steps are repeated until the
3984 table stabilizes. Note that BBs which might terminate the function
3985 (according to the final_bbs bitmap) are never updated in this way. */
3987 static void
3988 propagate_dereference_distances (void)
3990 basic_block bb;
3992 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3993 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3994 FOR_EACH_BB_FN (bb, cfun)
3996 queue.quick_push (bb);
3997 bb->aux = bb;
4000 while (!queue.is_empty ())
4002 edge_iterator ei;
4003 edge e;
4004 bool change = false;
4005 int i;
4007 bb = queue.pop ();
4008 bb->aux = NULL;
4010 if (bitmap_bit_p (final_bbs, bb->index))
4011 continue;
4013 for (i = 0; i < func_param_count; i++)
4015 int idx = bb->index * func_param_count + i;
4016 bool first = true;
4017 HOST_WIDE_INT inh = 0;
4019 FOR_EACH_EDGE (e, ei, bb->succs)
4021 int succ_idx = e->dest->index * func_param_count + i;
4023 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4024 continue;
4026 if (first)
4028 first = false;
4029 inh = bb_dereferences [succ_idx];
4031 else if (bb_dereferences [succ_idx] < inh)
4032 inh = bb_dereferences [succ_idx];
4035 if (!first && bb_dereferences[idx] < inh)
4037 bb_dereferences[idx] = inh;
4038 change = true;
4042 if (change && !bitmap_bit_p (final_bbs, bb->index))
4043 FOR_EACH_EDGE (e, ei, bb->preds)
4045 if (e->src->aux)
4046 continue;
4048 e->src->aux = e->src;
4049 queue.quick_push (e->src);
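/* A small worked example of the propagation above (made-up numbers),
   for a single pointer parameter:

	     BB2  (bb_dereferences entry: 32)
	    /   \
	  BB3    BB4
	  (64)   (0)

   One step recomputes BB2 as max (32, min (64, 0)) = 32; only
   dereference distances certain to be reached on every path propagate
   upwards, and any block present in final_bbs is never updated.  */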
4054 /* Dump a dereferences TABLE with heading STR to file F. */
4056 static void
4057 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4059 basic_block bb;
4061 fprintf (f, "%s", str);
4062 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4063 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4065 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4066 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4068 int i;
4069 for (i = 0; i < func_param_count; i++)
4071 int idx = bb->index * func_param_count + i;
4072 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4075 fprintf (f, "\n");
4077 fprintf (f, "\n");
4080 /* Determine which (parts of) parameters passed by reference that are not
4081 assigned to are not certainly dereferenced in this function, and thus the
4082 dereferencing cannot be safely moved to the caller without potentially
4083 introducing a segfault. Mark such REPRESENTATIVES as
4084 grp_not_necessarilly_dereferenced.
4086 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4087 part, is calculated for each pointer parameter rather than a simple boolean,
4088 in order to handle cases when only a fraction of the whole
4089 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4090 an example).
4092 The maximum dereference distances for each pointer parameter and BB are
4093 already stored in bb_dereferences. This routine simply propagates these
4094 values upwards by propagate_dereference_distances and then compares the
4095 distances of individual parameters in the ENTRY BB to the equivalent
4096 distances of each representative of a (fraction of a) parameter. */
4098 static void
4099 analyze_caller_dereference_legality (vec<access_p> representatives)
4101 int i;
4103 if (dump_file && (dump_flags & TDF_DETAILS))
4104 dump_dereferences_table (dump_file,
4105 "Dereference table before propagation:\n",
4106 bb_dereferences);
4108 propagate_dereference_distances ();
4110 if (dump_file && (dump_flags & TDF_DETAILS))
4111 dump_dereferences_table (dump_file,
4112 "Dereference table after propagation:\n",
4113 bb_dereferences);
4115 for (i = 0; i < func_param_count; i++)
4117 struct access *repr = representatives[i];
4118 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4120 if (!repr || no_accesses_p (repr))
4121 continue;
4125 if ((repr->offset + repr->size) > bb_dereferences[idx])
4126 repr->grp_not_necessarilly_dereferenced = 1;
4127 repr = repr->next_grp;
4129 while (repr);
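/* E.g. (made-up numbers): if every path from the entry block certainly
   dereferences the first 64 bits of a pointer parameter, a
   representative with offset 32 and size 32 (offset + size = 64) is
   considered safe, while one with offset 64 and size 32 would be marked
   grp_not_necessarilly_dereferenced.  */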
4133 /* Return the representative access for the parameter declaration PARM if it is
4134 a scalar passed by reference which is not written to and the pointer value
4135 is not used directly. Thus, if it is legal to dereference it in the caller
4136 and we can rule out modifications through aliases, such a parameter should be
4137 turned into one passed by value. Return NULL otherwise. */
4139 static struct access *
4140 unmodified_by_ref_scalar_representative (tree parm)
4142 int i, access_count;
4143 struct access *repr;
4144 vec<access_p> *access_vec;
4146 access_vec = get_base_access_vector (parm);
4147 gcc_assert (access_vec);
4148 repr = (*access_vec)[0];
4149 if (repr->write)
4150 return NULL;
4151 repr->group_representative = repr;
4153 access_count = access_vec->length ();
4154 for (i = 1; i < access_count; i++)
4156 struct access *access = (*access_vec)[i];
4157 if (access->write)
4158 return NULL;
4159 access->group_representative = repr;
4160 access->next_sibling = repr->next_sibling;
4161 repr->next_sibling = access;
4164 repr->grp_read = 1;
4165 repr->grp_scalar_ptr = 1;
4166 return repr;
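/* The transformation this enables, schematically (names invented):

     static int get (int *p) { return *p; }       called as  get (&x);

   can become

     static int get (int p) { return p; }         called as  get (x);

   provided the pointee is never written to and the pointer value itself
   is never used directly.  */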
4169 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4170 associated with. REQ_ALIGN is the minimum required alignment. */
4172 static bool
4173 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4175 unsigned int exp_align;
4176 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4177 is an incompatible assignment in a call statement (and possibly even in asm
4178 statements). This can be relaxed by using a new temporary but only for
4179 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4180 intraprocedural SRA we deal with this by keeping the old aggregate around,
4181 something we cannot do in IPA-SRA.) */
4182 if (access->write
4183 && (is_gimple_call (access->stmt)
4184 || gimple_code (access->stmt) == GIMPLE_ASM))
4185 return true;
4187 exp_align = get_object_alignment (access->expr);
4188 if (exp_align < req_align)
4189 return true;
4191 return false;
4195 /* Sort collected accesses for parameter PARM, identify representatives for
4196 each accessed region and link them together. Return NULL if there are
4197 different but overlapping accesses, return the special pointer value meaning
4198 there are no accesses for this parameter if that is the case, and return the
4199 first representative otherwise. Set *RO_GRP if there is a group of accesses
4200 with only read (i.e. no write) accesses. */
4202 static struct access *
4203 splice_param_accesses (tree parm, bool *ro_grp)
4205 int i, j, access_count, group_count;
4206 int agg_size, total_size = 0;
4207 struct access *access, *res, **prev_acc_ptr = &res;
4208 vec<access_p> *access_vec;
4210 access_vec = get_base_access_vector (parm);
4211 if (!access_vec)
4212 return &no_accesses_representant;
4213 access_count = access_vec->length ();
4215 access_vec->qsort (compare_access_positions);
4217 i = 0;
4218 total_size = 0;
4219 group_count = 0;
4220 while (i < access_count)
4222 bool modification;
4223 tree a1_alias_type;
4224 access = (*access_vec)[i];
4225 modification = access->write;
4226 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4227 return NULL;
4228 a1_alias_type = reference_alias_ptr_type (access->expr);
4230 /* Access is about to become group representative unless we find some
4231 nasty overlap which would preclude us from breaking this parameter
4232 apart. */
4234 j = i + 1;
4235 while (j < access_count)
4237 struct access *ac2 = (*access_vec)[j];
4238 if (ac2->offset != access->offset)
4240 /* All or nothing law for parameters. */
4241 if (access->offset + access->size > ac2->offset)
4242 return NULL;
4243 else
4244 break;
4246 else if (ac2->size != access->size)
4247 return NULL;
4249 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4250 || (ac2->type != access->type
4251 && (TREE_ADDRESSABLE (ac2->type)
4252 || TREE_ADDRESSABLE (access->type)))
4253 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4254 return NULL;
4256 modification |= ac2->write;
4257 ac2->group_representative = access;
4258 ac2->next_sibling = access->next_sibling;
4259 access->next_sibling = ac2;
4260 j++;
4263 group_count++;
4264 access->grp_maybe_modified = modification;
4265 if (!modification)
4266 *ro_grp = true;
4267 *prev_acc_ptr = access;
4268 prev_acc_ptr = &access->next_grp;
4269 total_size += access->size;
4270 i = j;
4273 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4274 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4275 else
4276 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4277 if (total_size >= agg_size)
4278 return NULL;
4280 gcc_assert (group_count > 0);
4281 return res;
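/* Illustration of the all-or-nothing rule above (assuming a 64-bit
   target): for a parameter of type

     struct S { long l; int i; };

   accesses to S.l (offset 0, size 64) and S.i (offset 64, size 32)
   occupy disjoint regions and splice into two groups, whereas a third
   access covering, say, bits 0-95 as a single unit would partially
   overlap both and make this function return NULL.  */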
4284 /* Decide whether parameters with representative accesses given by REPR should
4285 be reduced into components. */
4287 static int
4288 decide_one_param_reduction (struct access *repr)
4290 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4291 bool by_ref;
4292 tree parm;
4294 parm = repr->base;
4295 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4296 gcc_assert (cur_parm_size > 0);
4298 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4300 by_ref = true;
4301 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4303 else
4305 by_ref = false;
4306 agg_size = cur_parm_size;
4309 if (dump_file)
4311 struct access *acc;
4312 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4313 print_generic_expr (dump_file, parm, 0);
4314 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4315 for (acc = repr; acc; acc = acc->next_grp)
4316 dump_access (dump_file, acc, true);
4319 total_size = 0;
4320 new_param_count = 0;
4322 for (; repr; repr = repr->next_grp)
4324 gcc_assert (parm == repr->base);
4326 /* Taking the address of a non-addressable field is verboten. */
4327 if (by_ref && repr->non_addressable)
4328 return 0;
4330 /* Do not decompose a non-BLKmode param in a way that would
4331 create BLKmode params. Especially for by-reference passing
4332 (thus, pointer-type param) this is hardly worthwhile. */
4333 if (DECL_MODE (parm) != BLKmode
4334 && TYPE_MODE (repr->type) == BLKmode)
4335 return 0;
4337 if (!by_ref || (!repr->grp_maybe_modified
4338 && !repr->grp_not_necessarilly_dereferenced))
4339 total_size += repr->size;
4340 else
4341 total_size += cur_parm_size;
4343 new_param_count++;
4346 gcc_assert (new_param_count > 0);
4348 if (optimize_function_for_size_p (cfun))
4349 parm_size_limit = cur_parm_size;
4350 else
4351 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4352 * cur_parm_size);
4354 if (total_size < agg_size
4355 && total_size <= parm_size_limit)
4357 if (dump_file)
4358 fprintf (dump_file, " ....will be split into %i components\n",
4359 new_param_count);
4360 return new_param_count;
4362 else
4363 return 0;
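/* A worked instance of the size check above (made-up numbers): for a
   1024-bit by-value parameter of which only two 32-bit fields are read,
   total_size is 64, which is below both agg_size (1024) and
   parm_size_limit (PARAM_IPA_SRA_PTR_GROWTH_FACTOR times 1024 when
   optimizing for speed), so the function would return 2 and the
   parameter would be split into two components.  */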
4366 /* The order of the following enums is important; we need to do extra work for
4367 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4368 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4369 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4371 /* Identify representatives of all accesses to all candidate parameters for
4372 IPA-SRA. Return result based on what representatives have been found. */
4374 static enum ipa_splicing_result
4375 splice_all_param_accesses (vec<access_p> &representatives)
4377 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4378 tree parm;
4379 struct access *repr;
4381 representatives.create (func_param_count);
4383 for (parm = DECL_ARGUMENTS (current_function_decl);
4384 parm;
4385 parm = DECL_CHAIN (parm))
4387 if (is_unused_scalar_param (parm))
4389 representatives.quick_push (&no_accesses_representant);
4390 if (result == NO_GOOD_ACCESS)
4391 result = UNUSED_PARAMS;
4393 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4394 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4395 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4397 repr = unmodified_by_ref_scalar_representative (parm);
4398 representatives.quick_push (repr);
4399 if (repr)
4400 result = UNMODIF_BY_REF_ACCESSES;
4402 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4404 bool ro_grp = false;
4405 repr = splice_param_accesses (parm, &ro_grp);
4406 representatives.quick_push (repr);
4408 if (repr && !no_accesses_p (repr))
4410 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4412 if (ro_grp)
4413 result = UNMODIF_BY_REF_ACCESSES;
4414 else if (result < MODIF_BY_REF_ACCESSES)
4415 result = MODIF_BY_REF_ACCESSES;
4417 else if (result < BY_VAL_ACCESSES)
4418 result = BY_VAL_ACCESSES;
4420 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4421 result = UNUSED_PARAMS;
4423 else
4424 representatives.quick_push (NULL);
4427 if (result == NO_GOOD_ACCESS)
4429 representatives.release ();
4430 return NO_GOOD_ACCESS;
4433 return result;
4436 /* Return the index of BASE in PARMS. Abort if it is not found. */
4438 static inline int
4439 get_param_index (tree base, vec<tree> parms)
4441 int i, len;
4443 len = parms.length ();
4444 for (i = 0; i < len; i++)
4445 if (parms[i] == base)
4446 return i;
4447 gcc_unreachable ();
4450 /* Convert the decisions made at the representative level into compact
4451 parameter adjustments. REPRESENTATIVES are pointers to first
4452 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4453 final number of adjustments. */
4455 static ipa_parm_adjustment_vec
4456 turn_representatives_into_adjustments (vec<access_p> representatives,
4457 int adjustments_count)
4459 vec<tree> parms;
4460 ipa_parm_adjustment_vec adjustments;
4461 tree parm;
4462 int i;
4464 gcc_assert (adjustments_count > 0);
4465 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4466 adjustments.create (adjustments_count);
4467 parm = DECL_ARGUMENTS (current_function_decl);
4468 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4470 struct access *repr = representatives[i];
4472 if (!repr || no_accesses_p (repr))
4474 struct ipa_parm_adjustment adj;
4476 memset (&adj, 0, sizeof (adj));
4477 adj.base_index = get_param_index (parm, parms);
4478 adj.base = parm;
4479 if (!repr)
4480 adj.op = IPA_PARM_OP_COPY;
4481 else
4482 adj.op = IPA_PARM_OP_REMOVE;
4483 adj.arg_prefix = "ISRA";
4484 adjustments.quick_push (adj);
4486 else
4488 struct ipa_parm_adjustment adj;
4489 int index = get_param_index (parm, parms);
4491 for (; repr; repr = repr->next_grp)
4493 memset (&adj, 0, sizeof (adj));
4494 gcc_assert (repr->base == parm);
4495 adj.base_index = index;
4496 adj.base = repr->base;
4497 adj.type = repr->type;
4498 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4499 adj.offset = repr->offset;
4500 adj.reverse = repr->reverse;
4501 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4502 && (repr->grp_maybe_modified
4503 || repr->grp_not_necessarilly_dereferenced));
4504 adj.arg_prefix = "ISRA";
4505 adjustments.quick_push (adj);
4509 parms.release ();
4510 return adjustments;
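/* Schematically (invented example): for foo (struct S s, int unused)
   where the representatives of S are its two fields, the resulting
   vector would contain two entries describing the replacement components
   of S at their respective offsets and one IPA_PARM_OP_REMOVE entry for
   UNUSED.  */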
4513 /* Analyze the collected accesses and produce a plan for what to do with the
4514 parameters, in the form of adjustments; NULL meaning nothing. */
4516 static ipa_parm_adjustment_vec
4517 analyze_all_param_accesses (void)
4519 enum ipa_splicing_result repr_state;
4520 bool proceed = false;
4521 int i, adjustments_count = 0;
4522 vec<access_p> representatives;
4523 ipa_parm_adjustment_vec adjustments;
4525 repr_state = splice_all_param_accesses (representatives);
4526 if (repr_state == NO_GOOD_ACCESS)
4527 return ipa_parm_adjustment_vec ();
4529 /* If there are any parameters passed by reference which are not modified
4530 directly, we need to check whether they can be modified indirectly. */
4531 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4533 analyze_caller_dereference_legality (representatives);
4534 analyze_modified_params (representatives);
4537 for (i = 0; i < func_param_count; i++)
4539 struct access *repr = representatives[i];
4541 if (repr && !no_accesses_p (repr))
4543 if (repr->grp_scalar_ptr)
4545 adjustments_count++;
4546 if (repr->grp_not_necessarilly_dereferenced
4547 || repr->grp_maybe_modified)
4548 representatives[i] = NULL;
4549 else
4551 proceed = true;
4552 sra_stats.scalar_by_ref_to_by_val++;
4555 else
4557 int new_components = decide_one_param_reduction (repr);
4559 if (new_components == 0)
4561 representatives[i] = NULL;
4562 adjustments_count++;
4564 else
4566 adjustments_count += new_components;
4567 sra_stats.aggregate_params_reduced++;
4568 sra_stats.param_reductions_created += new_components;
4569 proceed = true;
4573 else
4575 if (no_accesses_p (repr))
4577 proceed = true;
4578 sra_stats.deleted_unused_parameters++;
4580 adjustments_count++;
4584 if (!proceed && dump_file)
4585 fprintf (dump_file, "NOT proceeding to change params.\n");
4587 if (proceed)
4588 adjustments = turn_representatives_into_adjustments (representatives,
4589 adjustments_count);
4590 else
4591 adjustments = ipa_parm_adjustment_vec ();
4593 representatives.release ();
4594 return adjustments;
4597 /* If a parameter replacement identified by ADJ does not yet exist in the form
4598 of a declaration, create it and record it; otherwise return the previously
4599 created one. */
4601 static tree
4602 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4604 tree repl;
4605 if (!adj->new_ssa_base)
4607 char *pretty_name = make_fancy_name (adj->base);
4609 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4610 DECL_NAME (repl) = get_identifier (pretty_name);
4611 obstack_free (&name_obstack, pretty_name);
4613 adj->new_ssa_base = repl;
4615 else
4616 repl = adj->new_ssa_base;
4617 return repl;
4620 /* Find the first adjustment for a particular parameter BASE in a vector of
4621 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4622 adjustment. */
4624 static struct ipa_parm_adjustment *
4625 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4627 int i, len;
4629 len = adjustments.length ();
4630 for (i = 0; i < len; i++)
4632 struct ipa_parm_adjustment *adj;
4634 adj = &adjustments[i];
4635 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4636 return adj;
4639 return NULL;
4642 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4643 parameter which is to be removed because its value is not used, create a new
4644 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4645 original with it and return it. If there is no need to re-map, return NULL.
4646 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4648 static tree
4649 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4650 ipa_parm_adjustment_vec adjustments)
4652 struct ipa_parm_adjustment *adj;
4653 tree decl, repl, new_name;
4655 if (TREE_CODE (old_name) != SSA_NAME)
4656 return NULL;
4658 decl = SSA_NAME_VAR (old_name);
4659 if (decl == NULL_TREE
4660 || TREE_CODE (decl) != PARM_DECL)
4661 return NULL;
4663 adj = get_adjustment_for_base (adjustments, decl);
4664 if (!adj)
4665 return NULL;
4667 repl = get_replaced_param_substitute (adj);
4668 new_name = make_ssa_name (repl, stmt);
4670 if (dump_file)
4672 fprintf (dump_file, "replacing an SSA name of a removed param ");
4673 print_generic_expr (dump_file, old_name, 0);
4674 fprintf (dump_file, " with ");
4675 print_generic_expr (dump_file, new_name, 0);
4676 fprintf (dump_file, "\n");
4679 replace_uses_by (old_name, new_name);
4680 return new_name;
4683 /* If the statement STMT contains any expressions that need to be replaced with
4684 a different one as noted by ADJUSTMENTS, do so. Handle any potential type
4685 incompatibilities (GSI is used to accommodate conversion statements and must
4686 point to the statement). Return true iff the statement was modified. */
4688 static bool
4689 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4690 ipa_parm_adjustment_vec adjustments)
4692 tree *lhs_p, *rhs_p;
4693 bool any;
4695 if (!gimple_assign_single_p (stmt))
4696 return false;
4698 rhs_p = gimple_assign_rhs1_ptr (stmt);
4699 lhs_p = gimple_assign_lhs_ptr (stmt);
4701 any = ipa_modify_expr (rhs_p, false, adjustments);
4702 any |= ipa_modify_expr (lhs_p, false, adjustments);
4703 if (any)
4705 tree new_rhs = NULL_TREE;
4707 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4709 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4711 /* V_C_Es of constructors can cause trouble (PR 42714). */
4712 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4713 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4714 else
4715 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4716 NULL);
4718 else
4719 new_rhs = fold_build1_loc (gimple_location (stmt),
4720 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4721 *rhs_p);
4723 else if (REFERENCE_CLASS_P (*rhs_p)
4724 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4725 && !is_gimple_reg (*lhs_p))
4726 /* This can happen when an assignment between two single-field
4727 structures is turned into an assignment between two pointers to
4728 scalars (PR 42237). */
4729 new_rhs = *rhs_p;
4731 if (new_rhs)
4733 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4734 true, GSI_SAME_STMT);
4736 gimple_assign_set_rhs_from_tree (gsi, tmp);
4739 return true;
4742 return false;
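/* A sketch of the fix-up above (hypothetical types): if the adjustments
   turn an assignment between two single-field structs of type
   struct { float f; } into one between floats on one side only, the
   mismatched RHS is wrapped as

     VIEW_CONVERT_EXPR <float> (rhs)

   and forced into a separate statement, while a CONSTRUCTOR on the RHS
   is instead replaced by a zero constant (or an empty constructor) of
   the LHS type, since V_C_Es of constructors can cause trouble.  */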
4745 /* Traverse the function body and perform all modifications as described in
4746 ADJUSTMENTS. Return true iff the CFG has been changed. */
4748 bool
4749 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4751 bool cfg_changed = false;
4752 basic_block bb;
4754 FOR_EACH_BB_FN (bb, cfun)
4756 gimple_stmt_iterator gsi;
4758 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4760 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4761 tree new_lhs, old_lhs = gimple_phi_result (phi);
4762 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4763 if (new_lhs)
4765 gimple_phi_set_result (phi, new_lhs);
4766 release_ssa_name (old_lhs);
4770 gsi = gsi_start_bb (bb);
4771 while (!gsi_end_p (gsi))
4773 gimple *stmt = gsi_stmt (gsi);
4774 bool modified = false;
4775 tree *t;
4776 unsigned i;
4778 switch (gimple_code (stmt))
4780 case GIMPLE_RETURN:
4781 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4782 if (*t != NULL_TREE)
4783 modified |= ipa_modify_expr (t, true, adjustments);
4784 break;
4786 case GIMPLE_ASSIGN:
4787 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4788 break;
4790 case GIMPLE_CALL:
4791 /* Operands must be processed before the lhs. */
4792 for (i = 0; i < gimple_call_num_args (stmt); i++)
4794 t = gimple_call_arg_ptr (stmt, i);
4795 modified |= ipa_modify_expr (t, true, adjustments);
4798 if (gimple_call_lhs (stmt))
4800 t = gimple_call_lhs_ptr (stmt);
4801 modified |= ipa_modify_expr (t, false, adjustments);
4803 break;
4805 case GIMPLE_ASM:
4807 gasm *asm_stmt = as_a <gasm *> (stmt);
4808 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4810 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4811 modified |= ipa_modify_expr (t, true, adjustments);
4813 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4815 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4816 modified |= ipa_modify_expr (t, false, adjustments);
4819 break;
4821 default:
4822 break;
4825 def_operand_p defp;
4826 ssa_op_iter iter;
4827 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4829 tree old_def = DEF_FROM_PTR (defp);
4830 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
4831 adjustments))
4833 SET_DEF (defp, new_def);
4834 release_ssa_name (old_def);
4835 modified = true;
4839 if (modified)
4841 update_stmt (stmt);
4842 if (maybe_clean_eh_stmt (stmt)
4843 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4844 cfg_changed = true;
4846 gsi_next (&gsi);
4850 return cfg_changed;
4853 /* Call gimple_debug_bind_reset_value on all debug statements describing
4854 gimple register parameters that are being removed or replaced. */
4856 static void
4857 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4859 int i, len;
4860 gimple_stmt_iterator *gsip = NULL, gsi;
4862 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4864 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4865 gsip = &gsi;
4867 len = adjustments.length ();
4868 for (i = 0; i < len; i++)
4870 struct ipa_parm_adjustment *adj;
4871 imm_use_iterator ui;
4872 gimple *stmt;
4873 gdebug *def_temp;
4874 tree name, vexpr, copy = NULL_TREE;
4875 use_operand_p use_p;
4877 adj = &adjustments[i];
4878 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4879 continue;
4880 name = ssa_default_def (cfun, adj->base);
4881 vexpr = NULL;
4882 if (name)
4883 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4885 if (gimple_clobber_p (stmt))
4887 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4888 unlink_stmt_vdef (stmt);
4889 gsi_remove (&cgsi, true);
4890 release_defs (stmt);
4891 continue;
4893 /* All other users must have been removed by
4894 ipa_sra_modify_function_body. */
4895 gcc_assert (is_gimple_debug (stmt));
4896 if (vexpr == NULL && gsip != NULL)
4898 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4899 vexpr = make_node (DEBUG_EXPR_DECL);
4900 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4901 NULL);
4902 DECL_ARTIFICIAL (vexpr) = 1;
4903 TREE_TYPE (vexpr) = TREE_TYPE (name);
4904 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4905 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4907 if (vexpr)
4909 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4910 SET_USE (use_p, vexpr);
4912 else
4913 gimple_debug_bind_reset_value (stmt);
4914 update_stmt (stmt);
4916 /* Create a VAR_DECL for debug info purposes. */
4917 if (!DECL_IGNORED_P (adj->base))
4919 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4920 VAR_DECL, DECL_NAME (adj->base),
4921 TREE_TYPE (adj->base));
4922 if (DECL_PT_UID_SET_P (adj->base))
4923 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4924 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4925 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4926 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4927 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4928 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4929 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4930 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4931 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4932 SET_DECL_RTL (copy, 0);
4933 TREE_USED (copy) = 1;
4934 DECL_CONTEXT (copy) = current_function_decl;
4935 add_local_decl (cfun, copy);
4936 DECL_CHAIN (copy) =
4937 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4938 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4940 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4942 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4943 if (vexpr)
4944 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4945 else
4946 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4947 NULL);
4948 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4953 /* Return false if all callers have at least as many actual arguments as there
4954 are formal parameters in the current function and their types
4955 match. */
4957 static bool
4958 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4959 void *data ATTRIBUTE_UNUSED)
4961 struct cgraph_edge *cs;
4962 for (cs = node->callers; cs; cs = cs->next_caller)
4963 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4964 return true;
4966 return false;
4969 /* Return false if all callers have a VUSE attached to their call statement. */
4971 static bool
4972 some_callers_have_no_vuse_p (struct cgraph_node *node,
4973 void *data ATTRIBUTE_UNUSED)
4975 struct cgraph_edge *cs;
4976 for (cs = node->callers; cs; cs = cs->next_caller)
4977 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4978 return true;
4980 return false;
4983 /* Convert all callers of NODE. */
4985 static bool
4986 convert_callers_for_node (struct cgraph_node *node,
4987 void *data)
4989 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4990 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4991 struct cgraph_edge *cs;
4993 for (cs = node->callers; cs; cs = cs->next_caller)
4995 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4997 if (dump_file)
4998 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4999 xstrdup (cs->caller->name ()),
5000 cs->caller->order,
5001 xstrdup (cs->callee->name ()),
5002 cs->callee->order);
5004 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5006 pop_cfun ();
5009 for (cs = node->callers; cs; cs = cs->next_caller)
5010 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5011 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5012 compute_inline_parameters (cs->caller, true);
5013 BITMAP_FREE (recomputed_callers);
5015 return true;
5018 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5020 static void
5021 convert_callers (struct cgraph_node *node, tree old_decl,
5022 ipa_parm_adjustment_vec adjustments)
5024 basic_block this_block;
5026 node->call_for_symbol_and_aliases (convert_callers_for_node,
5027 &adjustments, false);
5029 if (!encountered_recursive_call)
5030 return;
5032 FOR_EACH_BB_FN (this_block, cfun)
5034 gimple_stmt_iterator gsi;
5036 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5038 gcall *stmt;
5039 tree call_fndecl;
5040 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5041 if (!stmt)
5042 continue;
5043 call_fndecl = gimple_call_fndecl (stmt);
5044 if (call_fndecl == old_decl)
5046 if (dump_file)
5047 fprintf (dump_file, "Adjusting recursive call");
5048 gimple_call_set_fndecl (stmt, node->decl);
5049 ipa_modify_call_arguments (NULL, stmt, adjustments);
5054 return;
5057 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5058 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5060 static bool
5061 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5063 struct cgraph_node *new_node;
5064 bool cfg_changed;
5066 cgraph_edge::rebuild_edges ();
5067 free_dominance_info (CDI_DOMINATORS);
5068 pop_cfun ();
5070 /* This must be done after rebuilding cgraph edges for node above.
5071 Otherwise any recursive calls to node that are recorded in
5072 redirect_callers will be corrupted. */
5073 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5074 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5075 NULL, false, NULL, NULL,
5076 "isra");
5077 redirect_callers.release ();
5079 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5080 ipa_modify_formal_parameters (current_function_decl, adjustments);
5081 cfg_changed = ipa_sra_modify_function_body (adjustments);
5082 sra_ipa_reset_debug_stmts (adjustments);
5083 convert_callers (new_node, node->decl, adjustments);
5084 new_node->make_local ();
5085 return cfg_changed;
5088 /* Means of communication between ipa_sra_check_caller and
5089 ipa_sra_preliminary_function_checks. */
5091 struct ipa_sra_check_caller_data
5093 bool has_callers;
5094 bool bad_arg_alignment;
5095 bool has_thunk;
5098 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5099 ipa_sra_check_caller_data. Also check whether all aggregate arguments in all
5100 known calls are unit aligned and, if not, set the appropriate flag in DATA
5101 too. */
5103 static bool
5104 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5106 if (!node->callers)
5107 return false;
5109 struct ipa_sra_check_caller_data *iscc;
5110 iscc = (struct ipa_sra_check_caller_data *) data;
5111 iscc->has_callers = true;
5113 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5115 if (cs->caller->thunk.thunk_p)
5117 iscc->has_thunk = true;
5118 return true;
5120 gimple *call_stmt = cs->call_stmt;
5121 unsigned count = gimple_call_num_args (call_stmt);
5122 for (unsigned i = 0; i < count; i++)
5124 tree arg = gimple_call_arg (call_stmt, i);
5125 if (is_gimple_reg (arg))
5126 continue;
5128 tree offset;
5129 HOST_WIDE_INT bitsize, bitpos;
5130 machine_mode mode;
5131 int unsignedp, reversep, volatilep = 0;
5132 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5133 &unsignedp, &reversep, &volatilep, false);
5134 if (bitpos % BITS_PER_UNIT)
5136 iscc->bad_arg_alignment = true;
5137 return true;
5142 return false;
5145 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5146 its attributes; return true otherwise. NODE is the cgraph node of the current
5147 function. */
5149 static bool
5150 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5152 if (!node->can_be_local_p ())
5154 if (dump_file)
5155 fprintf (dump_file, "Function not local to this compilation unit.\n");
5156 return false;
5159 if (!node->local.can_change_signature)
5161 if (dump_file)
5162 fprintf (dump_file, "Function cannot change signature.\n");
5163 return false;
5166 if (!tree_versionable_function_p (node->decl))
5168 if (dump_file)
5169 fprintf (dump_file, "Function is not versionable.\n");
5170 return false;
5173 if (!opt_for_fn (node->decl, optimize)
5174 || !opt_for_fn (node->decl, flag_ipa_sra))
5176 if (dump_file)
5177 fprintf (dump_file, "Function not optimized.\n");
5178 return false;
5181 if (DECL_VIRTUAL_P (current_function_decl))
5183 if (dump_file)
5184 fprintf (dump_file, "Function is a virtual method.\n");
5185 return false;
5188 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5189 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5191 if (dump_file)
5192 fprintf (dump_file, "Function too big to be made truly local.\n");
5193 return false;
5196 if (cfun->stdarg)
5198 if (dump_file)
5199 fprintf (dump_file, "Function uses stdarg.\n");
5200 return false;
5203 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5204 return false;
5206 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5208 if (dump_file)
5209 fprintf (dump_file, "Always inline function will be inlined "
5210 "anyway. \n");
5211 return false;
5214 struct ipa_sra_check_caller_data iscc;
5215 memset (&iscc, 0, sizeof (iscc));
5216 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5217 if (!iscc.has_callers)
5219 if (dump_file)
5220 fprintf (dump_file,
5221 "Function has no callers in this compilation unit.\n");
5222 return false;
5225 if (iscc.bad_arg_alignment)
5227 if (dump_file)
5228 fprintf (dump_file,
5229 "A function call has an argument with non-unit alignment.\n");
5230 return false;
5233 if (iscc.has_thunk)
5235 if (dump_file)
5236 fprintf (dump_file,
5237 "A has thunk.\n");
5238 return false;
5241 return true;
5244 /* Perform early interprocedural SRA. */
5246 static unsigned int
5247 ipa_early_sra (void)
5249 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5250 ipa_parm_adjustment_vec adjustments;
5251 int ret = 0;
5253 if (!ipa_sra_preliminary_function_checks (node))
5254 return 0;
5256 sra_initialize ();
5257 sra_mode = SRA_MODE_EARLY_IPA;
5259 if (!find_param_candidates ())
5261 if (dump_file)
5262 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5263 goto simple_out;
5266 if (node->call_for_symbol_and_aliases
5267 (some_callers_have_mismatched_arguments_p, NULL, true))
5269 if (dump_file)
5270 fprintf (dump_file, "There are callers with an insufficient number of "
5271 "arguments or arguments with type mismatches.\n");
5272 goto simple_out;
5275 if (node->call_for_symbol_and_aliases
5276 (some_callers_have_no_vuse_p, NULL, true))
5278 if (dump_file)
5279 fprintf (dump_file, "There are callers with no VUSE attached "
5280 "to a call stmt.\n");
5281 goto simple_out;
5284 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5285 func_param_count
5286 * last_basic_block_for_fn (cfun));
5287 final_bbs = BITMAP_ALLOC (NULL);
5289 scan_function ();
5290 if (encountered_apply_args)
5292 if (dump_file)
5293 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5294 goto out;
5297 if (encountered_unchangable_recursive_call)
5299 if (dump_file)
5300 fprintf (dump_file, "Function calls itself with insufficient "
5301 "number of arguments.\n");
5302 goto out;
5305 adjustments = analyze_all_param_accesses ();
5306 if (!adjustments.exists ())
5307 goto out;
5308 if (dump_file)
5309 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5311 if (modify_function (node, adjustments))
5312 ret = TODO_update_ssa | TODO_cleanup_cfg;
5313 else
5314 ret = TODO_update_ssa;
5315 adjustments.release ();
5317 statistics_counter_event (cfun, "Unused parameters deleted",
5318 sra_stats.deleted_unused_parameters);
5319 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5320 sra_stats.scalar_by_ref_to_by_val);
5321 statistics_counter_event (cfun, "Aggregate parameters broken up",
5322 sra_stats.aggregate_params_reduced);
5323 statistics_counter_event (cfun, "Aggregate parameter components created",
5324 sra_stats.param_reductions_created);
5326 out:
5327 BITMAP_FREE (final_bbs);
5328 free (bb_dereferences);
5329 simple_out:
5330 sra_deinitialize ();
5331 return ret;
5334 namespace {
5336 const pass_data pass_data_early_ipa_sra =
5338 GIMPLE_PASS, /* type */
5339 "eipa_sra", /* name */
5340 OPTGROUP_NONE, /* optinfo_flags */
5341 TV_IPA_SRA, /* tv_id */
5342 0, /* properties_required */
5343 0, /* properties_provided */
5344 0, /* properties_destroyed */
5345 0, /* todo_flags_start */
5346 TODO_dump_symtab, /* todo_flags_finish */
5349 class pass_early_ipa_sra : public gimple_opt_pass
5351 public:
5352 pass_early_ipa_sra (gcc::context *ctxt)
5353 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5356 /* opt_pass methods: */
5357 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5358 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5360 }; // class pass_early_ipa_sra
5362 } // anon namespace
5364 gimple_opt_pass *
5365 make_pass_early_ipa_sra (gcc::context *ctxt)
5367 return new pass_early_ipa_sra (ctxt);