/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2016 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
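
/* An illustrative sketch of the transformation (variable names are made up;
   the pass derives the actual names via make_fancy_name below):

       struct point { int x; int y; };

       int
       foo (struct point p)
       {
         p.x += 1;
         return p.x + p.y;
       }

   is conceptually turned into

       int
       foo (struct point p)
       {
         int p$x = p.x;
         int p$y = p.y;
         p$x += 1;
         return p$x + p$y;
       }

   where p$x and p$y are initialized from the parameter aggregate and can
   then be handled by the scalar optimizers.  */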
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
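
/* For example (an illustrative sketch, assuming 32-bit int): for

       struct S { struct { int a; int b; } in; int c; } s;

   with accesses to s.in, s.in.a, s.in.b and s.c, the group representatives
   would form the trees

       <offset 0, size 64>    s.in
         <offset 0, size 32>    s.in.a
         <offset 32, size 32>   s.in.b
       <offset 64, size 32>   s.c

   where s.in and s.c are roots linked by next_grp, and s.in.a and s.in.b
   are children of s.in linked by next_sibling.  */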
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order?  */
  unsigned reverse : 1;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID, return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  fprintf (f, ", non_addressable = %d, reverse = %d",
	   access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return TREE_CODE (decl) == VAR_DECL && DECL_IN_CONSTANT_POOL (decl);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
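
/* For instance (an illustrative sketch, types made up):

       struct bad { volatile int v; };
       struct arr { struct bad b[4]; };

   type_internals_preclude_sra_p returns true for both types: for struct bad
   directly because of the volatile field, and for struct arr through the
   recursive check on the array element type; in both cases *MSG ends up as
   "volatile structure field".  */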
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
static bool maybe_add_sra_candidate (tree);

/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  Also scan for uses of constant pool as we go along and add
   to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
	  && !is_gimple_reg_type (TREE_TYPE (expr))
	  && dump_file && (dump_flags & TDF_DETAILS))
	{
	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
	     and elements of multidimensional arrays (which are
	     multi-element arrays in their own right).  */
	  fprintf (dump_file, "Allowing non-reg-type load of part"
			      " of constant-pool entry: ");
	  print_generic_expr (dump_file, expr, 0);
	}
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  */

static bool
scalarizable_type_p (tree type)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!is_gimple_reg_type (ft)
		&& !scalarizable_type_p (ft))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  /* Variable-length array, do not allow scalarization.  */
	  return false;

	tree elem = TREE_TYPE (type);
	if (!is_gimple_reg_type (elem)
	    && !scalarizable_type_p (elem))
	  return false;
	return true;
      }
    default:
      return false;
    }
}
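
/* A few illustrative examples (made-up types, assuming none of them
   contains a placeholder):

       struct ok  { int i; float f[2]; };   scalarizable
       struct bf  { int i : 3; };           not scalarizable (bit-field)
       struct fam { int n; int a[]; };      not scalarizable (the flexible
                                            array member has no fixed
                                            size).  */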
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression for
   the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
	    tree ft = TREE_TYPE (fld);
	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
			    nref, ft);
	  }
      break;
    case ARRAY_TYPE:
      {
	tree elemtype = TREE_TYPE (decl_type);
	tree elem_size = TYPE_SIZE (elemtype);
	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
	gcc_assert (el_size > 0);

	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
	if (maxidx)
	  {
	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
	    tree domain = TYPE_DOMAIN (decl_type);
	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
	    offset_int idx = wi::to_offset (minidx);
	    offset_int max = wi::to_offset (maxidx);
	    if (!TYPE_UNSIGNED (domain))
	      {
		idx = wi::sext (idx, TYPE_PRECISION (domain));
		max = wi::sext (max, TYPE_PRECISION (domain));
	      }
	    for (int el_off = offset; wi::les_p (idx, max); ++idx)
	      {
		tree nref = build4 (ARRAY_REF, elemtype,
				    ref,
				    wide_int_to_tree (domain, idx),
				    NULL_TREE, NULL_TREE);
		scalarize_elem (base, el_off, el_size,
				TYPE_REVERSE_STORAGE_ORDER (decl_type),
				nref, elemtype);
		el_off += el_size;
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member, REVERSE gives its storage order, and REF must be the reference
   expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
		tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}
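
/* E.g. (an illustrative sketch): for

       struct S { int i; short h[2]; } s;

   completely_scalarize (s, struct S, 0, s) ends up creating
   total_scalarization accesses roughly at <offset 0, size 32> for s.i,
   <offset 32, size 16> for s.h[0] and <offset 48, size 16> for s.h[1]
   (assuming 32-bit int and 16-bit short), recursing through scalarize_elem
   until gimple register types are reached.  */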
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
	lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (storage_order_barrier_p (lhs))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
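
/* E.g. (a sketch): for candidate aggregates a and b of the same type, the
   statement

       a = b;

   yields a write access for a (LACC), a read access for b (RACC) and,
   because the sizes match, the types are compatible and neither side is an
   unscalarizable region, an assign_link connecting the two, so that
   subaccesses discovered on one side can later be propagated to the
   other.  */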
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
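
/* As an example (illustrative): accesses with (offset, size) pairs

       (0, 32)  (0, 64)  (32, 32)

   sort as (0, 64), (0, 32), (32, 32): smaller offsets come first and, at
   equal offsets, bigger sizes do, so that an enclosing access precedes the
   accesses nested in it.  At equal offset and size, the type-based rules
   above make the most suitable representative come first, e.g. a scalar
   type before an aggregate one.  */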
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable of EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
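
/* E.g. (illustrative): for the expression s.in.a the produced name is along
   the lines of "s$in$a", and v[3].x yields "v$3$x"; declarations without a
   name contribute "D<uid>" instead.  */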
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, gsi must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
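
/* For instance (an illustrative sketch): with BASE being a simple VAR_DECL
   s, OFFSET 32 and EXP_TYPE int, the result is equivalent to

       MEM[(int *)&s + 4B]

   and no statements need to be inserted because
   get_addr_base_and_unit_offset succeeds on a plain declaration.  */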
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
				gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return
      build_ref_for_offset (loc, base, offset, model->reverse, model->type,
			    gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
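
/* E.g. (illustrative): for TYPE struct { int a; int b[2]; }, OFFSET 64 and
   EXP_TYPE int (assuming 32-bit int), *RES is wrapped in a COMPONENT_REF
   and an ARRAY_REF so that it prints as something like res.b[1]; such
   expressions are only meant for diagnostics.  */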
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
1880 /* Return true if VAR is a candidate for SRA. */
1882 static bool
1883 maybe_add_sra_candidate (tree var)
1885 tree type = TREE_TYPE (var);
1886 const char *msg;
1887 tree_node **slot;
1889 if (!AGGREGATE_TYPE_P (type))
1891 reject (var, "not aggregate");
1892 return false;
1894 /* Allow constant-pool entries (that "need to live in memory")
1895 unless we are doing IPA SRA. */
1896 if (needs_to_live_in_memory (var)
1897 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1899 reject (var, "needs to live in memory");
1900 return false;
1902 if (TREE_THIS_VOLATILE (var))
1904 reject (var, "is volatile");
1905 return false;
1907 if (!COMPLETE_TYPE_P (type))
1909 reject (var, "has incomplete type");
1910 return false;
1912 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1914 reject (var, "type size not fixed");
1915 return false;
1917 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1919 reject (var, "type size is zero");
1920 return false;
1922 if (type_internals_preclude_sra_p (type, &msg))
1924 reject (var, msg);
1925 return false;
1927 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1928 we also want to schedule it rather late. Thus we ignore it in
1929 the early pass. */
1930 (sra_mode == SRA_MODE_EARLY_INTRA
1931 && is_va_list_type (type)))
1933 reject (var, "is va_list");
1934 return false;
1937 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1938 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1939 *slot = var;
1941 if (dump_file && (dump_flags & TDF_DETAILS))
1943 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1944 print_generic_expr (dump_file, var, 0);
1945 fprintf (dump_file, "\n");
1948 return true;
1951 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1952    those declarations whose type is suitable for scalarization.  */
1954 static bool
1955 find_var_candidates (void)
1957 tree var, parm;
1958 unsigned int i;
1959 bool ret = false;
1961 for (parm = DECL_ARGUMENTS (current_function_decl);
1962 parm;
1963 parm = DECL_CHAIN (parm))
1964 ret |= maybe_add_sra_candidate (parm);
1966 FOR_EACH_LOCAL_DECL (cfun, i, var)
1968 if (TREE_CODE (var) != VAR_DECL)
1969 continue;
1971 ret |= maybe_add_sra_candidate (var);
1974 return ret;
1977 /* Sort all accesses for the given variable, check for partial overlaps and
1978 return NULL if there are any. If there are none, pick a representative for
1979 each combination of offset and size and create a linked list out of them.
1980 Return the pointer to the first representative and make sure it is the first
1981 one in the vector of accesses. */
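/* Illustrative example, added for exposition and not part of the original
   sources: with 32-bit ints and

     struct { int a; int b; } s;

   accesses <offset 0, size 32> (s.a) and <32, 32> (s.b) do not overlap and
   each gets a representative.  If s were additionally accessed at <16, 32>,
   e.g. through a type-punning pointer, that access would partially overlap
   s.a and this function would return NULL, disqualifying s.  */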
1983 static struct access *
1984 sort_and_splice_var_accesses (tree var)
1986 int i, j, access_count;
1987 struct access *res, **prev_acc_ptr = &res;
1988 vec<access_p> *access_vec;
1989 bool first = true;
1990 HOST_WIDE_INT low = -1, high = 0;
1992 access_vec = get_base_access_vector (var);
1993 if (!access_vec)
1994 return NULL;
1995 access_count = access_vec->length ();
1997 /* Sort by <OFFSET, SIZE>. */
1998 access_vec->qsort (compare_access_positions);
2000 i = 0;
2001 while (i < access_count)
2003 struct access *access = (*access_vec)[i];
2004 bool grp_write = access->write;
2005 bool grp_read = !access->write;
2006 bool grp_scalar_write = access->write
2007 && is_gimple_reg_type (access->type);
2008 bool grp_scalar_read = !access->write
2009 && is_gimple_reg_type (access->type);
2010 bool grp_assignment_read = access->grp_assignment_read;
2011 bool grp_assignment_write = access->grp_assignment_write;
2012 bool multiple_scalar_reads = false;
2013 bool total_scalarization = access->grp_total_scalarization;
2014 bool grp_partial_lhs = access->grp_partial_lhs;
2015 bool first_scalar = is_gimple_reg_type (access->type);
2016 bool unscalarizable_region = access->grp_unscalarizable_region;
2018 if (first || access->offset >= high)
2020 first = false;
2021 low = access->offset;
2022 high = access->offset + access->size;
2024 else if (access->offset > low && access->offset + access->size > high)
2025 return NULL;
2026 else
2027 gcc_assert (access->offset >= low
2028 && access->offset + access->size <= high);
2030 j = i + 1;
2031 while (j < access_count)
2033 struct access *ac2 = (*access_vec)[j];
2034 if (ac2->offset != access->offset || ac2->size != access->size)
2035 break;
2036 if (ac2->write)
2038 grp_write = true;
2039 grp_scalar_write = (grp_scalar_write
2040 || is_gimple_reg_type (ac2->type));
2042 else
2044 grp_read = true;
2045 if (is_gimple_reg_type (ac2->type))
2047 if (grp_scalar_read)
2048 multiple_scalar_reads = true;
2049 else
2050 grp_scalar_read = true;
2053 grp_assignment_read |= ac2->grp_assignment_read;
2054 grp_assignment_write |= ac2->grp_assignment_write;
2055 grp_partial_lhs |= ac2->grp_partial_lhs;
2056 unscalarizable_region |= ac2->grp_unscalarizable_region;
2057 total_scalarization |= ac2->grp_total_scalarization;
2058 relink_to_new_repr (access, ac2);
2060 /* If there are both aggregate-type and scalar-type accesses with
2061 this combination of size and offset, the comparison function
2062 should have put the scalars first. */
2063 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2064 ac2->group_representative = access;
2065 j++;
2068 i = j;
2070 access->group_representative = access;
2071 access->grp_write = grp_write;
2072 access->grp_read = grp_read;
2073 access->grp_scalar_read = grp_scalar_read;
2074 access->grp_scalar_write = grp_scalar_write;
2075 access->grp_assignment_read = grp_assignment_read;
2076 access->grp_assignment_write = grp_assignment_write;
2077 access->grp_hint = multiple_scalar_reads || total_scalarization;
2078 access->grp_total_scalarization = total_scalarization;
2079 access->grp_partial_lhs = grp_partial_lhs;
2080 access->grp_unscalarizable_region = unscalarizable_region;
2081 if (access->first_link)
2082 add_access_to_work_queue (access);
2084 *prev_acc_ptr = access;
2085 prev_acc_ptr = &access->next_grp;
2088 gcc_assert (res == (*access_vec)[0]);
2089 return res;
2092 /* Create a variable for the given ACCESS which determines the type, name and a
2093 few other properties. Return the variable declaration and store it also to
2094 ACCESS->replacement. */
2096 static tree
2097 create_access_replacement (struct access *access)
2099 tree repl;
2101 if (access->grp_to_be_debug_replaced)
2103 repl = create_tmp_var_raw (access->type);
2104 DECL_CONTEXT (repl) = current_function_decl;
2106 else
2107 /* Drop any special alignment on the type if it's not on the main
2108 variant. This avoids issues with weirdo ABIs like AAPCS. */
2109 repl = create_tmp_var (build_qualified_type
2110 (TYPE_MAIN_VARIANT (access->type),
2111 TYPE_QUALS (access->type)), "SR");
2112 if (TREE_CODE (access->type) == COMPLEX_TYPE
2113 || TREE_CODE (access->type) == VECTOR_TYPE)
2115 if (!access->grp_partial_lhs)
2116 DECL_GIMPLE_REG_P (repl) = 1;
2118 else if (access->grp_partial_lhs
2119 && is_gimple_reg_type (access->type))
2120 TREE_ADDRESSABLE (repl) = 1;
2122 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2123 DECL_ARTIFICIAL (repl) = 1;
2124 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2126 if (DECL_NAME (access->base)
2127 && !DECL_IGNORED_P (access->base)
2128 && !DECL_ARTIFICIAL (access->base))
2130 char *pretty_name = make_fancy_name (access->expr);
2131 tree debug_expr = unshare_expr_without_location (access->expr), d;
2132 bool fail = false;
2134 DECL_NAME (repl) = get_identifier (pretty_name);
2135 obstack_free (&name_obstack, pretty_name);
2137 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2138 as DECL_DEBUG_EXPR isn't considered when looking for still
2139 used SSA_NAMEs and thus they could be freed. All debug info
2140        generation cares about is whether something is constant or variable
2141 and that get_ref_base_and_extent works properly on the
2142 expression. It cannot handle accesses at a non-constant offset
2143 though, so just give up in those cases. */
2144 for (d = debug_expr;
2145 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2146 d = TREE_OPERAND (d, 0))
2147 switch (TREE_CODE (d))
2149 case ARRAY_REF:
2150 case ARRAY_RANGE_REF:
2151 if (TREE_OPERAND (d, 1)
2152 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2153 fail = true;
2154 if (TREE_OPERAND (d, 3)
2155 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2156 fail = true;
2157 /* FALLTHRU */
2158 case COMPONENT_REF:
2159 if (TREE_OPERAND (d, 2)
2160 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2161 fail = true;
2162 break;
2163 case MEM_REF:
2164 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2165 fail = true;
2166 else
2167 d = TREE_OPERAND (d, 0);
2168 break;
2169 default:
2170 break;
2172 if (!fail)
2174 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2175 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2177 if (access->grp_no_warning)
2178 TREE_NO_WARNING (repl) = 1;
2179 else
2180 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2182 else
2183 TREE_NO_WARNING (repl) = 1;
2185 if (dump_file)
2187 if (access->grp_to_be_debug_replaced)
2189 fprintf (dump_file, "Created a debug-only replacement for ");
2190 print_generic_expr (dump_file, access->base, 0);
2191 fprintf (dump_file, " offset: %u, size: %u\n",
2192 (unsigned) access->offset, (unsigned) access->size);
2194 else
2196 fprintf (dump_file, "Created a replacement for ");
2197 print_generic_expr (dump_file, access->base, 0);
2198 fprintf (dump_file, " offset: %u, size: %u: ",
2199 (unsigned) access->offset, (unsigned) access->size);
2200 print_generic_expr (dump_file, repl, 0);
2201 fprintf (dump_file, "\n");
2204 sra_stats.replacements++;
2206 return repl;
2209 /* Return the scalar replacement of ACCESS, which must exist.  */
2211 static inline tree
2212 get_access_replacement (struct access *access)
2214 gcc_checking_assert (access->replacement_decl);
2215 return access->replacement_decl;
2219 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2220 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2221    to by it is not "within" the root.  Return false iff some accesses partially
2222 overlap. */
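/* Illustrative example, added for exposition and not part of the original
   sources: for representatives <offset 0, size 64>, <0, 32> and <32, 32>,
   the 64-bit access becomes the root and the two 32-bit accesses become
   its children.  For <0, 64>, <0, 32> and <16, 32>, however, <16, 32>
   starts within the <0, 32> subtree but reaches beyond its limit, so this
   function returns false.  */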
2224 static bool
2225 build_access_subtree (struct access **access)
2227 struct access *root = *access, *last_child = NULL;
2228 HOST_WIDE_INT limit = root->offset + root->size;
2230 *access = (*access)->next_grp;
2231 while (*access && (*access)->offset + (*access)->size <= limit)
2233 if (!last_child)
2234 root->first_child = *access;
2235 else
2236 last_child->next_sibling = *access;
2237 last_child = *access;
2239 if (!build_access_subtree (access))
2240 return false;
2243 if (*access && (*access)->offset < limit)
2244 return false;
2246 return true;
2249 /* Build a tree of access representatives, ACCESS is the pointer to the first
2250 one, others are linked in a list by the next_grp field. Return false iff
2251 some accesses partially overlap. */
2253 static bool
2254 build_access_trees (struct access *access)
2256 while (access)
2258 struct access *root = access;
2260 if (!build_access_subtree (&access))
2261 return false;
2262 root->next_grp = access;
2264 return true;
2267 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2268    array (one whose low bound is not known at compile time).  */
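/* Illustrative example, added for exposition and not part of the original
   sources: a reference a.arr[i].f makes this return true when the low bound
   of arr's domain is not known at compile time (as can happen in Ada),
   while a C-style array with a constant zero low bound yields false even
   if the index i itself is variable.  */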
2270 static bool
2271 expr_with_var_bounded_array_refs_p (tree expr)
2273 while (handled_component_p (expr))
2275 if (TREE_CODE (expr) == ARRAY_REF
2276 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2277 return true;
2278 expr = TREE_OPERAND (expr, 0);
2280 return false;
2283 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2284    doing so both seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set
2285    all sorts of access flags appropriately along the way; notably, propagate
2286    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2287    grp_total_scalarization from PARENT down to ROOT.
2289 Creating a replacement for a scalar access is considered beneficial if its
2290 grp_hint is set (this means we are either attempting total scalarization or
2291 there is more than one direct read access) or according to the following
2292 table:
2294 Access written to through a scalar type (once or more times)
2296 | Written to in an assignment statement
2298 | | Access read as scalar _once_
2299 | | |
2300 | | | Read in an assignment statement
2301 | | | |
2302 | | | | Scalarize Comment
2303 -----------------------------------------------------------------------------
2304 0 0 0 0 No access for the scalar
2305 0 0 0 1 No access for the scalar
2306 0 0 1 0 No Single read - won't help
2307 0 0 1 1 No The same case
2308 0 1 0 0 No access for the scalar
2309 0 1 0 1 No access for the scalar
2310 0 1 1 0 Yes s = *g; return s.i;
2311 0 1 1 1 Yes The same case as above
2312 1 0 0 0 No Won't help
2313 1 0 0 1 Yes s.i = 1; *g = s;
2314 1 0 1 0 Yes s.i = 5; g = s.i;
2315 1 0 1 1 Yes The same case as above
2316 1 1 0 0 No Won't help.
2317 1 1 0 1 Yes s.i = 1; *g = s;
2318 1 1 1 0 Yes s = *g; return s.i;
2319 1 1 1 1 Yes Any of the above yeses */
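/* Worked example for one of the rows above, added for exposition and not
   part of the original sources: in

     struct S { int i; float f; };
     int foo (struct S *g) { struct S s; s = *g; return s.i; }

   s is written in an aggregate assignment and s.i is read as a scalar
   exactly once, which matches row "0 1 1 0", so a scalar replacement for
   s.i is created.  */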
2321 static bool
2322 analyze_access_subtree (struct access *root, struct access *parent,
2323 bool allow_replacements)
2325 struct access *child;
2326 HOST_WIDE_INT limit = root->offset + root->size;
2327 HOST_WIDE_INT covered_to = root->offset;
2328 bool scalar = is_gimple_reg_type (root->type);
2329 bool hole = false, sth_created = false;
2331 if (parent)
2333 if (parent->grp_read)
2334 root->grp_read = 1;
2335 if (parent->grp_assignment_read)
2336 root->grp_assignment_read = 1;
2337 if (parent->grp_write)
2338 root->grp_write = 1;
2339 if (parent->grp_assignment_write)
2340 root->grp_assignment_write = 1;
2341 if (parent->grp_total_scalarization)
2342 root->grp_total_scalarization = 1;
2345 if (root->grp_unscalarizable_region)
2346 allow_replacements = false;
2348 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2349 allow_replacements = false;
2351 for (child = root->first_child; child; child = child->next_sibling)
2353 hole |= covered_to < child->offset;
2354 sth_created |= analyze_access_subtree (child, root,
2355 allow_replacements && !scalar);
2357 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2358 root->grp_total_scalarization &= child->grp_total_scalarization;
2359 if (child->grp_covered)
2360 covered_to += child->size;
2361 else
2362 hole = true;
2365 if (allow_replacements && scalar && !root->first_child
2366 && (root->grp_hint
2367 || ((root->grp_scalar_read || root->grp_assignment_read)
2368 && (root->grp_scalar_write || root->grp_assignment_write))))
2370 /* Always create access replacements that cover the whole access.
2371 For integral types this means the precision has to match.
2372 Avoid assumptions based on the integral type kind, too. */
2373 if (INTEGRAL_TYPE_P (root->type)
2374 && (TREE_CODE (root->type) != INTEGER_TYPE
2375 || TYPE_PRECISION (root->type) != root->size)
2376 /* But leave bitfield accesses alone. */
2377 && (TREE_CODE (root->expr) != COMPONENT_REF
2378 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2380 tree rt = root->type;
2381 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2382 && (root->size % BITS_PER_UNIT) == 0);
2383 root->type = build_nonstandard_integer_type (root->size,
2384 TYPE_UNSIGNED (rt));
2385 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2386 root->offset, root->reverse,
2387 root->type, NULL, false);
2389 if (dump_file && (dump_flags & TDF_DETAILS))
2391 fprintf (dump_file, "Changing the type of a replacement for ");
2392 print_generic_expr (dump_file, root->base, 0);
2393 fprintf (dump_file, " offset: %u, size: %u ",
2394 (unsigned) root->offset, (unsigned) root->size);
2395 fprintf (dump_file, " to an integer.\n");
2399 root->grp_to_be_replaced = 1;
2400 root->replacement_decl = create_access_replacement (root);
2401 sth_created = true;
2402 hole = false;
2404 else
2406 if (allow_replacements
2407 && scalar && !root->first_child
2408 && (root->grp_scalar_write || root->grp_assignment_write)
2409 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2410 DECL_UID (root->base)))
2412 gcc_checking_assert (!root->grp_scalar_read
2413 && !root->grp_assignment_read);
2414 sth_created = true;
2415 if (MAY_HAVE_DEBUG_STMTS)
2417 root->grp_to_be_debug_replaced = 1;
2418 root->replacement_decl = create_access_replacement (root);
2422 if (covered_to < limit)
2423 hole = true;
2424 if (scalar || !allow_replacements)
2425 root->grp_total_scalarization = 0;
2428 if (!hole || root->grp_total_scalarization)
2429 root->grp_covered = 1;
2430 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL
2431 || constant_decl_p (root->base))
2432 root->grp_unscalarized_data = 1; /* not covered and written to */
2433 return sth_created;
2436 /* Analyze all access trees linked by next_grp by the means of
2437 analyze_access_subtree. */
2438 static bool
2439 analyze_access_trees (struct access *access)
2441 bool ret = false;
2443 while (access)
2445 if (analyze_access_subtree (access, NULL, true))
2446 ret = true;
2447 access = access->next_grp;
2450 return ret;
2453 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2454 SIZE would conflict with an already existing one. If exactly such a child
2455 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2457 static bool
2458 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2459 HOST_WIDE_INT size, struct access **exact_match)
2461 struct access *child;
2463 for (child = lacc->first_child; child; child = child->next_sibling)
2465 if (child->offset == norm_offset && child->size == size)
2467 *exact_match = child;
2468 return true;
2471 if (child->offset < norm_offset + size
2472 && child->offset + child->size > norm_offset)
2473 return true;
2476 return false;
2479 /* Create a new child access of PARENT, with all properties just like MODEL
2480 except for its offset and with its grp_write false and grp_read true.
2481 Return the new access or NULL if it cannot be created. Note that this access
2482    is created long after all splicing and sorting; it is not located in any
2483 access vector and is automatically a representative of its group. */
2485 static struct access *
2486 create_artificial_child_access (struct access *parent, struct access *model,
2487 HOST_WIDE_INT new_offset)
2489 struct access **child;
2490 tree expr = parent->base;
2492 gcc_assert (!model->grp_unscalarizable_region);
2494 struct access *access = access_pool.allocate ();
2495 memset (access, 0, sizeof (struct access));
2496 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2497 model->type))
2499 access->grp_no_warning = true;
2500 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2501 new_offset, model, NULL, false);
2504 access->base = parent->base;
2505 access->expr = expr;
2506 access->offset = new_offset;
2507 access->size = model->size;
2508 access->type = model->type;
2509 access->grp_write = true;
2510 access->grp_read = false;
2511 access->reverse = model->reverse;
2513 child = &parent->first_child;
2514 while (*child && (*child)->offset < new_offset)
2515 child = &(*child)->next_sibling;
2517 access->next_sibling = *child;
2518 *child = access;
2520 return access;
2524 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2525 true if any new subaccess was created. Additionally, if RACC is a scalar
2526 access but LACC is not, change the type of the latter, if possible. */
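/* Illustrative example, added for exposition and not part of the original
   sources: for candidate aggregates l and r of the same structure type and
   a statement l = r, an assign_link connects their accesses.  If only r.f
   has a known scalar access, this function creates an artificial child
   access of l at the corresponding offset, so that l.f also obtains a
   replacement and the aggregate copy can later become scalar copies.  */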
2528 static bool
2529 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2531 struct access *rchild;
2532 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2533 bool ret = false;
2535 if (is_gimple_reg_type (lacc->type)
2536 || lacc->grp_unscalarizable_region
2537 || racc->grp_unscalarizable_region)
2538 return false;
2540 if (is_gimple_reg_type (racc->type))
2542 if (!lacc->first_child && !racc->first_child)
2544 tree t = lacc->base;
2546 lacc->type = racc->type;
2547 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2548 lacc->offset, racc->type))
2549 lacc->expr = t;
2550 else
2552 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2553 lacc->base, lacc->offset,
2554 racc, NULL, false);
2555 lacc->grp_no_warning = true;
2558 return false;
2561 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2563 struct access *new_acc = NULL;
2564 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2566 if (rchild->grp_unscalarizable_region)
2567 continue;
2569 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2570 &new_acc))
2572 if (new_acc)
2574 rchild->grp_hint = 1;
2575 new_acc->grp_hint |= new_acc->grp_read;
2576 if (rchild->first_child)
2577 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2579 continue;
2582 rchild->grp_hint = 1;
2583 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2584 if (new_acc)
2586 ret = true;
2587 if (racc->first_child)
2588 propagate_subaccesses_across_link (new_acc, rchild);
2592 return ret;
2595 /* Propagate all subaccesses across assignment links. */
2597 static void
2598 propagate_all_subaccesses (void)
2600 while (work_queue_head)
2602 struct access *racc = pop_access_from_work_queue ();
2603 struct assign_link *link;
2605 gcc_assert (racc->first_link);
2607 for (link = racc->first_link; link; link = link->next)
2609 struct access *lacc = link->lacc;
2611 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2612 continue;
2613 lacc = lacc->group_representative;
2614 if (propagate_subaccesses_across_link (lacc, racc)
2615 && lacc->first_link)
2616 add_access_to_work_queue (lacc);
2621 /* Go through all accesses collected throughout the (intraprocedural) analysis
2622 stage, exclude overlapping ones, identify representatives and build trees
2623 out of them, making decisions about scalarization on the way. Return true
2624 iff there are any to-be-scalarized variables after this stage. */
2626 static bool
2627 analyze_all_variable_accesses (void)
2629 int res = 0;
2630 bitmap tmp = BITMAP_ALLOC (NULL);
2631 bitmap_iterator bi;
2632 unsigned i;
2633 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2635 enum compiler_param param = optimize_speed_p
2636 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2637 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2639 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2640 fall back to a target default. */
2641 unsigned HOST_WIDE_INT max_scalarization_size
2642 = global_options_set.x_param_values[param]
2643 ? PARAM_VALUE (param)
2644 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2646 max_scalarization_size *= BITS_PER_UNIT;
2648 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2649 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2650 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2652 tree var = candidate (i);
2654 if (TREE_CODE (var) == VAR_DECL
2655 && scalarizable_type_p (TREE_TYPE (var)))
2657 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2658 <= max_scalarization_size)
2660 create_total_scalarization_access (var);
2661 completely_scalarize (var, TREE_TYPE (var), 0, var);
2662 if (dump_file && (dump_flags & TDF_DETAILS))
2664 fprintf (dump_file, "Will attempt to totally scalarize ");
2665 print_generic_expr (dump_file, var, 0);
2666 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2669 else if (dump_file && (dump_flags & TDF_DETAILS))
2671 fprintf (dump_file, "Too big to totally scalarize: ");
2672 print_generic_expr (dump_file, var, 0);
2673 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2678 bitmap_copy (tmp, candidate_bitmap);
2679 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2681 tree var = candidate (i);
2682 struct access *access;
2684 access = sort_and_splice_var_accesses (var);
2685 if (!access || !build_access_trees (access))
2686 disqualify_candidate (var,
2687 "No or inhibitingly overlapping accesses.");
2690 propagate_all_subaccesses ();
2692 bitmap_copy (tmp, candidate_bitmap);
2693 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2695 tree var = candidate (i);
2696 struct access *access = get_first_repr_for_decl (var);
2698 if (analyze_access_trees (access))
2700 res++;
2701 if (dump_file && (dump_flags & TDF_DETAILS))
2703 fprintf (dump_file, "\nAccess trees for ");
2704 print_generic_expr (dump_file, var, 0);
2705 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2706 dump_access_tree (dump_file, access);
2707 fprintf (dump_file, "\n");
2710 else
2711 disqualify_candidate (var, "No scalar replacements to be created.");
2714 BITMAP_FREE (tmp);
2716 if (res)
2718 statistics_counter_event (cfun, "Scalarized aggregates", res);
2719 return true;
2721 else
2722 return false;
2725 /* Generate statements copying scalar replacements of accesses within a subtree
2726 into or out of AGG. ACCESS, all its children, siblings and their children
2727 are to be processed. AGG is an aggregate type expression (can be a
2728 declaration but does not have to be, it can for example also be a mem_ref or
2729 a series of handled components). TOP_OFFSET is the offset of the processed
2730 subtree which has to be subtracted from offsets of individual accesses to
2731    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2732 replacements in the interval <start_offset, start_offset + chunk_size>,
2733 otherwise copy all. GSI is a statement iterator used to place the new
2734 statements. WRITE should be true when the statements should write from AGG
2735    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2736 statements will be added after the current statement in GSI, they will be
2737 added before the statement otherwise. */
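/* Illustrative example, added for exposition and not part of the original
   sources: if the subtree describes a variable of type

     struct S { int i; float f; };

   with replacements SR_i and SR_f, and AGG is an aggregate expression a,
   then WRITE == true emits statements of the form

     SR_i = a.i;
     SR_f = a.f;

   whereas WRITE == false emits the stores a.i = SR_i; a.f = SR_f;.  */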
2739 static void
2740 generate_subtree_copies (struct access *access, tree agg,
2741 HOST_WIDE_INT top_offset,
2742 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2743 gimple_stmt_iterator *gsi, bool write,
2744 bool insert_after, location_t loc)
2748 if (chunk_size && access->offset >= start_offset + chunk_size)
2749 return;
2751 if (access->grp_to_be_replaced
2752 && (chunk_size == 0
2753 || access->offset + access->size > start_offset))
2755 tree expr, repl = get_access_replacement (access);
2756 gassign *stmt;
2758 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2759 access, gsi, insert_after);
2761 if (write)
2763 if (access->grp_partial_lhs)
2764 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2765 !insert_after,
2766 insert_after ? GSI_NEW_STMT
2767 : GSI_SAME_STMT);
2768 stmt = gimple_build_assign (repl, expr);
2770 else
2772 TREE_NO_WARNING (repl) = 1;
2773 if (access->grp_partial_lhs)
2774 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2775 !insert_after,
2776 insert_after ? GSI_NEW_STMT
2777 : GSI_SAME_STMT);
2778 stmt = gimple_build_assign (expr, repl);
2780 gimple_set_location (stmt, loc);
2782 if (insert_after)
2783 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2784 else
2785 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2786 update_stmt (stmt);
2787 sra_stats.subtree_copies++;
2789 else if (write
2790 && access->grp_to_be_debug_replaced
2791 && (chunk_size == 0
2792 || access->offset + access->size > start_offset))
2794 gdebug *ds;
2795 tree drhs = build_debug_ref_for_model (loc, agg,
2796 access->offset - top_offset,
2797 access);
2798 ds = gimple_build_debug_bind (get_access_replacement (access),
2799 drhs, gsi_stmt (*gsi));
2800 if (insert_after)
2801 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2802 else
2803 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2806 if (access->first_child)
2807 generate_subtree_copies (access->first_child, agg, top_offset,
2808 start_offset, chunk_size, gsi,
2809 write, insert_after, loc);
2811 access = access->next_sibling;
2813 while (access);
2816 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2817 root of the subtree to be processed. GSI is the statement iterator used
2818 for inserting statements which are added after the current statement if
2819 INSERT_AFTER is true or before it otherwise. */
2821 static void
2822 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2823 bool insert_after, location_t loc)
2826 struct access *child;
2828 if (access->grp_to_be_replaced)
2830 gassign *stmt;
2832 stmt = gimple_build_assign (get_access_replacement (access),
2833 build_zero_cst (access->type));
2834 if (insert_after)
2835 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2836 else
2837 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2838 update_stmt (stmt);
2839 gimple_set_location (stmt, loc);
2841 else if (access->grp_to_be_debug_replaced)
2843 gdebug *ds
2844 = gimple_build_debug_bind (get_access_replacement (access),
2845 build_zero_cst (access->type),
2846 gsi_stmt (*gsi));
2847 if (insert_after)
2848 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2849 else
2850 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2853 for (child = access->first_child; child; child = child->next_sibling)
2854 init_subtree_with_zero (child, gsi, insert_after, loc);
2857 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2858 root of the subtree to be processed. GSI is the statement iterator used
2859 for inserting statements which are added after the current statement if
2860 INSERT_AFTER is true or before it otherwise. */
2862 static void
2863 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2864 bool insert_after, location_t loc)
2867 struct access *child;
2869 if (access->grp_to_be_replaced)
2871 tree rep = get_access_replacement (access);
2872 tree clobber = build_constructor (access->type, NULL);
2873 TREE_THIS_VOLATILE (clobber) = 1;
2874 gimple *stmt = gimple_build_assign (rep, clobber);
2876 if (insert_after)
2877 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2878 else
2879 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2880 update_stmt (stmt);
2881 gimple_set_location (stmt, loc);
2884 for (child = access->first_child; child; child = child->next_sibling)
2885 clobber_subtree (child, gsi, insert_after, loc);
2888 /* Search for an access representative for the given expression EXPR and
2889 return it or NULL if it cannot be found. */
2891 static struct access *
2892 get_access_for_expr (tree expr)
2894 HOST_WIDE_INT offset, size, max_size;
2895 tree base;
2896 bool reverse;
2898 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2899 a different size than the size of its argument and we need the latter
2900 one. */
2901 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2902 expr = TREE_OPERAND (expr, 0);
2904 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2905 if (max_size == -1 || !DECL_P (base))
2906 return NULL;
2908 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2909 return NULL;
2911 return get_var_base_offset_size_access (base, offset, max_size);
2914 /* Replace the expression EXPR with a scalar replacement if there is one and
2915 generate other statements to do type conversion or subtree copying if
2916 necessary. GSI is used to place newly created statements, WRITE is true if
2917    the expression is being written to (it is on the LHS of a statement or an
2918    output in an assembly statement).  */
2920 static bool
2921 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2923 location_t loc;
2924 struct access *access;
2925 tree type, bfr, orig_expr;
2927 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2929 bfr = *expr;
2930 expr = &TREE_OPERAND (*expr, 0);
2932 else
2933 bfr = NULL_TREE;
2935 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2936 expr = &TREE_OPERAND (*expr, 0);
2937 access = get_access_for_expr (*expr);
2938 if (!access)
2939 return false;
2940 type = TREE_TYPE (*expr);
2941 orig_expr = *expr;
2943 loc = gimple_location (gsi_stmt (*gsi));
2944 gimple_stmt_iterator alt_gsi = gsi_none ();
2945 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2947 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2948 gsi = &alt_gsi;
2951 if (access->grp_to_be_replaced)
2953 tree repl = get_access_replacement (access);
2954 /* If we replace a non-register typed access simply use the original
2955 access expression to extract the scalar component afterwards.
2956 This happens if scalarizing a function return value or parameter
2957 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2958 gcc.c-torture/compile/20011217-1.c.
2960 We also want to use this when accessing a complex or vector which can
2961 be accessed as a different type too, potentially creating a need for
2962 type conversion (see PR42196) and when scalarized unions are involved
2963 in assembler statements (see PR42398). */
2964 if (!useless_type_conversion_p (type, access->type))
2966 tree ref;
2968 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2970 if (write)
2972 gassign *stmt;
2974 if (access->grp_partial_lhs)
2975 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2976 false, GSI_NEW_STMT);
2977 stmt = gimple_build_assign (repl, ref);
2978 gimple_set_location (stmt, loc);
2979 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2981 else
2983 gassign *stmt;
2985 if (access->grp_partial_lhs)
2986 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2987 true, GSI_SAME_STMT);
2988 stmt = gimple_build_assign (ref, repl);
2989 gimple_set_location (stmt, loc);
2990 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2993 else
2994 *expr = repl;
2995 sra_stats.exprs++;
2997 else if (write && access->grp_to_be_debug_replaced)
2999 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3000 NULL_TREE,
3001 gsi_stmt (*gsi));
3002 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3005 if (access->first_child)
3007 HOST_WIDE_INT start_offset, chunk_size;
3008 if (bfr
3009 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3010 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3012 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3013 start_offset = access->offset
3014 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3016 else
3017 start_offset = chunk_size = 0;
3019 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3020 start_offset, chunk_size, gsi, write, write,
3021 loc);
3023 return true;
3026 /* Where scalar replacements of the RHS have been written to when a replacement
3027    of the LHS of an assignment cannot be directly loaded from a replacement of
3028 the RHS. */
3029 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3030 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3031 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3033 struct subreplacement_assignment_data
3035 /* Offset of the access representing the lhs of the assignment. */
3036 HOST_WIDE_INT left_offset;
3038 /* LHS and RHS of the original assignment. */
3039 tree assignment_lhs, assignment_rhs;
3041 /* Access representing the rhs of the whole assignment. */
3042 struct access *top_racc;
3044 /* Stmt iterator used for statement insertions after the original assignment.
3045 It points to the main GSI used to traverse a BB during function body
3046 modification. */
3047 gimple_stmt_iterator *new_gsi;
3049 /* Stmt iterator used for statement insertions before the original
3050 assignment. Keeps on pointing to the original statement. */
3051 gimple_stmt_iterator old_gsi;
3053 /* Location of the assignment. */
3054 location_t loc;
3056 /* Keeps the information whether we have needed to refresh replacements of
3057 the LHS and from which side of the assignments this takes place. */
3058 enum unscalarized_data_handling refreshed;
3061 /* Store all replacements in the access tree rooted in SAD->top_racc either
3062    to their base aggregate if there are unscalarized data or directly to the
3063    LHS of the original assignment statement otherwise.  */
3065 static void
3066 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3068 tree src;
3069 if (sad->top_racc->grp_unscalarized_data)
3071 src = sad->assignment_rhs;
3072 sad->refreshed = SRA_UDH_RIGHT;
3074 else
3076 src = sad->assignment_lhs;
3077 sad->refreshed = SRA_UDH_LEFT;
3079 generate_subtree_copies (sad->top_racc->first_child, src,
3080 sad->top_racc->offset, 0, 0,
3081 &sad->old_gsi, false, false, sad->loc);
3084 /* Try to generate statements to load all sub-replacements in an access subtree
3085 formed by children of LACC from scalar replacements in the SAD->top_racc
3086 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3087 and load the accesses from it. */
3089 static void
3090 load_assign_lhs_subreplacements (struct access *lacc,
3091 struct subreplacement_assignment_data *sad)
3093 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3095 HOST_WIDE_INT offset;
3096 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3098 if (lacc->grp_to_be_replaced)
3100 struct access *racc;
3101 gassign *stmt;
3102 tree rhs;
3104 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3105 if (racc && racc->grp_to_be_replaced)
3107 rhs = get_access_replacement (racc);
3108 if (!useless_type_conversion_p (lacc->type, racc->type))
3109 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3110 lacc->type, rhs);
3112 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3113 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3114 NULL_TREE, true, GSI_SAME_STMT);
3116 else
3118 /* No suitable access on the right hand side, need to load from
3119 the aggregate. See if we have to update it first... */
3120 if (sad->refreshed == SRA_UDH_NONE)
3121 handle_unscalarized_data_in_subtree (sad);
3123 if (sad->refreshed == SRA_UDH_LEFT)
3124 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3125 lacc->offset - sad->left_offset,
3126 lacc, sad->new_gsi, true);
3127 else
3128 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3129 lacc->offset - sad->left_offset,
3130 lacc, sad->new_gsi, true);
3131 if (lacc->grp_partial_lhs)
3132 rhs = force_gimple_operand_gsi (sad->new_gsi,
3133 rhs, true, NULL_TREE,
3134 false, GSI_NEW_STMT);
3137 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3138 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3139 gimple_set_location (stmt, sad->loc);
3140 update_stmt (stmt);
3141 sra_stats.subreplacements++;
3143 else
3145 if (sad->refreshed == SRA_UDH_NONE
3146 && lacc->grp_read && !lacc->grp_covered)
3147 handle_unscalarized_data_in_subtree (sad);
3149 if (lacc && lacc->grp_to_be_debug_replaced)
3151 gdebug *ds;
3152 tree drhs;
3153 struct access *racc = find_access_in_subtree (sad->top_racc,
3154 offset,
3155 lacc->size);
3157 if (racc && racc->grp_to_be_replaced)
3159 if (racc->grp_write || constant_decl_p (racc->base))
3160 drhs = get_access_replacement (racc);
3161 else
3162 drhs = NULL;
3164 else if (sad->refreshed == SRA_UDH_LEFT)
3165 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3166 lacc->offset, lacc);
3167 else if (sad->refreshed == SRA_UDH_RIGHT)
3168 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3169 offset, lacc);
3170 else
3171 drhs = NULL_TREE;
3172 if (drhs
3173 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3174 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3175 lacc->type, drhs);
3176 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3177 drhs, gsi_stmt (sad->old_gsi));
3178 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3182 if (lacc->first_child)
3183 load_assign_lhs_subreplacements (lacc, sad);
3187 /* Result code for SRA assignment modification. */
3188 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3189 SRA_AM_MODIFIED, /* stmt changed but not
3190 removed */
3191 SRA_AM_REMOVED }; /* stmt eliminated */
3193 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3194 to the assignment and GSI is the statement iterator pointing at it. Returns
3195 the same values as sra_modify_assign. */
3197 static enum assignment_mod_result
3198 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3200 tree lhs = gimple_assign_lhs (stmt);
3201 struct access *acc = get_access_for_expr (lhs);
3202 if (!acc)
3203 return SRA_AM_NONE;
3204 location_t loc = gimple_location (stmt);
3206 if (gimple_clobber_p (stmt))
3208 /* Clobber the replacement variable. */
3209 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3210 /* Remove clobbers of fully scalarized variables, they are dead. */
3211 if (acc->grp_covered)
3213 unlink_stmt_vdef (stmt);
3214 gsi_remove (gsi, true);
3215 release_defs (stmt);
3216 return SRA_AM_REMOVED;
3218 else
3219 return SRA_AM_MODIFIED;
3222 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3224       /* I have never seen this code path trigger, but if it can happen, the
3225 	 following should handle it gracefully.  */
3226 if (access_has_children_p (acc))
3227 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3228 true, true, loc);
3229 return SRA_AM_MODIFIED;
3232 if (acc->grp_covered)
3234 init_subtree_with_zero (acc, gsi, false, loc);
3235 unlink_stmt_vdef (stmt);
3236 gsi_remove (gsi, true);
3237 release_defs (stmt);
3238 return SRA_AM_REMOVED;
3240 else
3242 init_subtree_with_zero (acc, gsi, true, loc);
3243 return SRA_AM_MODIFIED;
3247 /* Create and return a new suitable default definition SSA_NAME for RACC which
3248 is an access describing an uninitialized part of an aggregate that is being
3249 loaded. */
3251 static tree
3252 get_repl_default_def_ssa_name (struct access *racc)
3254 gcc_checking_assert (!racc->grp_to_be_replaced
3255 && !racc->grp_to_be_debug_replaced);
3256 if (!racc->replacement_decl)
3257 racc->replacement_decl = create_access_replacement (racc);
3258 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3261 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3262 bit-field field declaration somewhere in it. */
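/* Illustrative example, added for exposition and not part of the original
   sources: both VIEW_CONVERT_EXPR<int>(x.f) and an access x.b, where b is
   declared as a bit-field, make this predicate return true, while a plain
   x.f does not.  */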
3264 static inline bool
3265 contains_vce_or_bfcref_p (const_tree ref)
3267 while (handled_component_p (ref))
3269 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3270 || (TREE_CODE (ref) == COMPONENT_REF
3271 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3272 return true;
3273 ref = TREE_OPERAND (ref, 0);
3276 return false;
3279 /* Examine both sides of the assignment statement pointed to by STMT, replace
3280    them with a scalar replacement if there is one and generate copying of
3281 replacements if scalarized aggregates have been used in the assignment. GSI
3282 is used to hold generated statements for type conversions and subtree
3283 copying. */
3285 static enum assignment_mod_result
3286 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3288 struct access *lacc, *racc;
3289 tree lhs, rhs;
3290 bool modify_this_stmt = false;
3291 bool force_gimple_rhs = false;
3292 location_t loc;
3293 gimple_stmt_iterator orig_gsi = *gsi;
3295 if (!gimple_assign_single_p (stmt))
3296 return SRA_AM_NONE;
3297 lhs = gimple_assign_lhs (stmt);
3298 rhs = gimple_assign_rhs1 (stmt);
3300 if (TREE_CODE (rhs) == CONSTRUCTOR)
3301 return sra_modify_constructor_assign (stmt, gsi);
3303 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3304 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3305 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3307 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3308 gsi, false);
3309 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3310 gsi, true);
3311 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3314 lacc = get_access_for_expr (lhs);
3315 racc = get_access_for_expr (rhs);
3316 if (!lacc && !racc)
3317 return SRA_AM_NONE;
3318 /* Avoid modifying initializations of constant-pool replacements. */
3319 if (racc && (racc->replacement_decl == lhs))
3320 return SRA_AM_NONE;
3322 loc = gimple_location (stmt);
3323 if (lacc && lacc->grp_to_be_replaced)
3325 lhs = get_access_replacement (lacc);
3326 gimple_assign_set_lhs (stmt, lhs);
3327 modify_this_stmt = true;
3328 if (lacc->grp_partial_lhs)
3329 force_gimple_rhs = true;
3330 sra_stats.exprs++;
3333 if (racc && racc->grp_to_be_replaced)
3335 rhs = get_access_replacement (racc);
3336 modify_this_stmt = true;
3337 if (racc->grp_partial_lhs)
3338 force_gimple_rhs = true;
3339 sra_stats.exprs++;
3341 else if (racc
3342 && !racc->grp_unscalarized_data
3343 && !racc->grp_unscalarizable_region
3344 && TREE_CODE (lhs) == SSA_NAME
3345 && !access_has_replacements_p (racc))
3347 rhs = get_repl_default_def_ssa_name (racc);
3348 modify_this_stmt = true;
3349 sra_stats.exprs++;
3352 if (modify_this_stmt)
3354 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3356 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3357 ??? This should move to fold_stmt which we simply should
3358 call after building a VIEW_CONVERT_EXPR here. */
3359 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3360 && !contains_bitfld_component_ref_p (lhs))
3362 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3363 gimple_assign_set_lhs (stmt, lhs);
3365 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3366 && !contains_vce_or_bfcref_p (rhs))
3367 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3369 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3371 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3372 rhs);
3373 if (is_gimple_reg_type (TREE_TYPE (lhs))
3374 && TREE_CODE (lhs) != SSA_NAME)
3375 force_gimple_rhs = true;
3380 if (lacc && lacc->grp_to_be_debug_replaced)
3382 tree dlhs = get_access_replacement (lacc);
3383 tree drhs = unshare_expr (rhs);
3384 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3386 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3387 && !contains_vce_or_bfcref_p (drhs))
3388 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3389 if (drhs
3390 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3391 TREE_TYPE (drhs)))
3392 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3393 TREE_TYPE (dlhs), drhs);
3395 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3396 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3399 /* From this point on, the function deals with assignments in between
3400 aggregates when at least one has scalar reductions of some of its
3401    components.  There are three possible scenarios: 1) both the LHS and RHS have
3402    to-be-scalarized components, 2) only the RHS does or 3) only the LHS does.
3404 In the first case, we would like to load the LHS components from RHS
3405 components whenever possible. If that is not possible, we would like to
3406 read it directly from the RHS (after updating it by storing in it its own
3407 components). If there are some necessary unscalarized data in the LHS,
3408 those will be loaded by the original assignment too. If neither of these
3409 cases happen, the original statement can be removed. Most of this is done
3410 by load_assign_lhs_subreplacements.
3412 In the second case, we would like to store all RHS scalarized components
3413 directly into LHS and if they cover the aggregate completely, remove the
3414 statement too. In the third case, we want the LHS components to be loaded
3415 directly from the RHS (DSE will remove the original statement if it
3416 becomes redundant).
3418 This is a bit complex but manageable when types match and when unions do
3419 not cause confusion in a way that we cannot really load a component of LHS
3420 from the RHS or vice versa (the access representing this level can have
3421 subaccesses that are accessible only through a different union field at a
3422 higher level - different from the one used in the examined expression).
3423 Unions are fun.
3425 Therefore, I specially handle a fourth case, happening when there is a
3426 specific type cast or it is impossible to locate a scalarized subaccess on
3427 the other side of the expression. If that happens, I simply "refresh" the
3428    RHS by storing its scalarized components back into it, leave the original
3429    statement there to do the copying and then load the scalar replacements of the LHS.
3430 This is what the first branch does. */
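/* Illustrative sketch of the fourth case, added for exposition and not
   part of the original sources: for

     union U { struct A a; struct B b; } u, v;

   where only u.a.x and v.b.y have scalar replacements, the statement
   u = v has no scalarized subaccess of v corresponding to u.a.x.  The RHS
   is therefore refreshed by storing the replacement of v.b.y back into v,
   the aggregate copy u = v is kept, and the replacement of u.a.x is then
   re-loaded from the copied data.  */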
3432 if (modify_this_stmt
3433 || gimple_has_volatile_ops (stmt)
3434 || contains_vce_or_bfcref_p (rhs)
3435 || contains_vce_or_bfcref_p (lhs)
3436 || stmt_ends_bb_p (stmt))
3438 /* No need to copy into a constant-pool, it comes pre-initialized. */
3439 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3440 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3441 gsi, false, false, loc);
3442 if (access_has_children_p (lacc))
3444 gimple_stmt_iterator alt_gsi = gsi_none ();
3445 if (stmt_ends_bb_p (stmt))
3447 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3448 gsi = &alt_gsi;
3450 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3451 gsi, true, true, loc);
3453 sra_stats.separate_lhs_rhs_handling++;
3455 /* This gimplification must be done after generate_subtree_copies,
3456 lest we insert the subtree copies in the middle of the gimplified
3457 sequence. */
3458 if (force_gimple_rhs)
3459 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3460 true, GSI_SAME_STMT);
3461 if (gimple_assign_rhs1 (stmt) != rhs)
3463 modify_this_stmt = true;
3464 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3465 gcc_assert (stmt == gsi_stmt (orig_gsi));
3468 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3470 else
3472 if (access_has_children_p (lacc)
3473 && access_has_children_p (racc)
3474 /* When an access represents an unscalarizable region, it usually
3475 represents accesses with variable offset and thus must not be used
3476 to generate new memory accesses. */
3477 && !lacc->grp_unscalarizable_region
3478 && !racc->grp_unscalarizable_region)
3480 struct subreplacement_assignment_data sad;
3482 sad.left_offset = lacc->offset;
3483 sad.assignment_lhs = lhs;
3484 sad.assignment_rhs = rhs;
3485 sad.top_racc = racc;
3486 sad.old_gsi = *gsi;
3487 sad.new_gsi = gsi;
3488 sad.loc = gimple_location (stmt);
3489 sad.refreshed = SRA_UDH_NONE;
3491 if (lacc->grp_read && !lacc->grp_covered)
3492 handle_unscalarized_data_in_subtree (&sad);
3494 load_assign_lhs_subreplacements (lacc, &sad);
3495 if (sad.refreshed != SRA_UDH_RIGHT)
3497 gsi_next (gsi);
3498 unlink_stmt_vdef (stmt);
3499 gsi_remove (&sad.old_gsi, true);
3500 release_defs (stmt);
3501 sra_stats.deleted++;
3502 return SRA_AM_REMOVED;
3505 else
3507 if (access_has_children_p (racc)
3508 && !racc->grp_unscalarized_data
3509 && TREE_CODE (lhs) != SSA_NAME)
3511 if (dump_file)
3513 fprintf (dump_file, "Removing load: ");
3514 print_gimple_stmt (dump_file, stmt, 0, 0);
3516 generate_subtree_copies (racc->first_child, lhs,
3517 racc->offset, 0, 0, gsi,
3518 false, false, loc);
3519 gcc_assert (stmt == gsi_stmt (*gsi));
3520 unlink_stmt_vdef (stmt);
3521 gsi_remove (gsi, true);
3522 release_defs (stmt);
3523 sra_stats.deleted++;
3524 return SRA_AM_REMOVED;
3526 /* Restore the aggregate RHS from its components so the
3527 prevailing aggregate copy does the right thing. */
3528 if (access_has_children_p (racc))
3529 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3530 gsi, false, false, loc);
3531 /* Re-load the components of the aggregate copy destination.
3532 	 But load them from the RHS aggregate to expose more
3533 optimization opportunities. */
3534 if (access_has_children_p (lacc))
3535 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3536 0, 0, gsi, true, true, loc);
3539 return SRA_AM_NONE;
3543 /* Set any scalar replacements of values in the constant pool to the initial
3544 value of the constant. (Constant-pool decls like *.LC0 have effectively
3545    been initialized before the program starts; we must do the same for their
3546 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3547 the function's entry block. */
3549 static void
3550 initialize_constant_pool_replacements (void)
3552 gimple_seq seq = NULL;
3553 gimple_stmt_iterator gsi = gsi_start (seq);
3554 bitmap_iterator bi;
3555 unsigned i;
3557 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3558 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3559 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3561 tree var = candidate (i);
3562 if (!constant_decl_p (var))
3563 continue;
3564 vec<access_p> *access_vec = get_base_access_vector (var);
3565 if (!access_vec)
3566 continue;
3567 for (unsigned i = 0; i < access_vec->length (); i++)
3569 struct access *access = (*access_vec)[i];
3570 if (!access->replacement_decl)
3571 continue;
3572 gassign *stmt = gimple_build_assign (
3573 get_access_replacement (access), unshare_expr (access->expr));
3574 if (dump_file && (dump_flags & TDF_DETAILS))
3576 fprintf (dump_file, "Generating constant initializer: ");
3577 print_gimple_stmt (dump_file, stmt, 0, 1);
3578 fprintf (dump_file, "\n");
3580 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3581 update_stmt (stmt);
3585 seq = gsi_seq (gsi);
3586 if (seq)
3587 gsi_insert_seq_on_edge_immediate (
3588 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3591 /* Traverse the function body and perform all modifications as decided in
3592 analyze_all_variable_accesses. Return true iff the CFG has been
3593 changed. */
3595 static bool
3596 sra_modify_function_body (void)
3598 bool cfg_changed = false;
3599 basic_block bb;
3601 initialize_constant_pool_replacements ();
3603 FOR_EACH_BB_FN (bb, cfun)
3605 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3606 while (!gsi_end_p (gsi))
3608 gimple *stmt = gsi_stmt (gsi);
3609 enum assignment_mod_result assign_result;
3610 bool modified = false, deleted = false;
3611 tree *t;
3612 unsigned i;
3614 switch (gimple_code (stmt))
3616 case GIMPLE_RETURN:
3617 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3618 if (*t != NULL_TREE)
3619 modified |= sra_modify_expr (t, &gsi, false);
3620 break;
3622 case GIMPLE_ASSIGN:
3623 assign_result = sra_modify_assign (stmt, &gsi);
3624 modified |= assign_result == SRA_AM_MODIFIED;
3625 deleted = assign_result == SRA_AM_REMOVED;
3626 break;
3628 case GIMPLE_CALL:
3629 /* Operands must be processed before the lhs. */
3630 for (i = 0; i < gimple_call_num_args (stmt); i++)
3632 t = gimple_call_arg_ptr (stmt, i);
3633 modified |= sra_modify_expr (t, &gsi, false);
3636 if (gimple_call_lhs (stmt))
3638 t = gimple_call_lhs_ptr (stmt);
3639 modified |= sra_modify_expr (t, &gsi, true);
3641 break;
3643 case GIMPLE_ASM:
3645 gasm *asm_stmt = as_a <gasm *> (stmt);
3646 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3648 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3649 modified |= sra_modify_expr (t, &gsi, false);
3651 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3653 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3654 modified |= sra_modify_expr (t, &gsi, true);
3657 break;
3659 default:
3660 break;
3663 if (modified)
3665 update_stmt (stmt);
3666 if (maybe_clean_eh_stmt (stmt)
3667 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3668 cfg_changed = true;
3670 if (!deleted)
3671 gsi_next (&gsi);
3675 gsi_commit_edge_inserts ();
3676 return cfg_changed;
3679 /* Generate statements initializing scalar replacements of parts of function
3680 parameters. */
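/* Illustrative example, added for exposition and not part of the original
   sources: for

     int foo (struct S s) { ... }

   where s has scalar replacements SR_i and SR_f, the sequence inserted on
   the edge out of the entry block loads them from the incoming parameter:

     SR_i = s.i;
     SR_f = s.f;  */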
3682 static void
3683 initialize_parameter_reductions (void)
3685 gimple_stmt_iterator gsi;
3686 gimple_seq seq = NULL;
3687 tree parm;
3689 gsi = gsi_start (seq);
3690 for (parm = DECL_ARGUMENTS (current_function_decl);
3691 parm;
3692 parm = DECL_CHAIN (parm))
3694 vec<access_p> *access_vec;
3695 struct access *access;
3697 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3698 continue;
3699 access_vec = get_base_access_vector (parm);
3700 if (!access_vec)
3701 continue;
3703 for (access = (*access_vec)[0];
3704 access;
3705 access = access->next_grp)
3706 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3707 EXPR_LOCATION (parm));
3710 seq = gsi_seq (gsi);
3711 if (seq)
3712 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3715 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3716 it reveals there are components of some aggregates to be scalarized, it runs
3717 the required transformations. */
3718 static unsigned int
3719 perform_intra_sra (void)
3721 int ret = 0;
3722 sra_initialize ();
3724 if (!find_var_candidates ())
3725 goto out;
3727 if (!scan_function ())
3728 goto out;
3730 if (!analyze_all_variable_accesses ())
3731 goto out;
3733 if (sra_modify_function_body ())
3734 ret = TODO_update_ssa | TODO_cleanup_cfg;
3735 else
3736 ret = TODO_update_ssa;
3737 initialize_parameter_reductions ();
3739 statistics_counter_event (cfun, "Scalar replacements created",
3740 sra_stats.replacements);
3741 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3742 statistics_counter_event (cfun, "Subtree copy stmts",
3743 sra_stats.subtree_copies);
3744 statistics_counter_event (cfun, "Subreplacement stmts",
3745 sra_stats.subreplacements);
3746 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3747 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3748 sra_stats.separate_lhs_rhs_handling);
3750 out:
3751 sra_deinitialize ();
3752 return ret;
3755 /* Perform early intraprocedural SRA. */
3756 static unsigned int
3757 early_intra_sra (void)
3759 sra_mode = SRA_MODE_EARLY_INTRA;
3760 return perform_intra_sra ();
3763 /* Perform "late" intraprocedural SRA. */
3764 static unsigned int
3765 late_intra_sra (void)
3767 sra_mode = SRA_MODE_INTRA;
3768 return perform_intra_sra ();
3772 static bool
3773 gate_intra_sra (void)
3775 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3779 namespace {
3781 const pass_data pass_data_sra_early =
3783 GIMPLE_PASS, /* type */
3784 "esra", /* name */
3785 OPTGROUP_NONE, /* optinfo_flags */
3786 TV_TREE_SRA, /* tv_id */
3787 ( PROP_cfg | PROP_ssa ), /* properties_required */
3788 0, /* properties_provided */
3789 0, /* properties_destroyed */
3790 0, /* todo_flags_start */
3791 TODO_update_ssa, /* todo_flags_finish */
3794 class pass_sra_early : public gimple_opt_pass
3796 public:
3797 pass_sra_early (gcc::context *ctxt)
3798 : gimple_opt_pass (pass_data_sra_early, ctxt)
3801 /* opt_pass methods: */
3802 virtual bool gate (function *) { return gate_intra_sra (); }
3803 virtual unsigned int execute (function *) { return early_intra_sra (); }
3805 }; // class pass_sra_early
3807 } // anon namespace
3809 gimple_opt_pass *
3810 make_pass_sra_early (gcc::context *ctxt)
3812 return new pass_sra_early (ctxt);
3815 namespace {
3817 const pass_data pass_data_sra =
3819 GIMPLE_PASS, /* type */
3820 "sra", /* name */
3821 OPTGROUP_NONE, /* optinfo_flags */
3822 TV_TREE_SRA, /* tv_id */
3823 ( PROP_cfg | PROP_ssa ), /* properties_required */
3824 0, /* properties_provided */
3825 0, /* properties_destroyed */
3826 TODO_update_address_taken, /* todo_flags_start */
3827 TODO_update_ssa, /* todo_flags_finish */
3830 class pass_sra : public gimple_opt_pass
3832 public:
3833 pass_sra (gcc::context *ctxt)
3834 : gimple_opt_pass (pass_data_sra, ctxt)
3837 /* opt_pass methods: */
3838 virtual bool gate (function *) { return gate_intra_sra (); }
3839 virtual unsigned int execute (function *) { return late_intra_sra (); }
3841 }; // class pass_sra
3843 } // anon namespace
3845 gimple_opt_pass *
3846 make_pass_sra (gcc::context *ctxt)
3848 return new pass_sra (ctxt);
3852 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3853 parameter. */
3855 static bool
3856 is_unused_scalar_param (tree parm)
3858 tree name;
3859 return (is_gimple_reg (parm)
3860 && (!(name = ssa_default_def (cfun, parm))
3861 || has_zero_uses (name)));
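/* E.g. in

     int bar (int unused, int x) { return x; }

   the first parameter either has no default-definition SSA name at all or
   has one with zero uses, so it is reported as an unused scalar parameter
   and IPA-SRA may remove it from the signature altogether.  */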
3864 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3865 examine whether there are any direct or otherwise infeasible ones. If so,
3866 return true, otherwise return false. PARM must be a gimple register with a
3867 non-NULL default definition. */
3869 static bool
3870 ptr_parm_has_direct_uses (tree parm)
3872 imm_use_iterator ui;
3873 gimple *stmt;
3874 tree name = ssa_default_def (cfun, parm);
3875 bool ret = false;
3877 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3879 int uses_ok = 0;
3880 use_operand_p use_p;
3882 if (is_gimple_debug (stmt))
3883 continue;
3885 /* Valid uses include dereferences on the lhs and the rhs. */
3886 if (gimple_has_lhs (stmt))
3888 tree lhs = gimple_get_lhs (stmt);
3889 while (handled_component_p (lhs))
3890 lhs = TREE_OPERAND (lhs, 0);
3891 if (TREE_CODE (lhs) == MEM_REF
3892 && TREE_OPERAND (lhs, 0) == name
3893 && integer_zerop (TREE_OPERAND (lhs, 1))
3894 && types_compatible_p (TREE_TYPE (lhs),
3895 TREE_TYPE (TREE_TYPE (name)))
3896 && !TREE_THIS_VOLATILE (lhs))
3897 uses_ok++;
3899 if (gimple_assign_single_p (stmt))
3901 tree rhs = gimple_assign_rhs1 (stmt);
3902 while (handled_component_p (rhs))
3903 rhs = TREE_OPERAND (rhs, 0);
3904 if (TREE_CODE (rhs) == MEM_REF
3905 && TREE_OPERAND (rhs, 0) == name
3906 && integer_zerop (TREE_OPERAND (rhs, 1))
3907 && types_compatible_p (TREE_TYPE (rhs),
3908 TREE_TYPE (TREE_TYPE (name)))
3909 && !TREE_THIS_VOLATILE (rhs))
3910 uses_ok++;
3912 else if (is_gimple_call (stmt))
3914 unsigned i;
3915 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3917 tree arg = gimple_call_arg (stmt, i);
3918 while (handled_component_p (arg))
3919 arg = TREE_OPERAND (arg, 0);
3920 if (TREE_CODE (arg) == MEM_REF
3921 && TREE_OPERAND (arg, 0) == name
3922 && integer_zerop (TREE_OPERAND (arg, 1))
3923 && types_compatible_p (TREE_TYPE (arg),
3924 TREE_TYPE (TREE_TYPE (name)))
3925 && !TREE_THIS_VOLATILE (arg))
3926 uses_ok++;
3930 /* If the number of valid uses does not match the number of
3931 uses in this stmt, there is an unhandled use. */
3932 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3933 --uses_ok;
3935 if (uses_ok != 0)
3936 ret = true;
3938 if (ret)
3939 BREAK_FROM_IMM_USE_STMT (ui);
3942 return ret;
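/* To illustrate the distinction (a sketch; use_pointer is a hypothetical
   external function):

     int f (int *p)
     {
       int i = *p;
       use_pointer (p);
       return i;
     }

   The load from *p strips down to a MEM_REF based directly on the default
   definition of p, with a zero offset and a compatible type, so it is
   counted in uses_ok.  Passing p itself to use_pointer is not, so the
   per-statement use count does not match uses_ok and the function returns
   true, i.e. p has direct uses and is not an IPA-SRA candidate.  */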
3945 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3946 them in candidate_bitmap. Note that these do not necessarily include
3947 parameters which are unused and thus can be removed. Return true iff any
3948 such candidate has been found. */
3950 static bool
3951 find_param_candidates (void)
3953 tree parm;
3954 int count = 0;
3955 bool ret = false;
3956 const char *msg;
3958 for (parm = DECL_ARGUMENTS (current_function_decl);
3959 parm;
3960 parm = DECL_CHAIN (parm))
3962 tree type = TREE_TYPE (parm);
3963 tree_node **slot;
3965 count++;
3967 if (TREE_THIS_VOLATILE (parm)
3968 || TREE_ADDRESSABLE (parm)
3969 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3970 continue;
3972 if (is_unused_scalar_param (parm))
3974 ret = true;
3975 continue;
3978 if (POINTER_TYPE_P (type))
3980 type = TREE_TYPE (type);
3982 if (TREE_CODE (type) == FUNCTION_TYPE
3983 || TYPE_VOLATILE (type)
3984 || (TREE_CODE (type) == ARRAY_TYPE
3985 && TYPE_NONALIASED_COMPONENT (type))
3986 || !is_gimple_reg (parm)
3987 || is_va_list_type (type)
3988 || ptr_parm_has_direct_uses (parm))
3989 continue;
3991 else if (!AGGREGATE_TYPE_P (type))
3992 continue;
3994 if (!COMPLETE_TYPE_P (type)
3995 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3996 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3997 || (AGGREGATE_TYPE_P (type)
3998 && type_internals_preclude_sra_p (type, &msg)))
3999 continue;
4001 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4002 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4003 *slot = parm;
4005 ret = true;
4006 if (dump_file && (dump_flags & TDF_DETAILS))
4008 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4009 print_generic_expr (dump_file, parm, 0);
4010 fprintf (dump_file, "\n");
4014 func_param_count = count;
4015 return ret;
4018 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4019 maybe_modified. */
4021 static bool
4022 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4023 void *data)
4025 struct access *repr = (struct access *) data;
4027 repr->grp_maybe_modified = 1;
4028 return true;
4031 /* Analyze what representatives (in linked lists accessible from
4032 REPRESENTATIVES) can be modified by side effects of statements in the
4033 current function. */
4035 static void
4036 analyze_modified_params (vec<access_p> representatives)
4038 int i;
4040 for (i = 0; i < func_param_count; i++)
4042 struct access *repr;
4044 for (repr = representatives[i];
4045 repr;
4046 repr = repr->next_grp)
4048 struct access *access;
4049 bitmap visited;
4050 ao_ref ar;
4052 if (no_accesses_p (repr))
4053 continue;
4054 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4055 || repr->grp_maybe_modified)
4056 continue;
4058 ao_ref_init (&ar, repr->expr);
4059 visited = BITMAP_ALLOC (NULL);
4060 for (access = repr; access; access = access->next_sibling)
4062 /* All accesses are reads; otherwise grp_maybe_modified would already
4063 have been trivially set. */
4064 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4065 mark_maybe_modified, repr, &visited);
4066 if (repr->grp_maybe_modified)
4067 break;
4069 BITMAP_FREE (visited);
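/* For instance, in

     int g (int *p, int *q) { *q = 5; return *p; }

   the read of *p is walked backwards from its VUSE; the store to *q is an
   aliased virtual definition that may clobber *p (p and q may point to the
   same object), so the callback above sets grp_maybe_modified on the
   representative of p, and p cannot be converted into a by-value scalar.  */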
4074 /* Propagate distances in bb_dereferences in the opposite direction to the
4075 control flow edges, in each step storing the maximum of the current value
4076 and the minimum of all successors. These steps are repeated until the table
4077 stabilizes. Note that BBs which might terminate the function (according to
4078 the final_bbs bitmap) are never updated in this way. */
4080 static void
4081 propagate_dereference_distances (void)
4083 basic_block bb;
4085 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4086 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4087 FOR_EACH_BB_FN (bb, cfun)
4089 queue.quick_push (bb);
4090 bb->aux = bb;
4093 while (!queue.is_empty ())
4095 edge_iterator ei;
4096 edge e;
4097 bool change = false;
4098 int i;
4100 bb = queue.pop ();
4101 bb->aux = NULL;
4103 if (bitmap_bit_p (final_bbs, bb->index))
4104 continue;
4106 for (i = 0; i < func_param_count; i++)
4108 int idx = bb->index * func_param_count + i;
4109 bool first = true;
4110 HOST_WIDE_INT inh = 0;
4112 FOR_EACH_EDGE (e, ei, bb->succs)
4114 int succ_idx = e->dest->index * func_param_count + i;
4116 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4117 continue;
4119 if (first)
4121 first = false;
4122 inh = bb_dereferences [succ_idx];
4124 else if (bb_dereferences [succ_idx] < inh)
4125 inh = bb_dereferences [succ_idx];
4128 if (!first && bb_dereferences[idx] < inh)
4130 bb_dereferences[idx] = inh;
4131 change = true;
4135 if (change && !bitmap_bit_p (final_bbs, bb->index))
4136 FOR_EACH_EDGE (e, ei, bb->preds)
4138 if (e->src->aux)
4139 continue;
4141 e->src->aux = e->src;
4142 queue.quick_push (e->src);
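/* A small worked instance of the propagation step (an editorial sketch):
   assume a single pointer parameter and a block BB2 with successors BB3 and
   BB4, where bb_dereferences holds 32 for BB3 (its path loads a 32-bit
   field at offset 0) and 0 for BB4 (its path never dereferences).  Then
   inh = min (32, 0) = 0 and BB2's value is not raised; only when every
   successor path guarantees a dereference does a positive distance travel
   all the way up to the ENTRY block.  */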
4147 /* Dump a dereferences TABLE with heading STR to file F. */
4149 static void
4150 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4152 basic_block bb;
4154 fprintf (f, "%s", str);
4155 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4156 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4158 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4159 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4161 int i;
4162 for (i = 0; i < func_param_count; i++)
4164 int idx = bb->index * func_param_count + i;
4165 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4168 fprintf (f, "\n");
4170 fprintf (f, "\n");
4173 /* Determine which (parts of) parameters passed by reference that are not
4174 assigned to are not certainly dereferenced in this function, so that the
4175 dereferencing cannot be safely moved to the caller without potentially
4176 introducing a segfault. Mark such REPRESENTATIVES as
4177 grp_not_necessarilly_dereferenced.
4179 Rather than simple booleans, the maximum dereferenced "distance," i.e. the
4180 offset + size of the accessed part, is calculated for each pointer
4181 parameter, in order to handle cases when only a fraction of the whole
4182 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4183 an example).
4185 The maximum dereference distances for each pointer parameter and BB are
4186 already stored in bb_dereferences. This routine simply propagates these
4187 values upwards by propagate_dereference_distances and then compares the
4188 distances of individual parameters in the ENTRY BB to the equivalent
4189 distances of each representative of a (fraction of a) parameter. */
4191 static void
4192 analyze_caller_dereference_legality (vec<access_p> representatives)
4194 int i;
4196 if (dump_file && (dump_flags & TDF_DETAILS))
4197 dump_dereferences_table (dump_file,
4198 "Dereference table before propagation:\n",
4199 bb_dereferences);
4201 propagate_dereference_distances ();
4203 if (dump_file && (dump_flags & TDF_DETAILS))
4204 dump_dereferences_table (dump_file,
4205 "Dereference table after propagation:\n",
4206 bb_dereferences);
4208 for (i = 0; i < func_param_count; i++)
4210 struct access *repr = representatives[i];
4211 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4213 if (!repr || no_accesses_p (repr))
4214 continue;
4216 do
4217 {
4218 if ((repr->offset + repr->size) > bb_dereferences[idx])
4219 repr->grp_not_necessarilly_dereferenced = 1;
4220 repr = repr->next_grp;
4221 }
4222 while (repr);
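/* E.g. (a sketch):

     int h (struct S *p, int flag)
     {
       if (flag)
         return p->a;
       return 0;
     }

   On the flag == 0 path p is never dereferenced, so the distance recorded
   at the ENTRY block stays 0, which is smaller than offset + size of the
   access to p->a; the representative is therefore marked
   grp_not_necessarilly_dereferenced, because hoisting the load of p->a into
   callers could fault for calls that would have taken the early-return
   path.  */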
4226 /* Return the representative access for the parameter declaration PARM if it is
4227 a scalar passed by reference which is not written to and the pointer value
4228 is not used directly. Thus, if it is legal to dereference it in the caller
4229 and we can rule out modifications through aliases, such a parameter should be
4230 turned into one passed by value. Return NULL otherwise. */
4232 static struct access *
4233 unmodified_by_ref_scalar_representative (tree parm)
4235 int i, access_count;
4236 struct access *repr;
4237 vec<access_p> *access_vec;
4239 access_vec = get_base_access_vector (parm);
4240 gcc_assert (access_vec);
4241 repr = (*access_vec)[0];
4242 if (repr->write)
4243 return NULL;
4244 repr->group_representative = repr;
4246 access_count = access_vec->length ();
4247 for (i = 1; i < access_count; i++)
4249 struct access *access = (*access_vec)[i];
4250 if (access->write)
4251 return NULL;
4252 access->group_representative = repr;
4253 access->next_sibling = repr->next_sibling;
4254 repr->next_sibling = access;
4257 repr->grp_read = 1;
4258 repr->grp_scalar_ptr = 1;
4259 return repr;
4262 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4263 associated with. REQ_ALIGN is the minimum required alignment. */
4265 static bool
4266 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4268 unsigned int exp_align;
4269 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4270 is an incompatible assignment in a call statement (and possibly even in asm
4271 statements). This can be relaxed by using a new temporary but only for
4272 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4273 intraprocedural SRA we deal with this by keeping the old aggregate around,
4274 something we cannot do in IPA-SRA.) */
4275 if (access->write
4276 && (is_gimple_call (access->stmt)
4277 || gimple_code (access->stmt) == GIMPLE_ASM))
4278 return true;
4280 exp_align = get_object_alignment (access->expr);
4281 if (exp_align < req_align)
4282 return true;
4284 return false;
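/* E.g. for a by-reference parameter p (returns_struct being a hypothetical
   function returning the pointed-to aggregate type),

     *p = returns_struct ();

   is a write into the parameter performed directly by a call statement;
   redirecting it into scalar replacements would require an aggregate
   temporary, so the access precludes IPA-SRA, as do analogous asm outputs
   and accesses whose object alignment is below REQ_ALIGN.  */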
4288 /* Sort collected accesses for parameter PARM, identify representatives for
4289 each accessed region and link them together. Return NULL if there are
4290 different but overlapping accesses, return the special pointer value meaning
4291 there are no accesses for this parameter if that is the case, and return the
4292 first representative otherwise. Set *RO_GRP if there is a group of accesses
4293 with only read (i.e. no write) accesses. */
4295 static struct access *
4296 splice_param_accesses (tree parm, bool *ro_grp)
4298 int i, j, access_count, group_count;
4299 int agg_size, total_size = 0;
4300 struct access *access, *res, **prev_acc_ptr = &res;
4301 vec<access_p> *access_vec;
4303 access_vec = get_base_access_vector (parm);
4304 if (!access_vec)
4305 return &no_accesses_representant;
4306 access_count = access_vec->length ();
4308 access_vec->qsort (compare_access_positions);
4310 i = 0;
4311 total_size = 0;
4312 group_count = 0;
4313 while (i < access_count)
4315 bool modification;
4316 tree a1_alias_type;
4317 access = (*access_vec)[i];
4318 modification = access->write;
4319 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4320 return NULL;
4321 a1_alias_type = reference_alias_ptr_type (access->expr);
4323 /* Access is about to become group representative unless we find some
4324 nasty overlap which would preclude us from breaking this parameter
4325 apart. */
4327 j = i + 1;
4328 while (j < access_count)
4330 struct access *ac2 = (*access_vec)[j];
4331 if (ac2->offset != access->offset)
4333 /* All or nothing law for parameters. */
4334 if (access->offset + access->size > ac2->offset)
4335 return NULL;
4336 else
4337 break;
4339 else if (ac2->size != access->size)
4340 return NULL;
4342 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4343 || (ac2->type != access->type
4344 && (TREE_ADDRESSABLE (ac2->type)
4345 || TREE_ADDRESSABLE (access->type)))
4346 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4347 return NULL;
4349 modification |= ac2->write;
4350 ac2->group_representative = access;
4351 ac2->next_sibling = access->next_sibling;
4352 access->next_sibling = ac2;
4353 j++;
4356 group_count++;
4357 access->grp_maybe_modified = modification;
4358 if (!modification)
4359 *ro_grp = true;
4360 *prev_acc_ptr = access;
4361 prev_acc_ptr = &access->next_grp;
4362 total_size += access->size;
4363 i = j;
4366 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4367 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4368 else
4369 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4370 if (total_size >= agg_size)
4371 return NULL;
4373 gcc_assert (group_count > 0);
4374 return res;
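/* The "all or nothing law" in numbers (offsets and sizes are in bits, as
   everywhere in this file): accesses at (offset, size) of (0, 32) and
   (0, 32) merge into a single group, (0, 32) followed by (32, 32) become
   two disjoint groups, while (0, 64) followed by (32, 32) overlap without
   having identical positions, and two accesses sharing an offset but not a
   size also clash; in the last two cases the function returns NULL and the
   parameter stays intact.  */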
4377 /* Decide whether parameters with representative accesses given by REPR should
4378 be reduced into components. */
4380 static int
4381 decide_one_param_reduction (struct access *repr)
4383 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4384 bool by_ref;
4385 tree parm;
4387 parm = repr->base;
4388 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4389 gcc_assert (cur_parm_size > 0);
4391 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4393 by_ref = true;
4394 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4396 else
4398 by_ref = false;
4399 agg_size = cur_parm_size;
4402 if (dump_file)
4404 struct access *acc;
4405 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4406 print_generic_expr (dump_file, parm, 0);
4407 fprintf (dump_file, " (UID: %u):\n", DECL_UID (parm));
4408 for (acc = repr; acc; acc = acc->next_grp)
4409 dump_access (dump_file, acc, true);
4412 total_size = 0;
4413 new_param_count = 0;
4415 for (; repr; repr = repr->next_grp)
4417 gcc_assert (parm == repr->base);
4419 /* Taking the address of a non-addressable field is verboten. */
4420 if (by_ref && repr->non_addressable)
4421 return 0;
4423 /* Do not decompose a non-BLKmode param in a way that would
4424 create BLKmode params. Especially for by-reference passing
4425 (thus, pointer-type param) this is hardly worthwhile. */
4426 if (DECL_MODE (parm) != BLKmode
4427 && TYPE_MODE (repr->type) == BLKmode)
4428 return 0;
4430 if (!by_ref || (!repr->grp_maybe_modified
4431 && !repr->grp_not_necessarilly_dereferenced))
4432 total_size += repr->size;
4433 else
4434 total_size += cur_parm_size;
4436 new_param_count++;
4439 gcc_assert (new_param_count > 0);
4441 if (optimize_function_for_size_p (cfun))
4442 parm_size_limit = cur_parm_size;
4443 else
4444 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4445 * cur_parm_size);
4447 if (total_size < agg_size
4448 && total_size <= parm_size_limit)
4450 if (dump_file)
4451 fprintf (dump_file, " ....will be split into %i components\n",
4452 new_param_count);
4453 return new_param_count;
4455 else
4456 return 0;
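/* A back-of-the-envelope instance, assuming 64-bit pointers and an
   ipa-sra-ptr-growth-factor of 2: for a by-reference parameter pointing to
   a 128-bit struct of which only one 32-bit field is read (and provably
   dereferenced and unmodified), cur_parm_size = 64, agg_size = 128,
   parm_size_limit = 128 and total_size = 32, so both tests pass and one new
   component is reported.  Had the whole 128-bit aggregate been accessed,
   total_size would equal agg_size and the reduction would be rejected by
   the total_size < agg_size test.  */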
4459 /* The order of the following enums is important; we need to do extra work for
4460 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4461 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4462 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4464 /* Identify representatives of all accesses to all candidate parameters for
4465 IPA-SRA. Return result based on what representatives have been found. */
4467 static enum ipa_splicing_result
4468 splice_all_param_accesses (vec<access_p> &representatives)
4470 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4471 tree parm;
4472 struct access *repr;
4474 representatives.create (func_param_count);
4476 for (parm = DECL_ARGUMENTS (current_function_decl);
4477 parm;
4478 parm = DECL_CHAIN (parm))
4480 if (is_unused_scalar_param (parm))
4482 representatives.quick_push (&no_accesses_representant);
4483 if (result == NO_GOOD_ACCESS)
4484 result = UNUSED_PARAMS;
4486 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4487 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4488 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4490 repr = unmodified_by_ref_scalar_representative (parm);
4491 representatives.quick_push (repr);
4492 if (repr)
4493 result = UNMODIF_BY_REF_ACCESSES;
4495 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4497 bool ro_grp = false;
4498 repr = splice_param_accesses (parm, &ro_grp);
4499 representatives.quick_push (repr);
4501 if (repr && !no_accesses_p (repr))
4503 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4505 if (ro_grp)
4506 result = UNMODIF_BY_REF_ACCESSES;
4507 else if (result < MODIF_BY_REF_ACCESSES)
4508 result = MODIF_BY_REF_ACCESSES;
4510 else if (result < BY_VAL_ACCESSES)
4511 result = BY_VAL_ACCESSES;
4513 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4514 result = UNUSED_PARAMS;
4516 else
4517 representatives.quick_push (NULL);
4520 if (result == NO_GOOD_ACCESS)
4522 representatives.release ();
4523 return NO_GOOD_ACCESS;
4526 return result;
4529 /* Return the index of BASE in PARMS. Abort if it is not found. */
4531 static inline int
4532 get_param_index (tree base, vec<tree> parms)
4534 int i, len;
4536 len = parms.length ();
4537 for (i = 0; i < len; i++)
4538 if (parms[i] == base)
4539 return i;
4540 gcc_unreachable ();
4543 /* Convert the decisions made at the representative level into compact
4544 parameter adjustments. REPRESENTATIVES are pointers to first
4545 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4546 final number of adjustments. */
4548 static ipa_parm_adjustment_vec
4549 turn_representatives_into_adjustments (vec<access_p> representatives,
4550 int adjustments_count)
4552 vec<tree> parms;
4553 ipa_parm_adjustment_vec adjustments;
4554 tree parm;
4555 int i;
4557 gcc_assert (adjustments_count > 0);
4558 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4559 adjustments.create (adjustments_count);
4560 parm = DECL_ARGUMENTS (current_function_decl);
4561 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4563 struct access *repr = representatives[i];
4565 if (!repr || no_accesses_p (repr))
4567 struct ipa_parm_adjustment adj;
4569 memset (&adj, 0, sizeof (adj));
4570 adj.base_index = get_param_index (parm, parms);
4571 adj.base = parm;
4572 if (!repr)
4573 adj.op = IPA_PARM_OP_COPY;
4574 else
4575 adj.op = IPA_PARM_OP_REMOVE;
4576 adj.arg_prefix = "ISRA";
4577 adjustments.quick_push (adj);
4579 else
4581 struct ipa_parm_adjustment adj;
4582 int index = get_param_index (parm, parms);
4584 for (; repr; repr = repr->next_grp)
4586 memset (&adj, 0, sizeof (adj));
4587 gcc_assert (repr->base == parm);
4588 adj.base_index = index;
4589 adj.base = repr->base;
4590 adj.type = repr->type;
4591 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4592 adj.offset = repr->offset;
4593 adj.reverse = repr->reverse;
4594 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4595 && (repr->grp_maybe_modified
4596 || repr->grp_not_necessarilly_dereferenced));
4597 adj.arg_prefix = "ISRA";
4598 adjustments.quick_push (adj);
4602 parms.release ();
4603 return adjustments;
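/* For example (a sketch), given

     void f (int a, struct S *p);

   where a gets a NULL representative (kept as-is) and p gets one
   representative covering a 64-bit field at offset 0, the resulting vector
   could hold an IPA_PARM_OP_COPY adjustment with base_index 0 for a, and a
   new-parameter adjustment with base_index 1, offset 0 and the field's type
   for p, so that later stages rewrite callers to pass the field's value
   (or, with by_ref set, its address) instead of the whole pointer.  */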
4606 /* Analyze the collected accesses and produce a plan for what to do with the
4607 parameters in the form of adjustments, a NULL vector meaning no changes. */
4609 static ipa_parm_adjustment_vec
4610 analyze_all_param_acesses (void)
4612 enum ipa_splicing_result repr_state;
4613 bool proceed = false;
4614 int i, adjustments_count = 0;
4615 vec<access_p> representatives;
4616 ipa_parm_adjustment_vec adjustments;
4618 repr_state = splice_all_param_accesses (representatives);
4619 if (repr_state == NO_GOOD_ACCESS)
4620 return ipa_parm_adjustment_vec ();
4622 /* If there are any parameters passed by reference which are not modified
4623 directly, we need to check whether they can be modified indirectly. */
4624 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4626 analyze_caller_dereference_legality (representatives);
4627 analyze_modified_params (representatives);
4630 for (i = 0; i < func_param_count; i++)
4632 struct access *repr = representatives[i];
4634 if (repr && !no_accesses_p (repr))
4636 if (repr->grp_scalar_ptr)
4638 adjustments_count++;
4639 if (repr->grp_not_necessarilly_dereferenced
4640 || repr->grp_maybe_modified)
4641 representatives[i] = NULL;
4642 else
4644 proceed = true;
4645 sra_stats.scalar_by_ref_to_by_val++;
4648 else
4650 int new_components = decide_one_param_reduction (repr);
4652 if (new_components == 0)
4654 representatives[i] = NULL;
4655 adjustments_count++;
4657 else
4659 adjustments_count += new_components;
4660 sra_stats.aggregate_params_reduced++;
4661 sra_stats.param_reductions_created += new_components;
4662 proceed = true;
4666 else
4668 if (no_accesses_p (repr))
4670 proceed = true;
4671 sra_stats.deleted_unused_parameters++;
4673 adjustments_count++;
4677 if (!proceed && dump_file)
4678 fprintf (dump_file, "NOT proceeding to change params.\n");
4680 if (proceed)
4681 adjustments = turn_representatives_into_adjustments (representatives,
4682 adjustments_count);
4683 else
4684 adjustments = ipa_parm_adjustment_vec ();
4686 representatives.release ();
4687 return adjustments;
4690 /* If a parameter replacement identified by ADJ does not yet exist in the form
4691 of a declaration, create and record it; otherwise return the previously
4692 created one. */
4694 static tree
4695 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4697 tree repl;
4698 if (!adj->new_ssa_base)
4700 char *pretty_name = make_fancy_name (adj->base);
4702 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4703 DECL_NAME (repl) = get_identifier (pretty_name);
4704 obstack_free (&name_obstack, pretty_name);
4706 adj->new_ssa_base = repl;
4708 else
4709 repl = adj->new_ssa_base;
4710 return repl;
4713 /* Find the first adjustment for a particular parameter BASE in a vector of
4714 ADJUSTMENTS which is not a copy (IPA_PARM_OP_COPY). Return NULL if there is
4715 no such adjustment. */
4717 static struct ipa_parm_adjustment *
4718 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4720 int i, len;
4722 len = adjustments.length ();
4723 for (i = 0; i < len; i++)
4725 struct ipa_parm_adjustment *adj;
4727 adj = &adjustments[i];
4728 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4729 return adj;
4732 return NULL;
4735 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4736 parameter which is to be removed because its value is not used, create a new
4737 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4738 original with it and return it. If there is no need to re-map, return NULL.
4739 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4741 static tree
4742 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4743 ipa_parm_adjustment_vec adjustments)
4745 struct ipa_parm_adjustment *adj;
4746 tree decl, repl, new_name;
4748 if (TREE_CODE (old_name) != SSA_NAME)
4749 return NULL;
4751 decl = SSA_NAME_VAR (old_name);
4752 if (decl == NULL_TREE
4753 || TREE_CODE (decl) != PARM_DECL)
4754 return NULL;
4756 adj = get_adjustment_for_base (adjustments, decl);
4757 if (!adj)
4758 return NULL;
4760 repl = get_replaced_param_substitute (adj);
4761 new_name = make_ssa_name (repl, stmt);
4762 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4763 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4765 if (dump_file)
4767 fprintf (dump_file, "replacing an SSA name of a removed param ");
4768 print_generic_expr (dump_file, old_name, 0);
4769 fprintf (dump_file, " with ");
4770 print_generic_expr (dump_file, new_name, 0);
4771 fprintf (dump_file, "\n");
4774 replace_uses_by (old_name, new_name);
4775 return new_name;
4778 /* If the statement STMT contains any expressions that need to be replaced with
4779 a different one as noted by ADJUSTMENTS, do so. Handle any potential type
4780 incompatibilities (GSI is used to accommodate conversion statements and must
4781 point to the statement). Return true iff the statement was modified. */
4783 static bool
4784 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4785 ipa_parm_adjustment_vec adjustments)
4787 tree *lhs_p, *rhs_p;
4788 bool any;
4790 if (!gimple_assign_single_p (stmt))
4791 return false;
4793 rhs_p = gimple_assign_rhs1_ptr (stmt);
4794 lhs_p = gimple_assign_lhs_ptr (stmt);
4796 any = ipa_modify_expr (rhs_p, false, adjustments);
4797 any |= ipa_modify_expr (lhs_p, false, adjustments);
4798 if (any)
4800 tree new_rhs = NULL_TREE;
4802 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4804 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4806 /* V_C_Es of constructors can cause trouble (PR 42714). */
4807 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4808 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4809 else
4810 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4811 NULL);
4813 else
4814 new_rhs = fold_build1_loc (gimple_location (stmt),
4815 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4816 *rhs_p);
4818 else if (REFERENCE_CLASS_P (*rhs_p)
4819 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4820 && !is_gimple_reg (*lhs_p))
4821 /* This can happen when an assignment between two single-field
4822 structures is turned into an assignment between two pointers to
4823 scalars (PR 42237). */
4824 new_rhs = *rhs_p;
4826 if (new_rhs)
4828 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4829 true, GSI_SAME_STMT);
4831 gimple_assign_set_rhs_from_tree (gsi, tmp);
4834 return true;
4837 return false;
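/* E.g. when a parameter of type struct S { float f; } is replaced by a
   plain float replacement (call it p$f; the name is illustrative), an
   original

     s_var = param;

   would become type-incompatible; the code above instead emits, via
   force_gimple_operand_gsi, something of the shape

     tmp = VIEW_CONVERT_EXPR<struct S>(p$f);
     s_var = tmp;

   keeping the assignment well-typed without introducing V_C_Es of
   constructors.  */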
4840 /* Traverse the function body and perform all modifications described in
4841 ADJUSTMENTS. Return true iff the CFG has been changed. */
4843 bool
4844 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4846 bool cfg_changed = false;
4847 basic_block bb;
4849 FOR_EACH_BB_FN (bb, cfun)
4851 gimple_stmt_iterator gsi;
4853 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4855 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4856 tree new_lhs, old_lhs = gimple_phi_result (phi);
4857 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4858 if (new_lhs)
4860 gimple_phi_set_result (phi, new_lhs);
4861 release_ssa_name (old_lhs);
4865 gsi = gsi_start_bb (bb);
4866 while (!gsi_end_p (gsi))
4868 gimple *stmt = gsi_stmt (gsi);
4869 bool modified = false;
4870 tree *t;
4871 unsigned i;
4873 switch (gimple_code (stmt))
4875 case GIMPLE_RETURN:
4876 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4877 if (*t != NULL_TREE)
4878 modified |= ipa_modify_expr (t, true, adjustments);
4879 break;
4881 case GIMPLE_ASSIGN:
4882 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4883 break;
4885 case GIMPLE_CALL:
4886 /* Operands must be processed before the lhs. */
4887 for (i = 0; i < gimple_call_num_args (stmt); i++)
4889 t = gimple_call_arg_ptr (stmt, i);
4890 modified |= ipa_modify_expr (t, true, adjustments);
4893 if (gimple_call_lhs (stmt))
4895 t = gimple_call_lhs_ptr (stmt);
4896 modified |= ipa_modify_expr (t, false, adjustments);
4898 break;
4900 case GIMPLE_ASM:
4902 gasm *asm_stmt = as_a <gasm *> (stmt);
4903 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4905 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4906 modified |= ipa_modify_expr (t, true, adjustments);
4908 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4910 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4911 modified |= ipa_modify_expr (t, false, adjustments);
4914 break;
4916 default:
4917 break;
4920 def_operand_p defp;
4921 ssa_op_iter iter;
4922 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4924 tree old_def = DEF_FROM_PTR (defp);
4925 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
4926 adjustments))
4928 SET_DEF (defp, new_def);
4929 release_ssa_name (old_def);
4930 modified = true;
4934 if (modified)
4936 update_stmt (stmt);
4937 if (maybe_clean_eh_stmt (stmt)
4938 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4939 cfg_changed = true;
4941 gsi_next (&gsi);
4945 return cfg_changed;
4948 /* Call gimple_debug_bind_reset_value on all debug statements describing
4949 gimple register parameters that are being removed or replaced. */
4951 static void
4952 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4954 int i, len;
4955 gimple_stmt_iterator *gsip = NULL, gsi;
4957 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4959 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4960 gsip = &gsi;
4962 len = adjustments.length ();
4963 for (i = 0; i < len; i++)
4965 struct ipa_parm_adjustment *adj;
4966 imm_use_iterator ui;
4967 gimple *stmt;
4968 gdebug *def_temp;
4969 tree name, vexpr, copy = NULL_TREE;
4970 use_operand_p use_p;
4972 adj = &adjustments[i];
4973 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4974 continue;
4975 name = ssa_default_def (cfun, adj->base);
4976 vexpr = NULL;
4977 if (name)
4978 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4980 if (gimple_clobber_p (stmt))
4982 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4983 unlink_stmt_vdef (stmt);
4984 gsi_remove (&cgsi, true);
4985 release_defs (stmt);
4986 continue;
4988 /* All other users must have been removed by
4989 ipa_sra_modify_function_body. */
4990 gcc_assert (is_gimple_debug (stmt));
4991 if (vexpr == NULL && gsip != NULL)
4993 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4994 vexpr = make_node (DEBUG_EXPR_DECL);
4995 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4996 NULL);
4997 DECL_ARTIFICIAL (vexpr) = 1;
4998 TREE_TYPE (vexpr) = TREE_TYPE (name);
4999 DECL_MODE (vexpr) = DECL_MODE (adj->base);
5000 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5002 if (vexpr)
5004 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5005 SET_USE (use_p, vexpr);
5007 else
5008 gimple_debug_bind_reset_value (stmt);
5009 update_stmt (stmt);
5011 /* Create a VAR_DECL for debug info purposes. */
5012 if (!DECL_IGNORED_P (adj->base))
5014 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5015 VAR_DECL, DECL_NAME (adj->base),
5016 TREE_TYPE (adj->base));
5017 if (DECL_PT_UID_SET_P (adj->base))
5018 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5019 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5020 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5021 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5022 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5023 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5024 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5025 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5026 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5027 SET_DECL_RTL (copy, 0);
5028 TREE_USED (copy) = 1;
5029 DECL_CONTEXT (copy) = current_function_decl;
5030 add_local_decl (cfun, copy);
5031 DECL_CHAIN (copy) =
5032 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5033 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5035 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5037 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5038 if (vexpr)
5039 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5040 else
5041 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5042 NULL);
5043 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
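/* Sketched in terms of the GIMPLE dump syntax, for a removed
   gimple-register parameter i the function leaves behind statements of the
   shape

     # DEBUG D#1 s=> i
     # DEBUG i => D#1

   i.e. a source bind of a debug expression to the removed PARM_DECL
   followed by binds of the replacement VAR_DECL, so a debugger can still
   display "i" in frames of the modified function even though no actual
   location holds it.  */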
5048 /* Return true if some caller of NODE passes fewer actual arguments than the
5049 current function has formal parameters, or passes arguments whose types do
5050 not match; return false otherwise. */
5052 static bool
5053 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5054 void *data ATTRIBUTE_UNUSED)
5056 struct cgraph_edge *cs;
5057 for (cs = node->callers; cs; cs = cs->next_caller)
5058 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5059 return true;
5061 return false;
5064 /* Return true iff some caller of NODE has no VUSE attached to its call statement. */
5066 static bool
5067 some_callers_have_no_vuse_p (struct cgraph_node *node,
5068 void *data ATTRIBUTE_UNUSED)
5070 struct cgraph_edge *cs;
5071 for (cs = node->callers; cs; cs = cs->next_caller)
5072 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5073 return true;
5075 return false;
5078 /* Convert all callers of NODE. */
5080 static bool
5081 convert_callers_for_node (struct cgraph_node *node,
5082 void *data)
5084 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5085 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5086 struct cgraph_edge *cs;
5088 for (cs = node->callers; cs; cs = cs->next_caller)
5090 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5092 if (dump_file)
5093 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
5094 xstrdup_for_dump (cs->caller->name ()),
5095 cs->caller->order,
5096 xstrdup_for_dump (cs->callee->name ()),
5097 cs->callee->order);
5099 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5101 pop_cfun ();
5104 for (cs = node->callers; cs; cs = cs->next_caller)
5105 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5106 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5107 compute_inline_parameters (cs->caller, true);
5108 BITMAP_FREE (recomputed_callers);
5110 return true;
5113 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5115 static void
5116 convert_callers (struct cgraph_node *node, tree old_decl,
5117 ipa_parm_adjustment_vec adjustments)
5119 basic_block this_block;
5121 node->call_for_symbol_and_aliases (convert_callers_for_node,
5122 &adjustments, false);
5124 if (!encountered_recursive_call)
5125 return;
5127 FOR_EACH_BB_FN (this_block, cfun)
5129 gimple_stmt_iterator gsi;
5131 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5133 gcall *stmt;
5134 tree call_fndecl;
5135 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5136 if (!stmt)
5137 continue;
5138 call_fndecl = gimple_call_fndecl (stmt);
5139 if (call_fndecl == old_decl)
5141 if (dump_file)
5142 fprintf (dump_file, "Adjusting recursive call\n");
5143 gimple_call_set_fndecl (stmt, node->decl);
5144 ipa_modify_call_arguments (NULL, stmt, adjustments);
5149 return;
5152 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
5153 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5155 static bool
5156 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5158 struct cgraph_node *new_node;
5159 bool cfg_changed;
5161 cgraph_edge::rebuild_edges ();
5162 free_dominance_info (CDI_DOMINATORS);
5163 pop_cfun ();
5165 /* This must be done after rebuilding cgraph edges for node above.
5166 Otherwise any recursive calls to node that are recorded in
5167 redirect_callers will be corrupted. */
5168 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5169 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5170 NULL, false, NULL, NULL,
5171 "isra");
5172 redirect_callers.release ();
5174 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5175 ipa_modify_formal_parameters (current_function_decl, adjustments);
5176 cfg_changed = ipa_sra_modify_function_body (adjustments);
5177 sra_ipa_reset_debug_stmts (adjustments);
5178 convert_callers (new_node, node->decl, adjustments);
5179 new_node->make_local ();
5180 return cfg_changed;
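/* E.g. a function foo with one reducible parameter is materialized as a new
   clone (named with the "isra" suffix passed above, typically foo.isra.N);
   redirect_callers re-points all known call sites at the clone before its
   body and formal parameters are rewritten, and make_local () is valid
   precisely because every surviving caller lives in this compilation
   unit.  */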
5183 /* Means of communication between ipa_sra_check_caller and
5184 ipa_sra_preliminary_function_checks. */
5186 struct ipa_sra_check_caller_data
5188 bool has_callers;
5189 bool bad_arg_alignment;
5190 bool has_thunk;
5193 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5194 ipa_sra_check_caller_data. Also check whether all aggregate arguments in all
5195 known calls are unit-aligned and, if they are not, set the appropriate flag
5196 in DATA too. */
5198 static bool
5199 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5201 if (!node->callers)
5202 return false;
5204 struct ipa_sra_check_caller_data *iscc;
5205 iscc = (struct ipa_sra_check_caller_data *) data;
5206 iscc->has_callers = true;
5208 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5210 if (cs->caller->thunk.thunk_p)
5212 iscc->has_thunk = true;
5213 return true;
5215 gimple *call_stmt = cs->call_stmt;
5216 unsigned count = gimple_call_num_args (call_stmt);
5217 for (unsigned i = 0; i < count; i++)
5219 tree arg = gimple_call_arg (call_stmt, i);
5220 if (is_gimple_reg (arg))
5221 continue;
5223 tree offset;
5224 HOST_WIDE_INT bitsize, bitpos;
5225 machine_mode mode;
5226 int unsignedp, reversep, volatilep = 0;
5227 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5228 &unsignedp, &reversep, &volatilep, false);
5229 if (bitpos % BITS_PER_UNIT)
5231 iscc->bad_arg_alignment = true;
5232 return true;
5237 return false;
5240 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5241 its attributes; return true otherwise. NODE is the cgraph node of the
5242 current function. */
5244 static bool
5245 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5247 if (!node->can_be_local_p ())
5249 if (dump_file)
5250 fprintf (dump_file, "Function not local to this compilation unit.\n");
5251 return false;
5254 if (!node->local.can_change_signature)
5256 if (dump_file)
5257 fprintf (dump_file, "Function can not change signature.\n");
5258 return false;
5261 if (!tree_versionable_function_p (node->decl))
5263 if (dump_file)
5264 fprintf (dump_file, "Function is not versionable.\n");
5265 return false;
5268 if (!opt_for_fn (node->decl, optimize)
5269 || !opt_for_fn (node->decl, flag_ipa_sra))
5271 if (dump_file)
5272 fprintf (dump_file, "Function not optimized.\n");
5273 return false;
5276 if (DECL_VIRTUAL_P (current_function_decl))
5278 if (dump_file)
5279 fprintf (dump_file, "Function is a virtual method.\n");
5280 return false;
5283 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5284 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5286 if (dump_file)
5287 fprintf (dump_file, "Function too big to be made truly local.\n");
5288 return false;
5291 if (cfun->stdarg)
5293 if (dump_file)
5294 fprintf (dump_file, "Function uses stdarg.\n");
5295 return false;
5298 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5299 return false;
5301 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5303 if (dump_file)
5304 fprintf (dump_file, "Always inline function will be inlined "
5305 "anyway.\n");
5306 return false;
5309 struct ipa_sra_check_caller_data iscc;
5310 memset (&iscc, 0, sizeof (iscc));
5311 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5312 if (!iscc.has_callers)
5314 if (dump_file)
5315 fprintf (dump_file,
5316 "Function has no callers in this compilation unit.\n");
5317 return false;
5320 if (iscc.bad_arg_alignment)
5322 if (dump_file)
5323 fprintf (dump_file,
5324 "A function call has an argument with non-unit alignment.\n");
5325 return false;
5328 if (iscc.has_thunk)
5330 if (dump_file)
5331 fprintf (dump_file,
5332 "A has thunk.\n");
5333 return false;
5336 return true;
5339 /* Perform early interprocedural SRA. */
5341 static unsigned int
5342 ipa_early_sra (void)
5344 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5345 ipa_parm_adjustment_vec adjustments;
5346 int ret = 0;
5348 if (!ipa_sra_preliminary_function_checks (node))
5349 return 0;
5351 sra_initialize ();
5352 sra_mode = SRA_MODE_EARLY_IPA;
5354 if (!find_param_candidates ())
5356 if (dump_file)
5357 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5358 goto simple_out;
5361 if (node->call_for_symbol_and_aliases
5362 (some_callers_have_mismatched_arguments_p, NULL, true))
5364 if (dump_file)
5365 fprintf (dump_file, "There are callers with insufficient number of "
5366 "arguments or arguments with type mismatches.\n");
5367 goto simple_out;
5370 if (node->call_for_symbol_and_aliases
5371 (some_callers_have_no_vuse_p, NULL, true))
5373 if (dump_file)
5374 fprintf (dump_file, "There are callers with no VUSE attached "
5375 "to a call stmt.\n");
5376 goto simple_out;
5379 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5380 func_param_count
5381 * last_basic_block_for_fn (cfun));
5382 final_bbs = BITMAP_ALLOC (NULL);
5384 scan_function ();
5385 if (encountered_apply_args)
5387 if (dump_file)
5388 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5389 goto out;
5392 if (encountered_unchangable_recursive_call)
5394 if (dump_file)
5395 fprintf (dump_file, "Function calls itself with insufficient "
5396 "number of arguments.\n");
5397 goto out;
5400 adjustments = analyze_all_param_acesses ();
5401 if (!adjustments.exists ())
5402 goto out;
5403 if (dump_file)
5404 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5406 if (modify_function (node, adjustments))
5407 ret = TODO_update_ssa | TODO_cleanup_cfg;
5408 else
5409 ret = TODO_update_ssa;
5410 adjustments.release ();
5412 statistics_counter_event (cfun, "Unused parameters deleted",
5413 sra_stats.deleted_unused_parameters);
5414 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5415 sra_stats.scalar_by_ref_to_by_val);
5416 statistics_counter_event (cfun, "Aggregate parameters broken up",
5417 sra_stats.aggregate_params_reduced);
5418 statistics_counter_event (cfun, "Aggregate parameter components created",
5419 sra_stats.param_reductions_created);
5421 out:
5422 BITMAP_FREE (final_bbs);
5423 free (bb_dereferences);
5424 simple_out:
5425 sra_deinitialize ();
5426 return ret;
5429 namespace {
5431 const pass_data pass_data_early_ipa_sra =
5433 GIMPLE_PASS, /* type */
5434 "eipa_sra", /* name */
5435 OPTGROUP_NONE, /* optinfo_flags */
5436 TV_IPA_SRA, /* tv_id */
5437 0, /* properties_required */
5438 0, /* properties_provided */
5439 0, /* properties_destroyed */
5440 0, /* todo_flags_start */
5441 TODO_dump_symtab, /* todo_flags_finish */
5444 class pass_early_ipa_sra : public gimple_opt_pass
5446 public:
5447 pass_early_ipa_sra (gcc::context *ctxt)
5448 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5451 /* opt_pass methods: */
5452 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5453 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5455 }; // class pass_early_ipa_sra
5457 } // anon namespace
5459 gimple_opt_pass *
5460 make_pass_early_ipa_sra (gcc::context *ctxt)
5462 return new pass_early_ipa_sra (ctxt);