[ARM] Add source mode to coprocessor pattern SETs
[official-gcc.git] / gcc / tree-sra.c
blob31834ed7af748d588c37c15f03bb80929be0b8d5
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2017 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Reduction of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevent their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-inline.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120 part). It can also represent a group of accesses that refer to exactly the
121 same fragment of an aggregate (i.e. those that have exactly the same offset
122 and size). Such representatives for a single aggregate, once determined,
123 are linked in a linked list and have the group fields set.
125 Moreover, when doing intraprocedural SRA, a tree is built from those
126 representatives (by the means of first_child and next_sibling pointers), in
127 which all items in a subtree are "within" the root, i.e. their offset is
128 greater or equal to offset of the root and offset+size is smaller or equal
129 to offset+size of the root. Children of an access are sorted by offset.
131 Note that accesses to parts of vector and complex number types always
132 represented by an access to the whole complex number or a vector. It is a
133 duty of the modifying functions to replace them appropriately. */
135 struct access
137 /* Values returned by `get_ref_base_and_extent' for each component reference
138 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
139 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
140 HOST_WIDE_INT offset;
141 HOST_WIDE_INT size;
142 tree base;
144 /* Expression. It is context dependent so do not use it to create new
145 expressions to access the original aggregate. See PR 42154 for a
146 testcase. */
147 tree expr;
148 /* Type. */
149 tree type;
151 /* The statement this access belongs to. */
152 gimple *stmt;
154 /* Next group representative for this aggregate. */
155 struct access *next_grp;
157 /* Pointer to the group representative. Pointer to itself if the struct is
158 the representative. */
159 struct access *group_representative;
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. In IPA-SRA this is a pointer to the next access
167 belonging to the same group (having the same representative). */
168 struct access *next_sibling;
170 /* Pointers to the first and last element in the linked list of assign
171 links. */
172 struct assign_link *first_link, *last_link;
174 /* Pointer to the next access in the work queue. */
175 struct access *next_queued;
177 /* Replacement variable for this access "region." Never to be accessed
178 directly, always only by the means of get_access_replacement() and only
179 when grp_to_be_replaced flag is set. */
180 tree replacement_decl;
182 /* Is this access an access to a non-addressable field? */
183 unsigned non_addressable : 1;
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
188 /* Is this particular access write access? */
189 unsigned write : 1;
191 /* Is this access currently in the work queue? */
192 unsigned grp_queued : 1;
194 /* Does this group contain a write access? This flag is propagated down the
195 access tree. */
196 unsigned grp_write : 1;
198 /* Does this group contain a read access? This flag is propagated down the
199 access tree. */
200 unsigned grp_read : 1;
202 /* Does this group contain a read access that comes from an assignment
203 statement? This flag is propagated down the access tree. */
204 unsigned grp_assignment_read : 1;
206 /* Does this group contain a write access that comes from an assignment
207 statement? This flag is propagated down the access tree. */
208 unsigned grp_assignment_write : 1;
210 /* Does this group contain a read access through a scalar type? This flag is
211 not propagated in the access tree in any direction. */
212 unsigned grp_scalar_read : 1;
214 /* Does this group contain a write access through a scalar type? This flag
215 is not propagated in the access tree in any direction. */
216 unsigned grp_scalar_write : 1;
218 /* Is this access an artificial one created to scalarize some record
219 entirely? */
220 unsigned grp_total_scalarization : 1;
222 /* Other passes of the analysis use this bit to make function
223 analyze_access_subtree create scalar replacements for this group if
224 possible. */
225 unsigned grp_hint : 1;
227 /* Is the subtree rooted in this access fully covered by scalar
228 replacements? */
229 unsigned grp_covered : 1;
231 /* If set to true, this access and all below it in an access tree must not be
232 scalarized. */
233 unsigned grp_unscalarizable_region : 1;
235 /* Whether data have been written to parts of the aggregate covered by this
236 access which is not to be scalarized. This flag is propagated up in the
237 access tree. */
238 unsigned grp_unscalarized_data : 1;
240 /* Does this access and/or group contain a write access through a
241 BIT_FIELD_REF? */
242 unsigned grp_partial_lhs : 1;
244 /* Set when a scalar replacement should be created for this variable. */
245 unsigned grp_to_be_replaced : 1;
247 /* Set when we want a replacement for the sole purpose of having it in
248 generated debug statements. */
249 unsigned grp_to_be_debug_replaced : 1;
251 /* Should TREE_NO_WARNING of a replacement be set? */
252 unsigned grp_no_warning : 1;
254 /* Is it possible that the group refers to data which might be (directly or
255 otherwise) modified? */
256 unsigned grp_maybe_modified : 1;
258 /* Set when this is a representative of a pointer to scalar (i.e. by
259 reference) parameter which we consider for turning into a plain scalar
260 (i.e. a by value parameter). */
261 unsigned grp_scalar_ptr : 1;
263 /* Set when we discover that this pointer is not safe to dereference in the
264 caller. */
265 unsigned grp_not_necessarilly_dereferenced : 1;
268 typedef struct access *access_p;
271 /* Alloc pool for allocating access structures. */
272 static object_allocator<struct access> access_pool ("SRA accesses");
274 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
275 are used to propagate subaccesses from rhs to lhs as long as they don't
276 conflict with what is already there. */
277 struct assign_link
279 struct access *lacc, *racc;
280 struct assign_link *next;
283 /* Alloc pool for allocating assign link structures. */
284 static object_allocator<assign_link> assign_link_pool ("SRA links");
286 /* Base (tree) -> Vector (vec<access_p> *) map. */
287 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
289 /* Candidate hash table helpers. */
291 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
293 static inline hashval_t hash (const tree_node *);
294 static inline bool equal (const tree_node *, const tree_node *);
297 /* Hash a tree in a uid_decl_map. */
299 inline hashval_t
300 uid_decl_hasher::hash (const tree_node *item)
302 return item->decl_minimal.uid;
305 /* Return true if the DECL_UID in both trees are equal. */
307 inline bool
308 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
310 return (a->decl_minimal.uid == b->decl_minimal.uid);
313 /* Set of candidates. */
314 static bitmap candidate_bitmap;
315 static hash_table<uid_decl_hasher> *candidates;
317 /* For a candidate UID return the candidates decl. */
319 static inline tree
320 candidate (unsigned uid)
322 tree_node t;
323 t.decl_minimal.uid = uid;
324 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
327 /* Bitmap of candidates which we should try to entirely scalarize away and
328 those which cannot be (because they are and need be used as a whole). */
329 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
331 /* Bitmap of candidates in the constant pool, which cannot be scalarized
332 because this would produce non-constant expressions (e.g. Ada). */
333 static bitmap disqualified_constants;
335 /* Obstack for creation of fancy names. */
336 static struct obstack name_obstack;
338 /* Head of a linked list of accesses that need to have its subaccesses
339 propagated to their assignment counterparts. */
340 static struct access *work_queue_head;
342 /* Number of parameters of the analyzed function when doing early ipa SRA. */
343 static int func_param_count;
345 /* scan_function sets the following to true if it encounters a call to
346 __builtin_apply_args. */
347 static bool encountered_apply_args;
349 /* Set by scan_function when it finds a recursive call. */
350 static bool encountered_recursive_call;
352 /* Set by scan_function when it finds a recursive call with less actual
353 arguments than formal parameters.. */
354 static bool encountered_unchangable_recursive_call;
356 /* This is a table in which for each basic block and parameter there is a
357 distance (offset + size) in that parameter which is dereferenced and
358 accessed in that BB. */
359 static HOST_WIDE_INT *bb_dereferences;
360 /* Bitmap of BBs that can cause the function to "stop" progressing by
361 returning, throwing externally, looping infinitely or calling a function
362 which might abort etc.. */
363 static bitmap final_bbs;
365 /* Representative of no accesses at all. */
366 static struct access no_accesses_representant;
368 /* Predicate to test the special value. */
370 static inline bool
371 no_accesses_p (struct access *access)
373 return access == &no_accesses_representant;
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
421 static void
422 dump_access (FILE *f, struct access *access, bool grp)
424 fprintf (f, "access { ");
425 fprintf (f, "base = (%d)'", DECL_UID (access->base));
426 print_generic_expr (f, access->base, 0);
427 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
428 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
429 fprintf (f, ", expr = ");
430 print_generic_expr (f, access->expr, 0);
431 fprintf (f, ", type = ");
432 print_generic_expr (f, access->type, 0);
433 fprintf (f, ", non_addressable = %d, reverse = %d",
434 access->non_addressable, access->reverse);
435 if (grp)
436 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
437 "grp_assignment_write = %d, grp_scalar_read = %d, "
438 "grp_scalar_write = %d, grp_total_scalarization = %d, "
439 "grp_hint = %d, grp_covered = %d, "
440 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
441 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
442 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
443 "grp_not_necessarilly_dereferenced = %d\n",
444 access->grp_read, access->grp_write, access->grp_assignment_read,
445 access->grp_assignment_write, access->grp_scalar_read,
446 access->grp_scalar_write, access->grp_total_scalarization,
447 access->grp_hint, access->grp_covered,
448 access->grp_unscalarizable_region, access->grp_unscalarized_data,
449 access->grp_partial_lhs, access->grp_to_be_replaced,
450 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
451 access->grp_not_necessarilly_dereferenced);
452 else
453 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
454 "grp_partial_lhs = %d\n",
455 access->write, access->grp_total_scalarization,
456 access->grp_partial_lhs);
459 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
461 static void
462 dump_access_tree_1 (FILE *f, struct access *access, int level)
466 int i;
468 for (i = 0; i < level; i++)
469 fputs ("* ", dump_file);
471 dump_access (f, access, true);
473 if (access->first_child)
474 dump_access_tree_1 (f, access->first_child, level + 1);
476 access = access->next_sibling;
478 while (access);
481 /* Dump all access trees for a variable, given the pointer to the first root in
482 ACCESS. */
484 static void
485 dump_access_tree (FILE *f, struct access *access)
487 for (; access; access = access->next_grp)
488 dump_access_tree_1 (f, access, 0);
491 /* Return true iff ACC is non-NULL and has subaccesses. */
493 static inline bool
494 access_has_children_p (struct access *acc)
496 return acc && acc->first_child;
499 /* Return true iff ACC is (partly) covered by at least one replacement. */
501 static bool
502 access_has_replacements_p (struct access *acc)
504 struct access *child;
505 if (acc->grp_to_be_replaced)
506 return true;
507 for (child = acc->first_child; child; child = child->next_sibling)
508 if (access_has_replacements_p (child))
509 return true;
510 return false;
513 /* Return a vector of pointers to accesses for the variable given in BASE or
514 NULL if there is none. */
516 static vec<access_p> *
517 get_base_access_vector (tree base)
519 return base_access_vec->get (base);
522 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
523 in ACCESS. Return NULL if it cannot be found. */
525 static struct access *
526 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
527 HOST_WIDE_INT size)
529 while (access && (access->offset != offset || access->size != size))
531 struct access *child = access->first_child;
533 while (child && (child->offset + child->size <= offset))
534 child = child->next_sibling;
535 access = child;
538 return access;
541 /* Return the first group representative for DECL or NULL if none exists. */
543 static struct access *
544 get_first_repr_for_decl (tree base)
546 vec<access_p> *access_vec;
548 access_vec = get_base_access_vector (base);
549 if (!access_vec)
550 return NULL;
552 return (*access_vec)[0];
555 /* Find an access representative for the variable BASE and given OFFSET and
556 SIZE. Requires that access trees have already been built. Return NULL if
557 it cannot be found. */
559 static struct access *
560 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
561 HOST_WIDE_INT size)
563 struct access *access;
565 access = get_first_repr_for_decl (base);
566 while (access && (access->offset + access->size <= offset))
567 access = access->next_grp;
568 if (!access)
569 return NULL;
571 return find_access_in_subtree (access, offset, size);
574 /* Add LINK to the linked list of assign links of RACC. */
575 static void
576 add_link_to_rhs (struct access *racc, struct assign_link *link)
578 gcc_assert (link->racc == racc);
580 if (!racc->first_link)
582 gcc_assert (!racc->last_link);
583 racc->first_link = link;
585 else
586 racc->last_link->next = link;
588 racc->last_link = link;
589 link->next = NULL;
592 /* Move all link structures in their linked list in OLD_RACC to the linked list
593 in NEW_RACC. */
594 static void
595 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
597 if (!old_racc->first_link)
599 gcc_assert (!old_racc->last_link);
600 return;
603 if (new_racc->first_link)
605 gcc_assert (!new_racc->last_link->next);
606 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
608 new_racc->last_link->next = old_racc->first_link;
609 new_racc->last_link = old_racc->last_link;
611 else
613 gcc_assert (!new_racc->last_link);
615 new_racc->first_link = old_racc->first_link;
616 new_racc->last_link = old_racc->last_link;
618 old_racc->first_link = old_racc->last_link = NULL;
621 /* Add ACCESS to the work queue (which is actually a stack). */
623 static void
624 add_access_to_work_queue (struct access *access)
626 if (!access->grp_queued)
628 gcc_assert (!access->next_queued);
629 access->next_queued = work_queue_head;
630 access->grp_queued = 1;
631 work_queue_head = access;
635 /* Pop an access from the work queue, and return it, assuming there is one. */
637 static struct access *
638 pop_access_from_work_queue (void)
640 struct access *access = work_queue_head;
642 work_queue_head = access->next_queued;
643 access->next_queued = NULL;
644 access->grp_queued = 0;
645 return access;
649 /* Allocate necessary structures. */
651 static void
652 sra_initialize (void)
654 candidate_bitmap = BITMAP_ALLOC (NULL);
655 candidates = new hash_table<uid_decl_hasher>
656 (vec_safe_length (cfun->local_decls) / 2);
657 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
658 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
659 disqualified_constants = BITMAP_ALLOC (NULL);
660 gcc_obstack_init (&name_obstack);
661 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
662 memset (&sra_stats, 0, sizeof (sra_stats));
663 encountered_apply_args = false;
664 encountered_recursive_call = false;
665 encountered_unchangable_recursive_call = false;
668 /* Deallocate all general structures. */
670 static void
671 sra_deinitialize (void)
673 BITMAP_FREE (candidate_bitmap);
674 delete candidates;
675 candidates = NULL;
676 BITMAP_FREE (should_scalarize_away_bitmap);
677 BITMAP_FREE (cannot_scalarize_away_bitmap);
678 BITMAP_FREE (disqualified_constants);
679 access_pool.release ();
680 assign_link_pool.release ();
681 obstack_free (&name_obstack, NULL);
683 delete base_access_vec;
686 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
688 static bool constant_decl_p (tree decl)
690 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
693 /* Remove DECL from candidates for SRA and write REASON to the dump file if
694 there is one. */
695 static void
696 disqualify_candidate (tree decl, const char *reason)
698 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
699 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
700 if (constant_decl_p (decl))
701 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
703 if (dump_file && (dump_flags & TDF_DETAILS))
705 fprintf (dump_file, "! Disqualifying ");
706 print_generic_expr (dump_file, decl, 0);
707 fprintf (dump_file, " - %s\n", reason);
711 /* Return true iff the type contains a field or an element which does not allow
712 scalarization. */
714 static bool
715 type_internals_preclude_sra_p (tree type, const char **msg)
717 tree fld;
718 tree et;
720 switch (TREE_CODE (type))
722 case RECORD_TYPE:
723 case UNION_TYPE:
724 case QUAL_UNION_TYPE:
725 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
726 if (TREE_CODE (fld) == FIELD_DECL)
728 tree ft = TREE_TYPE (fld);
730 if (TREE_THIS_VOLATILE (fld))
732 *msg = "volatile structure field";
733 return true;
735 if (!DECL_FIELD_OFFSET (fld))
737 *msg = "no structure field offset";
738 return true;
740 if (!DECL_SIZE (fld))
742 *msg = "zero structure field size";
743 return true;
745 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
747 *msg = "structure field offset not fixed";
748 return true;
750 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
752 *msg = "structure field size not fixed";
753 return true;
755 if (!tree_fits_shwi_p (bit_position (fld)))
757 *msg = "structure field size too big";
758 return true;
760 if (AGGREGATE_TYPE_P (ft)
761 && int_bit_position (fld) % BITS_PER_UNIT != 0)
763 *msg = "structure field is bit field";
764 return true;
767 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
768 return true;
771 return false;
773 case ARRAY_TYPE:
774 et = TREE_TYPE (type);
776 if (TYPE_VOLATILE (et))
778 *msg = "element type is volatile";
779 return true;
782 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
783 return true;
785 return false;
787 default:
788 return false;
792 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
793 base variable if it is. Return T if it is not an SSA_NAME. */
795 static tree
796 get_ssa_base_param (tree t)
798 if (TREE_CODE (t) == SSA_NAME)
800 if (SSA_NAME_IS_DEFAULT_DEF (t))
801 return SSA_NAME_VAR (t);
802 else
803 return NULL_TREE;
805 return t;
808 /* Mark a dereference of BASE of distance DIST in a basic block tht STMT
809 belongs to, unless the BB has already been marked as a potentially
810 final. */
812 static void
813 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
815 basic_block bb = gimple_bb (stmt);
816 int idx, parm_index = 0;
817 tree parm;
819 if (bitmap_bit_p (final_bbs, bb->index))
820 return;
822 for (parm = DECL_ARGUMENTS (current_function_decl);
823 parm && parm != base;
824 parm = DECL_CHAIN (parm))
825 parm_index++;
827 gcc_assert (parm_index < func_param_count);
829 idx = bb->index * func_param_count + parm_index;
830 if (bb_dereferences[idx] < dist)
831 bb_dereferences[idx] = dist;
834 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
835 the three fields. Also add it to the vector of accesses corresponding to
836 the base. Finally, return the new access. */
838 static struct access *
839 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
841 struct access *access = access_pool.allocate ();
843 memset (access, 0, sizeof (struct access));
844 access->base = base;
845 access->offset = offset;
846 access->size = size;
848 base_access_vec->get_or_insert (base).safe_push (access);
850 return access;
853 static bool maybe_add_sra_candidate (tree);
855 /* Create and insert access for EXPR. Return created access, or NULL if it is
856 not possible. Also scan for uses of constant pool as we go along and add
857 to candidates. */
859 static struct access *
860 create_access (tree expr, gimple *stmt, bool write)
862 struct access *access;
863 HOST_WIDE_INT offset, size, max_size;
864 tree base = expr;
865 bool reverse, ptr, unscalarizable_region = false;
867 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
869 if (sra_mode == SRA_MODE_EARLY_IPA
870 && TREE_CODE (base) == MEM_REF)
872 base = get_ssa_base_param (TREE_OPERAND (base, 0));
873 if (!base)
874 return NULL;
875 ptr = true;
877 else
878 ptr = false;
880 /* For constant-pool entries, check we can substitute the constant value. */
881 if (constant_decl_p (base)
882 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
884 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
885 if (expr != base
886 && !is_gimple_reg_type (TREE_TYPE (expr))
887 && dump_file && (dump_flags & TDF_DETAILS))
889 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
890 and elements of multidimensional arrays (which are
891 multi-element arrays in their own right). */
892 fprintf (dump_file, "Allowing non-reg-type load of part"
893 " of constant-pool entry: ");
894 print_generic_expr (dump_file, expr, 0);
896 maybe_add_sra_candidate (base);
899 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
900 return NULL;
902 if (sra_mode == SRA_MODE_EARLY_IPA)
904 if (size < 0 || size != max_size)
906 disqualify_candidate (base, "Encountered a variable sized access.");
907 return NULL;
909 if (TREE_CODE (expr) == COMPONENT_REF
910 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
912 disqualify_candidate (base, "Encountered a bit-field access.");
913 return NULL;
915 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
917 if (ptr)
918 mark_parm_dereference (base, offset + size, stmt);
920 else
922 if (size != max_size)
924 size = max_size;
925 unscalarizable_region = true;
927 if (size < 0)
929 disqualify_candidate (base, "Encountered an unconstrained access.");
930 return NULL;
934 access = create_access_1 (base, offset, size);
935 access->expr = expr;
936 access->type = TREE_TYPE (expr);
937 access->write = write;
938 access->grp_unscalarizable_region = unscalarizable_region;
939 access->stmt = stmt;
940 access->reverse = reverse;
942 if (TREE_CODE (expr) == COMPONENT_REF
943 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
944 access->non_addressable = 1;
946 return access;
950 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
951 ARRAY_TYPE with fields that are either of gimple register types (excluding
952 bit-fields) or (recursively) scalarizable types. */
954 static bool
955 scalarizable_type_p (tree type)
957 gcc_assert (!is_gimple_reg_type (type));
958 if (type_contains_placeholder_p (type))
959 return false;
961 switch (TREE_CODE (type))
963 case RECORD_TYPE:
964 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
965 if (TREE_CODE (fld) == FIELD_DECL)
967 tree ft = TREE_TYPE (fld);
969 if (DECL_BIT_FIELD (fld))
970 return false;
972 if (!is_gimple_reg_type (ft)
973 && !scalarizable_type_p (ft))
974 return false;
977 return true;
979 case ARRAY_TYPE:
981 if (TYPE_DOMAIN (type) == NULL_TREE
982 || !tree_fits_shwi_p (TYPE_SIZE (type))
983 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
984 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
985 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
986 return false;
987 if (tree_to_shwi (TYPE_SIZE (type)) == 0
988 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
989 /* Zero-element array, should not prevent scalarization. */
991 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
992 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
993 /* Variable-length array, do not allow scalarization. */
994 return false;
996 tree elem = TREE_TYPE (type);
997 if (!is_gimple_reg_type (elem)
998 && !scalarizable_type_p (elem))
999 return false;
1000 return true;
1002 default:
1003 return false;
1007 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1009 /* Create total_scalarization accesses for all scalar fields of a member
1010 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1011 must be the top-most VAR_DECL representing the variable; within that,
1012 OFFSET locates the member and REF must be the memory reference expression for
1013 the member. */
1015 static void
1016 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1018 switch (TREE_CODE (decl_type))
1020 case RECORD_TYPE:
1021 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1022 if (TREE_CODE (fld) == FIELD_DECL)
1024 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1025 tree ft = TREE_TYPE (fld);
1026 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1028 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1029 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1030 nref, ft);
1032 break;
1033 case ARRAY_TYPE:
1035 tree elemtype = TREE_TYPE (decl_type);
1036 tree elem_size = TYPE_SIZE (elemtype);
1037 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1038 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1039 gcc_assert (el_size > 0);
1041 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1042 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1043 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1044 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1045 if (maxidx)
1047 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1048 tree domain = TYPE_DOMAIN (decl_type);
1049 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1050 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1051 offset_int idx = wi::to_offset (minidx);
1052 offset_int max = wi::to_offset (maxidx);
1053 if (!TYPE_UNSIGNED (domain))
1055 idx = wi::sext (idx, TYPE_PRECISION (domain));
1056 max = wi::sext (max, TYPE_PRECISION (domain));
1058 for (int el_off = offset; idx <= max; ++idx)
1060 tree nref = build4 (ARRAY_REF, elemtype,
1061 ref,
1062 wide_int_to_tree (domain, idx),
1063 NULL_TREE, NULL_TREE);
1064 scalarize_elem (base, el_off, el_size,
1065 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1066 nref, elemtype);
1067 el_off += el_size;
1071 break;
1072 default:
1073 gcc_unreachable ();
1077 /* Create total_scalarization accesses for a member of type TYPE, which must
1078 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1079 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1080 the member, REVERSE gives its storage order, and REF must be the reference
1081 expression for it. */
1083 static void
1084 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1085 tree ref, tree type)
1087 if (is_gimple_reg_type (type))
1089 struct access *access = create_access_1 (base, pos, size);
1090 access->expr = ref;
1091 access->type = type;
1092 access->grp_total_scalarization = 1;
1093 access->reverse = reverse;
1094 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1096 else
1097 completely_scalarize (base, type, pos, ref);
1100 /* Create a total_scalarization access for VAR as a whole. VAR must be of a
1101 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1103 static void
1104 create_total_scalarization_access (tree var)
1106 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1107 struct access *access;
1109 access = create_access_1 (var, 0, size);
1110 access->expr = var;
1111 access->type = TREE_TYPE (var);
1112 access->grp_total_scalarization = 1;
1115 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1117 static inline bool
1118 contains_view_convert_expr_p (const_tree ref)
1120 while (handled_component_p (ref))
1122 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1123 return true;
1124 ref = TREE_OPERAND (ref, 0);
1127 return false;
1130 /* Search the given tree for a declaration by skipping handled components and
1131 exclude it from the candidates. */
1133 static void
1134 disqualify_base_of_expr (tree t, const char *reason)
1136 t = get_base_address (t);
1137 if (sra_mode == SRA_MODE_EARLY_IPA
1138 && TREE_CODE (t) == MEM_REF)
1139 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1141 if (t && DECL_P (t))
1142 disqualify_candidate (t, reason);
1145 /* Scan expression EXPR and create access structures for all accesses to
1146 candidates for scalarization. Return the created access or NULL if none is
1147 created. */
1149 static struct access *
1150 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1152 struct access *ret = NULL;
1153 bool partial_ref;
/* Peel off wrappers that read only part of a scalar; remember the access
   is partial so a write through it can be flagged as grp_partial_lhs.  */
1155 if (TREE_CODE (expr) == BIT_FIELD_REF
1156 || TREE_CODE (expr) == IMAGPART_EXPR
1157 || TREE_CODE (expr) == REALPART_EXPR)
1159 expr = TREE_OPERAND (expr, 0);
1160 partial_ref = true;
1162 else
1163 partial_ref = false;
1165 /* We need to dive through V_C_Es in order to get the size of its parameter
1166 and not the result type. Ada produces such statements. We are also
1167 capable of handling the topmost V_C_E but not any of those buried in other
1168 handled components. */
1169 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1170 expr = TREE_OPERAND (expr, 0);
/* A V_C_E below another handled component disqualifies the whole base.  */
1172 if (contains_view_convert_expr_p (expr))
1174 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1175 "component.");
1176 return NULL;
1178 if (TREE_THIS_VOLATILE (expr))
1180 disqualify_base_of_expr (expr, "part of a volatile reference.");
1181 return NULL;
1184 switch (TREE_CODE (expr))
1186 case MEM_REF:
/* Outside of early IPA mode only dereferences of known addresses are
   of interest.  */
1187 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1188 && sra_mode != SRA_MODE_EARLY_IPA)
1189 return NULL;
1190 /* fall through */
1191 case VAR_DECL:
1192 case PARM_DECL:
1193 case RESULT_DECL:
1194 case COMPONENT_REF:
1195 case ARRAY_REF:
1196 case ARRAY_RANGE_REF:
1197 ret = create_access (expr, stmt, write);
1198 break;
1200 default:
1201 break;
/* A store that covers only part of the location cannot become a simple
   register assignment.  */
1204 if (write && partial_ref && ret)
1205 ret->grp_partial_lhs = 1;
1207 return ret;
1210 /* Scan expression EXPR and create access structures for all accesses to
1211 candidates for scalarization. Return true if any access has been inserted.
1212 STMT must be the statement from which the expression is taken, WRITE must be
1213 true if the expression is a store and false otherwise. */
1215 static bool
1216 build_access_from_expr (tree expr, gimple *stmt, bool write)
1218 struct access *access;
1220 access = build_access_from_expr_1 (expr, stmt, write);
1221 if (access)
1223 /* This means the aggregate is accesses as a whole in a way other than an
1224 assign statement and thus cannot be removed even if we had a scalar
1225 replacement for everything. */
1226 if (cannot_scalarize_away_bitmap)
1227 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1228 return true;
1230 return false;
1233 /* Return the single non-EH successor edge of BB or NULL if there is none or
1234 more than one. */
1236 static edge
1237 single_non_eh_succ (basic_block bb)
1239 edge e, res = NULL;
1240 edge_iterator ei;
1242 FOR_EACH_EDGE (e, ei, bb->succs)
1243 if (!(e->flags & EDGE_EH))
1245 if (res)
1246 return NULL;
1247 res = e;
1250 return res;
1253 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1254 there is no alternative spot where to put statements SRA might need to
1255 generate after it. The spot we are looking for is an edge leading to a
1256 single non-EH successor, if it exists and is indeed single. RHS may be
1257 NULL, in that case ignore it. */
1259 static bool
1260 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1262 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1263 && stmt_ends_bb_p (stmt))
1265 if (single_non_eh_succ (gimple_bb (stmt)))
1266 return false;
1268 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1269 if (rhs)
1270 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1271 return true;
1273 return false;
1276 /* Scan expressions occurring in STMT, create access structures for all accesses
1277 to candidates for scalarization and remove those candidates which occur in
1278 statements or expressions that prevent them from being split apart. Return
1279 true if any access has been inserted. */
1281 static bool
1282 build_accesses_from_assign (gimple *stmt)
1284 tree lhs, rhs;
1285 struct access *lacc, *racc;
1287 if (!gimple_assign_single_p (stmt)
1288 /* Scope clobbers don't influence scalarization. */
1289 || gimple_clobber_p (stmt))
1290 return false;
1292 lhs = gimple_assign_lhs (stmt);
1293 rhs = gimple_assign_rhs1 (stmt);
1295 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1296 return false;
/* Build a read access for the RHS and a write access for the LHS.  */
1298 racc = build_access_from_expr_1 (rhs, stmt, false);
1299 lacc = build_access_from_expr_1 (lhs, stmt, true);
1301 if (lacc)
1303 lacc->grp_assignment_write = 1;
/* A storage-order barrier on the other side makes the region
   unscalarizable.  */
1304 if (storage_order_barrier_p (rhs))
1305 lacc->grp_unscalarizable_region = 1;
1308 if (racc)
1310 racc->grp_assignment_read = 1;
1311 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1312 && !is_gimple_reg_type (racc->type))
1313 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1314 if (storage_order_barrier_p (lhs))
1315 racc->grp_unscalarizable_region = 1;
/* For a same-size aggregate copy in intra modes, link the two accesses so
   that subaccess propagation can later match them up.  */
1318 if (lacc && racc
1319 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1320 && !lacc->grp_unscalarizable_region
1321 && !racc->grp_unscalarizable_region
1322 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1323 && lacc->size == racc->size
1324 && useless_type_conversion_p (lacc->type, racc->type))
1326 struct assign_link *link;
1328 link = assign_link_pool.allocate ();
1329 memset (link, 0, sizeof (struct assign_link));
1331 link->lacc = lacc;
1332 link->racc = racc;
1334 add_link_to_rhs (racc, link);
1337 return lacc || racc;
1340 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1341 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1343 static bool
1344 asm_visit_addr (gimple *, tree op, tree, void *)
1346 op = get_base_address (op);
1347 if (op
1348 && DECL_P (op))
1349 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1351 return false;
1354 /* Return true iff callsite CALL has at least as many actual arguments as there
1355 are formal parameters of the function currently processed by IPA-SRA and
1356 that their types match. */
1358 static inline bool
1359 callsite_arguments_match_p (gimple *call)
1361 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1362 return false;
1364 tree parm;
1365 int i;
1366 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1367 parm;
1368 parm = DECL_CHAIN (parm), i++)
1370 tree arg = gimple_call_arg (call, i);
1371 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1372 return false;
1374 return true;
1377 /* Scan function and look for interesting expressions and create access
1378 structures for them. Return true iff any access is created. */
1380 static bool
1381 scan_function (void)
1383 basic_block bb;
1384 bool ret = false;
1386 FOR_EACH_BB_FN (bb, cfun)
1388 gimple_stmt_iterator gsi;
1389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1391 gimple *stmt = gsi_stmt (gsi);
1392 tree t;
1393 unsigned i;
/* Blocks with externally-throwing statements are recorded as final
   for the IPA analysis.  */
1395 if (final_bbs && stmt_can_throw_external (stmt))
1396 bitmap_set_bit (final_bbs, bb->index);
1397 switch (gimple_code (stmt))
1399 case GIMPLE_RETURN:
/* A returned value is a read access.  */
1400 t = gimple_return_retval (as_a <greturn *> (stmt));
1401 if (t != NULL_TREE)
1402 ret |= build_access_from_expr (t, stmt, false);
1403 if (final_bbs)
1404 bitmap_set_bit (final_bbs, bb->index);
1405 break;
1407 case GIMPLE_ASSIGN:
1408 ret |= build_accesses_from_assign (stmt);
1409 break;
1411 case GIMPLE_CALL:
/* All actual arguments are reads.  */
1412 for (i = 0; i < gimple_call_num_args (stmt); i++)
1413 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1414 stmt, false);
1416 if (sra_mode == SRA_MODE_EARLY_IPA)
1418 tree dest = gimple_call_fndecl (stmt);
1419 int flags = gimple_call_flags (stmt);
1421 if (dest)
/* __builtin_apply_args and recursion restrict what IPA-SRA may
   later do to the parameters.  */
1423 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1424 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1425 encountered_apply_args = true;
1426 if (recursive_call_p (current_function_decl, dest))
1428 encountered_recursive_call = true;
1429 if (!callsite_arguments_match_p (stmt))
1430 encountered_unchangable_recursive_call = true;
1434 if (final_bbs
1435 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1436 bitmap_set_bit (final_bbs, bb->index);
/* The call LHS is a write, unless the terminating statement had to
   disqualify it.  */
1439 t = gimple_call_lhs (stmt);
1440 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1441 ret |= build_access_from_expr (t, stmt, true);
1442 break;
1444 case GIMPLE_ASM:
1446 gasm *asm_stmt = as_a <gasm *> (stmt);
1447 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1448 asm_visit_addr);
1449 if (final_bbs)
1450 bitmap_set_bit (final_bbs, bb->index);
/* Asm inputs are reads, outputs are writes.  */
1452 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1454 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1455 ret |= build_access_from_expr (t, asm_stmt, false);
1457 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1459 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1460 ret |= build_access_from_expr (t, asm_stmt, true);
1463 break;
1465 default:
1466 break;
1471 return ret;
1474 /* Helper of QSORT function. There are pointers to accesses in the array. An
1475 access is considered smaller than another if it has smaller offset or if the
1476 offsets are the same but its size is bigger. */
1478 static int
1479 compare_access_positions (const void *a, const void *b)
1481 const access_p *fp1 = (const access_p *) a;
1482 const access_p *fp2 = (const access_p *) b;
1483 const access_p f1 = *fp1;
1484 const access_p f2 = *fp2;
/* Primary key: offset, ascending.  */
1486 if (f1->offset != f2->offset)
1487 return f1->offset < f2->offset ? -1 : 1;
/* Equal offset and size: order by type properties so that the most
   register-friendly representative sorts first.  */
1489 if (f1->size == f2->size)
1491 if (f1->type == f2->type)
1492 return 0;
1493 /* Put any non-aggregate type before any aggregate type. */
1494 else if (!is_gimple_reg_type (f1->type)
1495 && is_gimple_reg_type (f2->type))
1496 return 1;
1497 else if (is_gimple_reg_type (f1->type)
1498 && !is_gimple_reg_type (f2->type))
1499 return -1;
1500 /* Put any complex or vector type before any other scalar type. */
1501 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1502 && TREE_CODE (f1->type) != VECTOR_TYPE
1503 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1504 || TREE_CODE (f2->type) == VECTOR_TYPE))
1505 return 1;
1506 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1507 || TREE_CODE (f1->type) == VECTOR_TYPE)
1508 && TREE_CODE (f2->type) != COMPLEX_TYPE
1509 && TREE_CODE (f2->type) != VECTOR_TYPE)
1510 return -1;
1511 /* Put the integral type with the bigger precision first. */
1512 else if (INTEGRAL_TYPE_P (f1->type)
1513 && INTEGRAL_TYPE_P (f2->type))
1514 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1515 /* Put any integral type with non-full precision last. */
1516 else if (INTEGRAL_TYPE_P (f1->type)
1517 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1518 != TYPE_PRECISION (f1->type)))
1519 return 1;
1520 else if (INTEGRAL_TYPE_P (f2->type)
1521 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1522 != TYPE_PRECISION (f2->type)))
1523 return -1;
1524 /* Stabilize the sort. */
1525 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1528 /* We want the bigger accesses first, thus the opposite operator in the next
1529 line: */
1530 return f1->size > f2->size ? -1 : 1;
1534 /* Append a name of the declaration to the name obstack. A helper function for
1535 make_fancy_name. */
1537 static void
1538 make_fancy_decl_name (tree decl)
1540 char buffer[32];
1542 tree name = DECL_NAME (decl);
1543 if (name)
1544 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1545 IDENTIFIER_LENGTH (name));
1546 else
1548 sprintf (buffer, "D%u", DECL_UID (decl));
1549 obstack_grow (&name_obstack, buffer, strlen (buffer));
1553 /* Helper for make_fancy_name. */
1555 static void
1556 make_fancy_name_1 (tree expr)
1558 char buffer[32];
1559 tree index;
/* A bare declaration terminates the recursion.  */
1561 if (DECL_P (expr))
1563 make_fancy_decl_name (expr);
1564 return;
/* Otherwise emit the base first, then a '$'-separated component.  */
1567 switch (TREE_CODE (expr))
1569 case COMPONENT_REF:
1570 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1571 obstack_1grow (&name_obstack, '$');
1572 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1573 break;
1575 case ARRAY_REF:
1576 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1577 obstack_1grow (&name_obstack, '$');
1578 /* Arrays with only one element may not have a constant as their
1579 index. */
1580 index = TREE_OPERAND (expr, 1);
1581 if (TREE_CODE (index) != INTEGER_CST)
1582 break;
1583 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1584 obstack_grow (&name_obstack, buffer, strlen (buffer));
1585 break;
1587 case ADDR_EXPR:
1588 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1589 break;
1591 case MEM_REF:
1592 make_fancy_name_1 (TREE_OPERAND (expr, 0));
/* Only a non-zero MEM_REF offset is worth printing.  */
1593 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1595 obstack_1grow (&name_obstack, '$');
1596 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1597 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1598 obstack_grow (&name_obstack, buffer, strlen (buffer));
1600 break;
1602 case BIT_FIELD_REF:
1603 case REALPART_EXPR:
1604 case IMAGPART_EXPR:
1605 gcc_unreachable (); /* we treat these as scalars. */
1606 break;
1607 default:
1608 break;
1612 /* Create a human readable name for a replacement variable based on the memory reference EXPR. */
1614 static char *
1615 make_fancy_name (tree expr)
1617 make_fancy_name_1 (expr);
1618 obstack_1grow (&name_obstack, '\0');
/* The string is allocated on name_obstack; the caller releases it with
   obstack_free, as create_access_replacement does below.  */
1619 return XOBFINISH (&name_obstack, char *);
1622 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1623 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1624 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1625 be non-NULL and is used to insert new statements either before or below
1626 the current one as specified by INSERT_AFTER. This function is not capable
1627 of handling bitfields. */
1629 tree
1630 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1631 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1632 bool insert_after)
1634 tree prev_base = base;
1635 tree off;
1636 tree mem_ref;
1637 HOST_WIDE_INT base_offset;
1638 unsigned HOST_WIDE_INT misalign;
1639 unsigned int align;
1641 /* Preserve address-space information. */
1642 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1643 if (as != TYPE_ADDR_SPACE (exp_type))
1644 exp_type = build_qualified_type (exp_type,
1645 TYPE_QUALS (exp_type)
1646 | ENCODE_QUAL_ADDR_SPACE (as));
/* OFFSET is in bits but must be byte-aligned; this function cannot
   handle bit-fields.  */
1648 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1649 get_object_alignment_1 (base, &align, &misalign);
1650 base = get_addr_base_and_unit_offset (base, &base_offset);
1652 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1653 offset such as array[var_index]. */
1654 if (!base)
/* Materialize the address into a new SSA name next to GSI.  */
1656 gassign *stmt;
1657 tree tmp, addr;
1659 gcc_checking_assert (gsi);
1660 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1661 addr = build_fold_addr_expr (unshare_expr (prev_base));
1662 STRIP_USELESS_TYPE_CONVERSION (addr);
1663 stmt = gimple_build_assign (tmp, addr);
1664 gimple_set_location (stmt, loc);
1665 if (insert_after)
1666 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1667 else
1668 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1670 off = build_int_cst (reference_alias_ptr_type (prev_base),
1671 offset / BITS_PER_UNIT);
1672 base = tmp;
/* An existing MEM_REF base: fold our byte offset into its own.  */
1674 else if (TREE_CODE (base) == MEM_REF)
1676 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1677 base_offset + offset / BITS_PER_UNIT);
1678 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1679 base = unshare_expr (TREE_OPERAND (base, 0));
1681 else
1683 off = build_int_cst (reference_alias_ptr_type (prev_base),
1684 base_offset + offset / BITS_PER_UNIT);
1685 base = build_fold_addr_expr (unshare_expr (base));
/* If the access is misaligned at OFFSET, weaken the type's alignment to
   the largest power of two dividing the misalignment.  */
1688 misalign = (misalign + offset) & (align - 1);
1689 if (misalign != 0)
1690 align = least_bit_hwi (misalign);
1691 if (align != TYPE_ALIGN (exp_type))
1692 exp_type = build_aligned_type (exp_type, align);
1694 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1695 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
/* Carry volatility and side effects over from the original reference.  */
1696 if (TREE_THIS_VOLATILE (prev_base))
1697 TREE_THIS_VOLATILE (mem_ref) = 1;
1698 if (TREE_SIDE_EFFECTS (prev_base))
1699 TREE_SIDE_EFFECTS (mem_ref) = 1;
1700 return mem_ref;
1703 /* Construct a memory reference to a part of an aggregate BASE at the given
1704 OFFSET and of the same type as MODEL. In case this is a reference to a
1705 bit-field, the function will replicate the last component_ref of model's
1706 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1707 build_ref_for_offset. */
1709 static tree
1710 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1711 struct access *model, gimple_stmt_iterator *gsi,
1712 bool insert_after)
1714 if (TREE_CODE (model->expr) == COMPONENT_REF
1715 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1717 /* This access represents a bit-field. */
1718 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
/* Build a reference to the containing record first, then replicate the
   model's COMPONENT_REF of the bit-field on top of it.  */
1720 offset -= int_bit_position (fld);
1721 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1722 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1723 gsi, insert_after);
1724 /* The flag will be set on the record type. */
1725 REF_REVERSE_STORAGE_ORDER (t) = 0;
1726 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1727 NULL_TREE);
1729 else
1730 return
1731 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1732 gsi, insert_after);
1735 /* Attempt to build a memory reference that we could put into a gimple
1736 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1737 create statements and returns NULL instead. This function also ignores
1738 alignment issues and so its results should never end up in non-debug
1739 statements. */
1741 static tree
1742 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1743 struct access *model)
1745 HOST_WIDE_INT base_offset;
1746 tree off;
/* Bit-fields cannot be described here; punt.  */
1748 if (TREE_CODE (model->expr) == COMPONENT_REF
1749 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1750 return NULL_TREE;
/* Punt on variable offsets, too — no statements may be created.  */
1752 base = get_addr_base_and_unit_offset (base, &base_offset);
1753 if (!base)
1754 return NULL_TREE;
1755 if (TREE_CODE (base) == MEM_REF)
1757 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1758 base_offset + offset / BITS_PER_UNIT);
1759 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1760 base = unshare_expr (TREE_OPERAND (base, 0));
1762 else
1764 off = build_int_cst (reference_alias_ptr_type (base),
1765 base_offset + offset / BITS_PER_UNIT);
1766 base = build_fold_addr_expr (unshare_expr (base));
1769 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1772 /* Construct a memory reference consisting of component_refs and array_refs to
1773 a part of an aggregate *RES (which is of type TYPE). The requested part
1774 should have type EXP_TYPE at be the given OFFSET. This function might not
1775 succeed, it returns true when it does and only then *RES points to something
1776 meaningful. This function should be used only to build expressions that we
1777 might need to present to user (e.g. in warnings). In all other situations,
1778 build_ref_for_model or build_ref_for_offset should be used instead. */
1780 static bool
1781 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1782 tree exp_type)
/* Each iteration descends one level into TYPE, consuming part of OFFSET.  */
1784 while (1)
1786 tree fld;
1787 tree tr_size, index, minidx;
1788 HOST_WIDE_INT el_size;
/* Done once the remaining offset is zero and the type matches.  */
1790 if (offset == 0 && exp_type
1791 && types_compatible_p (exp_type, type))
1792 return true;
1794 switch (TREE_CODE (type))
1796 case UNION_TYPE:
1797 case QUAL_UNION_TYPE:
1798 case RECORD_TYPE:
/* Try every field whose position/size can contain OFFSET and recurse
   into it; the first one that works out wins.  */
1799 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1801 HOST_WIDE_INT pos, size;
1802 tree tr_pos, expr, *expr_ptr;
1804 if (TREE_CODE (fld) != FIELD_DECL)
1805 continue;
1807 tr_pos = bit_position (fld);
1808 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1809 continue;
1810 pos = tree_to_uhwi (tr_pos);
1811 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1812 tr_size = DECL_SIZE (fld);
1813 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1814 continue;
1815 size = tree_to_uhwi (tr_size);
1816 if (size == 0)
1818 if (pos != offset)
1819 continue;
1821 else if (pos > offset || (pos + size) <= offset)
1822 continue;
1824 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1825 NULL_TREE);
1826 expr_ptr = &expr;
1827 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1828 offset - pos, exp_type))
1830 *res = expr;
1831 return true;
1834 return false;
1836 case ARRAY_TYPE:
/* Pick the element containing OFFSET and continue with the remainder
   of the offset inside that element.  */
1837 tr_size = TYPE_SIZE (TREE_TYPE (type));
1838 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1839 return false;
1840 el_size = tree_to_uhwi (tr_size);
1842 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1843 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1844 return false;
1845 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1846 if (!integer_zerop (minidx))
1847 index = int_const_binop (PLUS_EXPR, index, minidx);
1848 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1849 NULL_TREE, NULL_TREE);
1850 offset = offset % el_size;
1851 type = TREE_TYPE (type);
1852 break;
1854 default:
1855 if (offset != 0)
1856 return false;
/* Scalar reached: succeed only if no particular type was requested
   (a matching EXP_TYPE was already handled above).  */
1858 if (exp_type)
1859 return false;
1860 else
1861 return true;
1866 /* Return true iff TYPE is stdarg va_list type. */
1868 static inline bool
1869 is_va_list_type (tree type)
1871 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1874 /* Print message to dump file why a variable was rejected. */
1876 static void
1877 reject (tree var, const char *msg)
1879 if (dump_file && (dump_flags & TDF_DETAILS))
1881 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1882 print_generic_expr (dump_file, var, 0);
1883 fprintf (dump_file, "\n");
1887 /* Return true if VAR is a candidate for SRA. */
1889 static bool
1890 maybe_add_sra_candidate (tree var)
1892 tree type = TREE_TYPE (var);
1893 const char *msg;
1894 tree_node **slot;
/* Run VAR through a series of disqualifying checks, each reported to the
   dump file by reject.  */
1896 if (!AGGREGATE_TYPE_P (type))
1898 reject (var, "not aggregate");
1899 return false;
1901 /* Allow constant-pool entries (that "need to live in memory")
1902 unless we are doing IPA SRA. */
1903 if (needs_to_live_in_memory (var)
1904 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1906 reject (var, "needs to live in memory");
1907 return false;
1909 if (TREE_THIS_VOLATILE (var))
1911 reject (var, "is volatile");
1912 return false;
1914 if (!COMPLETE_TYPE_P (type))
1916 reject (var, "has incomplete type");
1917 return false;
1919 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1921 reject (var, "type size not fixed");
1922 return false;
1924 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1926 reject (var, "type size is zero");
1927 return false;
1929 if (type_internals_preclude_sra_p (type, &msg))
1931 reject (var, msg);
1932 return false;
1934 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1935 we also want to schedule it rather late. Thus we ignore it in
1936 the early pass. */
1937 (sra_mode == SRA_MODE_EARLY_INTRA
1938 && is_va_list_type (type)))
1940 reject (var, "is va_list");
1941 return false;
/* All checks passed: register VAR in the bitmap and the hash table.  */
1944 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1945 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1946 *slot = var;
1948 if (dump_file && (dump_flags & TDF_DETAILS))
1950 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1951 print_generic_expr (dump_file, var, 0);
1952 fprintf (dump_file, "\n");
1955 return true;
1958 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1959 those with type which is suitable for scalarization. */
1961 static bool
1962 find_var_candidates (void)
1964 tree var, parm;
1965 unsigned int i;
1966 bool ret = false;
1968 for (parm = DECL_ARGUMENTS (current_function_decl);
1969 parm;
1970 parm = DECL_CHAIN (parm))
1971 ret |= maybe_add_sra_candidate (parm);
1973 FOR_EACH_LOCAL_DECL (cfun, i, var)
1975 if (!VAR_P (var))
1976 continue;
1978 ret |= maybe_add_sra_candidate (var);
1981 return ret;
1984 /* Sort all accesses for the given variable, check for partial overlaps and
1985 return NULL if there are any. If there are none, pick a representative for
1986 each combination of offset and size and create a linked list out of them.
1987 Return the pointer to the first representative and make sure it is the first
1988 one in the vector of accesses. */
1990 static struct access *
1991 sort_and_splice_var_accesses (tree var)
1993 int i, j, access_count;
1994 struct access *res, **prev_acc_ptr = &res;
1995 vec<access_p> *access_vec;
1996 bool first = true;
1997 HOST_WIDE_INT low = -1, high = 0;
1999 access_vec = get_base_access_vector (var);
2000 if (!access_vec)
2001 return NULL;
2002 access_count = access_vec->length ();
2004 /* Sort by <OFFSET, SIZE>. */
2005 access_vec->qsort (compare_access_positions);
2007 i = 0;
/* Outer loop: one iteration per distinct <offset, size> group; ACCESS is
   the group's representative (sorted first, so scalar if any is).  */
2008 while (i < access_count)
2010 struct access *access = (*access_vec)[i];
2011 bool grp_write = access->write;
2012 bool grp_read = !access->write;
2013 bool grp_scalar_write = access->write
2014 && is_gimple_reg_type (access->type);
2015 bool grp_scalar_read = !access->write
2016 && is_gimple_reg_type (access->type);
2017 bool grp_assignment_read = access->grp_assignment_read;
2018 bool grp_assignment_write = access->grp_assignment_write;
2019 bool multiple_scalar_reads = false;
2020 bool total_scalarization = access->grp_total_scalarization;
2021 bool grp_partial_lhs = access->grp_partial_lhs;
2022 bool first_scalar = is_gimple_reg_type (access->type);
2023 bool unscalarizable_region = access->grp_unscalarizable_region;
/* [LOW, HIGH) tracks the current covering interval; a partial overlap
   (start inside, end outside) makes the whole variable unsplittable.  */
2025 if (first || access->offset >= high)
2027 first = false;
2028 low = access->offset;
2029 high = access->offset + access->size;
2031 else if (access->offset > low && access->offset + access->size > high)
2032 return NULL;
2033 else
2034 gcc_assert (access->offset >= low
2035 && access->offset + access->size <= high);
/* Inner loop: fold every later access with the same offset and size
   into the representative's group flags.  */
2037 j = i + 1;
2038 while (j < access_count)
2040 struct access *ac2 = (*access_vec)[j];
2041 if (ac2->offset != access->offset || ac2->size != access->size)
2042 break;
2043 if (ac2->write)
2045 grp_write = true;
2046 grp_scalar_write = (grp_scalar_write
2047 || is_gimple_reg_type (ac2->type));
2049 else
2051 grp_read = true;
2052 if (is_gimple_reg_type (ac2->type))
2054 if (grp_scalar_read)
2055 multiple_scalar_reads = true;
2056 else
2057 grp_scalar_read = true;
2060 grp_assignment_read |= ac2->grp_assignment_read;
2061 grp_assignment_write |= ac2->grp_assignment_write;
2062 grp_partial_lhs |= ac2->grp_partial_lhs;
2063 unscalarizable_region |= ac2->grp_unscalarizable_region;
2064 total_scalarization |= ac2->grp_total_scalarization;
2065 relink_to_new_repr (access, ac2);
2067 /* If there are both aggregate-type and scalar-type accesses with
2068 this combination of size and offset, the comparison function
2069 should have put the scalars first. */
2070 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2071 ac2->group_representative = access;
2072 j++;
2075 i = j;
/* Publish the merged flags on the representative and append it to the
   linked list of representatives returned to the caller.  */
2077 access->group_representative = access;
2078 access->grp_write = grp_write;
2079 access->grp_read = grp_read;
2080 access->grp_scalar_read = grp_scalar_read;
2081 access->grp_scalar_write = grp_scalar_write;
2082 access->grp_assignment_read = grp_assignment_read;
2083 access->grp_assignment_write = grp_assignment_write;
2084 access->grp_hint = total_scalarization
2085 || (multiple_scalar_reads && !constant_decl_p (var));
2086 access->grp_total_scalarization = total_scalarization;
2087 access->grp_partial_lhs = grp_partial_lhs;
2088 access->grp_unscalarizable_region = unscalarizable_region;
2089 if (access->first_link)
2090 add_access_to_work_queue (access);
2092 *prev_acc_ptr = access;
2093 prev_acc_ptr = &access->next_grp;
2096 gcc_assert (res == (*access_vec)[0]);
2097 return res;
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      /* Debug-only replacements never appear in generated code, so a raw
	 temporary with just a DECL_CONTEXT is sufficient.  */
      repl = create_tmp_var_raw (access->type);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    /* Drop any special alignment on the type if it's not on the main
       variant.  This avoids issues with weirdo ABIs like AAPCS.  */
    repl = create_tmp_var (build_qualified_type
			     (TYPE_MAIN_VARIANT (access->type),
			      TYPE_QUALS (access->type)), "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      /* Complex and vector temporaries can stay in SSA form unless the
	 access is a partial LHS.  */
      if (!access->grp_partial_lhs)
	DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
	   && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      /* The base is a user variable: give the replacement a pretty name
	 and a DECL_DEBUG_EXPR so debug info can refer back to the
	 original aggregate component.  */
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      DECL_NAMELESS (repl) = 1;
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares is whether something is constant or variable
	 and that get_ref_base_and_extent works properly on the
	 expression.  It cannot handle accesses at a non-constant offset
	 though, so just give up in those cases.  */
      for (d = debug_expr;
	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
	   d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    /* Non-constant index or element size makes the offset
	       variable -- give up.  */
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
	      fail = true;
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
	      fail = true;
	    /* FALLTHRU */
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
	      fail = true;
	    break;
	  case MEM_REF:
	    /* Only MEM_REFs based on an ADDR_EXPR are acceptable; step
	       through the ADDR_EXPR so the walk continues on its operand.  */
	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
	      fail = true;
	    else
	      d = TREE_OPERAND (d, 0);
	    break;
	  default:
	    break;
	  }
      if (!fail)
	{
	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
	}
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    /* Artificial or ignored bases get no pretty name; suppress
       uninitialized-use warnings on the replacement.  */
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
	{
	  fprintf (dump_file, "Created a debug-only replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u\n",
		   (unsigned) access->offset, (unsigned) access->size);
	}
      else
	{
	  fprintf (dump_file, "Created a replacement for ");
	  print_generic_expr (dump_file, access->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u: ",
		   (unsigned) access->offset, (unsigned) access->size);
	  print_generic_expr (dump_file, repl, 0);
	  fprintf (dump_file, "\n");
	}
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, which must exist.  Asserts (in checking
   builds) that analysis has already created the replacement decl.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
2228 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2229 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2230 to it is not "within" the root. Return false iff some accesses partially
2231 overlap. */
2233 static bool
2234 build_access_subtree (struct access **access)
2236 struct access *root = *access, *last_child = NULL;
2237 HOST_WIDE_INT limit = root->offset + root->size;
2239 *access = (*access)->next_grp;
2240 while (*access && (*access)->offset + (*access)->size <= limit)
2242 if (!last_child)
2243 root->first_child = *access;
2244 else
2245 last_child->next_sibling = *access;
2246 last_child = *access;
2248 if (!build_access_subtree (access))
2249 return false;
2252 if (*access && (*access)->offset < limit)
2253 return false;
2255 return true;
2258 /* Build a tree of access representatives, ACCESS is the pointer to the first
2259 one, others are linked in a list by the next_grp field. Return false iff
2260 some accesses partially overlap. */
2262 static bool
2263 build_access_trees (struct access *access)
2265 while (access)
2267 struct access *root = access;
2269 if (!build_access_subtree (&access))
2270 return false;
2271 root->next_grp = access;
2273 return true;
2276 /* Return true if expr contains some ARRAY_REFs into a variable bounded
2277 array. */
2279 static bool
2280 expr_with_var_bounded_array_refs_p (tree expr)
2282 while (handled_component_p (expr))
2284 if (TREE_CODE (expr) == ARRAY_REF
2285 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2286 return true;
2287 expr = TREE_OPERAND (expr, 0);
2289 return false;
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */

static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  /* Highest offset covered so far by children without a gap.  */
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  /* Inherit read/write/scalarization flags from the parent access.  */
  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  /* Recurse into children; a scalar-typed root never allows replacements
     in its children.  */
  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
					     root->offset, root->reverse,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      /* Write-only scalar leaf of a variable that cannot be scalarized
	 away: only a debug replacement is useful.  */
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar || !allow_replacements)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL
	   || constant_decl_p (root->base))
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
2445 /* Analyze all access trees linked by next_grp by the means of
2446 analyze_access_subtree. */
2447 static bool
2448 analyze_access_trees (struct access *access)
2450 bool ret = false;
2452 while (access)
2454 if (analyze_access_subtree (access, NULL, true))
2455 ret = true;
2456 access = access->next_grp;
2459 return ret;
2462 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2463 SIZE would conflict with an already existing one. If exactly such a child
2464 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2466 static bool
2467 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2468 HOST_WIDE_INT size, struct access **exact_match)
2470 struct access *child;
2472 for (child = lacc->first_child; child; child = child->next_sibling)
2474 if (child->offset == norm_offset && child->size == size)
2476 *exact_match = child;
2477 return true;
2480 if (child->offset < norm_offset + size
2481 && child->offset + child->size > norm_offset)
2482 return true;
2485 return false;
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this access
   is created long after all splicing and sorting, it's not located in any
   access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  struct access *access = access_pool.allocate ();
  memset (access, 0, sizeof (struct access));
  /* Try to build a user-friendly reference; if that fails, fall back to a
     synthetic one and suppress warnings for it.  */
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;
  access->reverse = model->reverse;

  /* Insert into PARENT's child list, keeping siblings sorted by offset.  */
  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC.  Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  /* Offset adjustment to translate RACC-relative offsets to LACC.  */
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      /* RHS is scalar: adopt its type on the childless LHS access so the
	 copy can be done as a scalar; nothing to propagate in any case.  */
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  /* Exact-match child already exists: just update hints and
	     recurse; a mere overlap means nothing can be propagated.  */
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
2604 /* Propagate all subaccesses across assignment links. */
2606 static void
2607 propagate_all_subaccesses (void)
2609 while (work_queue_head)
2611 struct access *racc = pop_access_from_work_queue ();
2612 struct assign_link *link;
2614 gcc_assert (racc->first_link);
2616 for (link = racc->first_link; link; link = link->next)
2618 struct access *lacc = link->lacc;
2620 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2621 continue;
2622 lacc = lacc->group_representative;
2623 if (propagate_subaccesses_across_link (lacc, racc)
2624 && lacc->first_link)
2625 add_access_to_work_queue (lacc);
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
  bool optimize_speed_p = !optimize_function_for_size_p (cfun);

  enum compiler_param param = optimize_speed_p
			? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
			: PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;

  /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
     fall back to a target default.  */
  unsigned HOST_WIDE_INT max_scalarization_size
    = global_options_set.x_param_values[param]
      ? PARAM_VALUE (param)
      : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;

  /* The parameter is in bytes; accesses are tracked in bits.  */
  max_scalarization_size *= BITS_PER_UNIT;

  /* First attempt total scalarization of suitable small candidates.  */
  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var)))
	  {
	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
		<= max_scalarization_size)
	      {
		create_total_scalarization_access (var);
		completely_scalarize (var, TREE_TYPE (var), 0, var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  /* Build access trees; disqualify candidates with overlapping accesses.
     Iterate over a copy because disqualification mutates the bitmap.  */
  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  /* Decide on replacements; again iterate over a copy of the bitmap.  */
  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  if INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  /* Never write anything into constant pool decls.  See PR70602.  */
  if (!write && constant_decl_p (agg))
    return;
  /* Walk ACCESS and its siblings; recurse into children.  */
  do
    {
      /* Siblings are sorted by offset, so once past the chunk we are done.  */
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gassign *stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      /* Suppress warnings: the replacement may be read here before
		 any explicit initialization of it is seen.  */
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  /* Debug-only replacement: emit a debug bind instead of a copy.  */
	  gdebug *ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
  while (access);
}
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gassign *stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      /* Debug-only replacement: record the zero value in a debug bind.  */
      gdebug *ds
	= gimple_build_debug_bind (get_access_replacement (access),
				   build_zero_cst (access->type),
				   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
		 bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      tree rep = get_access_replacement (access);
      /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is GIMPLE's
	 representation of a clobber.  */
      tree clobber = build_constructor (access->type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple *stmt = gimple_build_assign (rep, clobber);

      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    clobber_subtree (child, gsi, insert_after, loc);
}
2899 /* Search for an access representative for the given expression EXPR and
2900 return it or NULL if it cannot be found. */
2902 static struct access *
2903 get_access_for_expr (tree expr)
2905 HOST_WIDE_INT offset, size, max_size;
2906 tree base;
2907 bool reverse;
2909 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2910 a different size than the size of its argument and we need the latter
2911 one. */
2912 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2913 expr = TREE_OPERAND (expr, 0);
2915 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2916 if (max_size == -1 || !DECL_P (base))
2917 return NULL;
2919 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2920 return NULL;
2922 return get_var_base_offset_size_access (base, offset, max_size);
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  Return true iff the expression belonged to a
   candidate access and was (possibly) modified.  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr, orig_expr;

  /* Look through a BIT_FIELD_REF wrapper; remember it so only the selected
     chunk of the aggregate is copied below.  */
  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);
  orig_expr = *expr;

  loc = gimple_location (gsi_stmt (*gsi));
  gimple_stmt_iterator alt_gsi = gsi_none ();
  /* Statements ending a BB cannot be followed by new statements in the same
     BB; redirect insertions onto the single non-EH successor edge.  */
  if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
    {
      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
      gsi = &alt_gsi;
    }

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);

	  if (write)
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
					    NULL_TREE,
					    gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      /* Restrict subtree copying to the bit range a BIT_FIELD_REF selects
	 when its position and size are compile-time constants.  */
      if (bfr
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
	{
	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
	  start_offset = access->offset
	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, orig_expr, access->offset,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of a LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS.  */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS.  */

/* Bundle of state threaded through the helpers that rewrite an
   aggregate-to-aggregate assignment.  */
struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.   */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
3072 /* Store all replacements in the access tree rooted in TOP_RACC either to their
3073 base aggregate if there are unscalarized data or directly to LHS of the
3074 statement that is pointed to by GSI otherwise. */
3076 static void
3077 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3079 tree src;
3080 if (sad->top_racc->grp_unscalarized_data)
3082 src = sad->assignment_rhs;
3083 sad->refreshed = SRA_UDH_RIGHT;
3085 else
3087 src = sad->assignment_lhs;
3088 sad->refreshed = SRA_UDH_LEFT;
3090 generate_subtree_copies (sad->top_racc->first_child, src,
3091 sad->top_racc->offset, 0, 0,
3092 &sad->old_gsi, false, false, sad->loc);
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc,
				 struct subreplacement_assignment_data *sad)
{
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      /* Offset of the corresponding region within the RHS subtree.  */
      HOST_WIDE_INT offset;
      offset = lacc->offset - sad->left_offset + sad->top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gassign *stmt;
	  tree rhs;

	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      /* Matching scalar replacement on the RHS: load directly,
		 converting the type if necessary.  */
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
				       lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
						NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (sad->refreshed == SRA_UDH_NONE)
		handle_unscalarized_data_in_subtree (sad);

	      if (sad->refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      else
		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (sad->new_gsi,
						rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, sad->loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (sad->refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (sad);

	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      /* Emit a debug bind describing where the value now lives.  */
	      gdebug *ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (sad->top_racc,
							    offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		{
		  if (racc->grp_write || constant_decl_p (racc->base))
		    drhs = get_access_replacement (racc);
		  else
		    drhs = NULL;
		}
	      else if (sad->refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
						  lacc->offset, lacc);
	      else if (sad->refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
						  offset, lacc);
	      else
		drhs = NULL_TREE;
	      if (drhs
		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
					lacc->type, drhs);
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (sad->old_gsi));
	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, sad);
    }
}
3198 /* Result code for SRA assignment modification.  Tells the caller
   (sra_modify_function_body) whether the statement survived, so it knows
   whether the statement iterator still needs to be advanced.  */
3199 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3200 SRA_AM_MODIFIED, /* stmt changed but not
3201 removed */
3202 SRA_AM_REMOVED }; /* stmt eliminated */
3204 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
3205 to the assignment and GSI is the statement iterator pointing at it.  Returns
3206 the same values as sra_modify_assign.  */
3208 static enum assignment_mod_result
3209 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3211 tree lhs = gimple_assign_lhs (stmt);
3212 struct access *acc = get_access_for_expr (lhs);
3213 if (!acc)
3214 return SRA_AM_NONE;
3215 location_t loc = gimple_location (stmt);
3217 if (gimple_clobber_p (stmt))
3219 /* Clobber the replacement variable.  */
3220 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3221 /* Remove clobbers of fully scalarized variables, they are dead.  */
3222 if (acc->grp_covered)
3224 unlink_stmt_vdef (stmt);
3225 gsi_remove (gsi, true);
3226 release_defs (stmt);
3227 return SRA_AM_REMOVED;
3229 else
3230 return SRA_AM_MODIFIED;
3233 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3235 /* I have never seen this code path trigger but if it can happen the
3236 following should handle it gracefully.  */
3237 if (access_has_children_p (acc))
3238 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3239 true, true, loc);
3240 return SRA_AM_MODIFIED;
/* An empty CONSTRUCTOR zero-initializes the LHS; emit explicit zero stores
   into the scalar replacements instead, and drop the original statement
   when the access tree covers the whole aggregate.  */
3243 if (acc->grp_covered)
3245 init_subtree_with_zero (acc, gsi, false, loc);
3246 unlink_stmt_vdef (stmt);
3247 gsi_remove (gsi, true);
3248 release_defs (stmt);
3249 return SRA_AM_REMOVED;
3251 else
3253 init_subtree_with_zero (acc, gsi, true, loc);
3254 return SRA_AM_MODIFIED;
3258 /* Create and return a new suitable default definition SSA_NAME for RACC which
3259 is an access describing an uninitialized part of an aggregate that is being
3260 loaded.  */
3262 static tree
3263 get_repl_default_def_ssa_name (struct access *racc)
3265 gcc_checking_assert (!racc->grp_to_be_replaced
3266 && !racc->grp_to_be_debug_replaced);
/* Lazily create the replacement decl; its default definition stands for the
   (undefined) initial value of the uninitialized part.  */
3267 if (!racc->replacement_decl)
3268 racc->replacement_decl = create_access_replacement (racc);
3269 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3272 /* Return true if REF has an VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3273 bit-field field declaration somewhere in it. */
3275 static inline bool
3276 contains_vce_or_bfcref_p (const_tree ref)
3278 while (handled_component_p (ref))
3280 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3281 || (TREE_CODE (ref) == COMPONENT_REF
3282 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3283 return true;
3284 ref = TREE_OPERAND (ref, 0);
3287 return false;
3290 /* Examine both sides of the assignment statement pointed to by STMT, replace
3291 them with a scalar replacement if there is one and generate copying of
3292 replacements if scalarized aggregates have been used in the assignment.  GSI
3293 is used to hold generated statements for type conversions and subtree
3294 copying.  */
3296 static enum assignment_mod_result
3297 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3299 struct access *lacc, *racc;
3300 tree lhs, rhs;
3301 bool modify_this_stmt = false;
3302 bool force_gimple_rhs = false;
3303 location_t loc;
3304 gimple_stmt_iterator orig_gsi = *gsi;
3306 if (!gimple_assign_single_p (stmt))
3307 return SRA_AM_NONE;
3308 lhs = gimple_assign_lhs (stmt);
3309 rhs = gimple_assign_rhs1 (stmt);
3311 if (TREE_CODE (rhs) == CONSTRUCTOR)
3312 return sra_modify_constructor_assign (stmt, gsi);
/* Complex-part and bit-field references are handled expression-wise on each
   side rather than as aggregate copies.  */
3314 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3315 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3316 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF
3318 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3319 gsi, false);
3320 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3321 gsi, true);
3322 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3325 lacc = get_access_for_expr (lhs);
3326 racc = get_access_for_expr (rhs);
3327 if (!lacc && !racc)
3328 return SRA_AM_NONE;
3329 /* Avoid modifying initializations of constant-pool replacements.  */
3330 if (racc && (racc->replacement_decl == lhs))
3331 return SRA_AM_NONE;
3333 loc = gimple_location (stmt);
3334 if (lacc && lacc->grp_to_be_replaced)
3336 lhs = get_access_replacement (lacc);
3337 gimple_assign_set_lhs (stmt, lhs);
3338 modify_this_stmt = true;
3339 if (lacc->grp_partial_lhs)
3340 force_gimple_rhs = true;
3341 sra_stats.exprs++;
3344 if (racc && racc->grp_to_be_replaced)
3346 rhs = get_access_replacement (racc);
3347 modify_this_stmt = true;
3348 if (racc->grp_partial_lhs)
3349 force_gimple_rhs = true;
3350 sra_stats.exprs++;
/* A load from a fully-scalarized, never-written aggregate yields an
   undefined value; use a default-definition SSA name for it.  */
3352 else if (racc
3353 && !racc->grp_unscalarized_data
3354 && !racc->grp_unscalarizable_region
3355 && TREE_CODE (lhs) == SSA_NAME
3356 && !access_has_replacements_p (racc))
3358 rhs = get_repl_default_def_ssa_name (racc);
3359 modify_this_stmt = true;
3360 sra_stats.exprs++;
/* If either side was replaced by a scalar, reconcile a possible type
   mismatch between the new LHS and RHS.  */
3363 if (modify_this_stmt)
3365 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3367 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3368 ??? This should move to fold_stmt which we simply should
3369 call after building a VIEW_CONVERT_EXPR here.  */
3370 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3371 && !contains_bitfld_component_ref_p (lhs))
3373 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3374 gimple_assign_set_lhs (stmt, lhs);
3376 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3377 && !contains_vce_or_bfcref_p (rhs))
3378 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3380 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3382 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3383 rhs);
3384 if (is_gimple_reg_type (TREE_TYPE (lhs))
3385 && TREE_CODE (lhs) != SSA_NAME)
3386 force_gimple_rhs = true;
/* Keep debug info alive for a LHS that is only tracked, not replaced:
   emit a debug bind of its replacement to the (converted) RHS.  */
3391 if (lacc && lacc->grp_to_be_debug_replaced)
3393 tree dlhs = get_access_replacement (lacc);
3394 tree drhs = unshare_expr (rhs);
3395 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3397 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3398 && !contains_vce_or_bfcref_p (drhs))
3399 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3400 if (drhs
3401 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3402 TREE_TYPE (drhs)))
3403 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3404 TREE_TYPE (dlhs), drhs);
3406 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3407 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3410 /* From this point on, the function deals with assignments in between
3411 aggregates when at least one has scalar reductions of some of its
3412 components.  There are three possible scenarios: 1) Both the LHS and RHS have
3413 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3415 In the first case, we would like to load the LHS components from RHS
3416 components whenever possible.  If that is not possible, we would like to
3417 read it directly from the RHS (after updating it by storing in it its own
3418 components).  If there are some necessary unscalarized data in the LHS,
3419 those will be loaded by the original assignment too.  If neither of these
3420 cases happen, the original statement can be removed.  Most of this is done
3421 by load_assign_lhs_subreplacements.
3423 In the second case, we would like to store all RHS scalarized components
3424 directly into LHS and if they cover the aggregate completely, remove the
3425 statement too.  In the third case, we want the LHS components to be loaded
3426 directly from the RHS (DSE will remove the original statement if it
3427 becomes redundant).
3429 This is a bit complex but manageable when types match and when unions do
3430 not cause confusion in a way that we cannot really load a component of LHS
3431 from the RHS or vice versa (the access representing this level can have
3432 subaccesses that are accessible only through a different union field at a
3433 higher level - different from the one used in the examined expression).
3434 Unions are fun.
3436 Therefore, I specially handle a fourth case, happening when there is a
3437 specific type cast or it is impossible to locate a scalarized subaccess on
3438 the other side of the expression.  If that happens, I simply "refresh" the
3439 RHS by storing in it its scalarized components, leave the original statement
3440 there to do the copying and then load the scalar replacements of the LHS.
3441 This is what the first branch does.  */
3443 if (modify_this_stmt
3444 || gimple_has_volatile_ops (stmt)
3445 || contains_vce_or_bfcref_p (rhs)
3446 || contains_vce_or_bfcref_p (lhs)
3447 || stmt_ends_bb_p (stmt))
3449 /* No need to copy into a constant-pool, it comes pre-initialized.  */
3450 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3451 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3452 gsi, false, false, loc);
3453 if (access_has_children_p (lacc))
3455 gimple_stmt_iterator alt_gsi = gsi_none ();
/* When STMT ends the BB, the LHS re-loads must go on the (single non-EH)
   outgoing edge instead of after the statement.  */
3456 if (stmt_ends_bb_p (stmt))
3458 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3459 gsi = &alt_gsi;
3461 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3462 gsi, true, true, loc);
3464 sra_stats.separate_lhs_rhs_handling++;
3466 /* This gimplification must be done after generate_subtree_copies,
3467 lest we insert the subtree copies in the middle of the gimplified
3468 sequence.  */
3469 if (force_gimple_rhs)
3470 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3471 true, GSI_SAME_STMT);
3472 if (gimple_assign_rhs1 (stmt) != rhs)
3474 modify_this_stmt = true;
3475 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3476 gcc_assert (stmt == gsi_stmt (orig_gsi));
3479 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3481 else
3483 if (access_has_children_p (lacc)
3484 && access_has_children_p (racc)
3485 /* When an access represents an unscalarizable region, it usually
3486 represents accesses with variable offset and thus must not be used
3487 to generate new memory accesses.  */
3488 && !lacc->grp_unscalarizable_region
3489 && !racc->grp_unscalarizable_region)
3491 struct subreplacement_assignment_data sad;
3493 sad.left_offset = lacc->offset;
3494 sad.assignment_lhs = lhs;
3495 sad.assignment_rhs = rhs;
3496 sad.top_racc = racc;
3497 sad.old_gsi = *gsi;
3498 sad.new_gsi = gsi;
3499 sad.loc = gimple_location (stmt);
3500 sad.refreshed = SRA_UDH_NONE;
3502 if (lacc->grp_read && !lacc->grp_covered)
3503 handle_unscalarized_data_in_subtree (&sad);
3505 load_assign_lhs_subreplacements (lacc, &sad);
3506 if (sad.refreshed != SRA_UDH_RIGHT)
3508 gsi_next (gsi);
3509 unlink_stmt_vdef (stmt);
3510 gsi_remove (&sad.old_gsi, true);
3511 release_defs (stmt);
3512 sra_stats.deleted++;
3513 return SRA_AM_REMOVED;
3516 else
3518 if (access_has_children_p (racc)
3519 && !racc->grp_unscalarized_data
3520 && TREE_CODE (lhs) != SSA_NAME)
3522 if (dump_file)
3524 fprintf (dump_file, "Removing load: ");
3525 print_gimple_stmt (dump_file, stmt, 0, 0);
3527 generate_subtree_copies (racc->first_child, lhs,
3528 racc->offset, 0, 0, gsi,
3529 false, false, loc);
3530 gcc_assert (stmt == gsi_stmt (*gsi));
3531 unlink_stmt_vdef (stmt);
3532 gsi_remove (gsi, true);
3533 release_defs (stmt);
3534 sra_stats.deleted++;
3535 return SRA_AM_REMOVED;
3537 /* Restore the aggregate RHS from its components so the
3538 prevailing aggregate copy does the right thing.  */
3539 if (access_has_children_p (racc))
3540 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3541 gsi, false, false, loc);
3542 /* Re-load the components of the aggregate copy destination.
3543 But use the RHS aggregate to load from to expose more
3544 optimization opportunities.  */
3545 if (access_has_children_p (lacc))
3546 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3547 0, 0, gsi, true, true, loc);
3550 return SRA_AM_NONE;
3554 /* Set any scalar replacements of values in the constant pool to the initial
3555 value of the constant. (Constant-pool decls like *.LC0 have effectively
3556 been initialized before the program starts, we must do the same for their
3557 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3558 the function's entry block. */
3560 static void
3561 initialize_constant_pool_replacements (void)
3563 gimple_seq seq = NULL;
3564 gimple_stmt_iterator gsi = gsi_start (seq);
3565 bitmap_iterator bi;
3566 unsigned i;
3568 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3570 tree var = candidate (i);
3571 if (!constant_decl_p (var))
3572 continue;
3573 vec<access_p> *access_vec = get_base_access_vector (var);
3574 if (!access_vec)
3575 continue;
3576 for (unsigned i = 0; i < access_vec->length (); i++)
3578 struct access *access = (*access_vec)[i];
3579 if (!access->replacement_decl)
3580 continue;
3581 gassign *stmt
3582 = gimple_build_assign (get_access_replacement (access),
3583 unshare_expr (access->expr));
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3586 fprintf (dump_file, "Generating constant initializer: ");
3587 print_gimple_stmt (dump_file, stmt, 0, 1);
3588 fprintf (dump_file, "\n");
3590 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3591 update_stmt (stmt);
3595 seq = gsi_seq (gsi);
3596 if (seq)
3597 gsi_insert_seq_on_edge_immediate (
3598 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3601 /* Traverse the function body and perform all modifications as decided in
3602 analyze_all_variable_accesses.  Return true iff the CFG has been
3603 changed.  */
3605 static bool
3606 sra_modify_function_body (void)
3608 bool cfg_changed = false;
3609 basic_block bb;
3611 initialize_constant_pool_replacements ();
3613 FOR_EACH_BB_FN (bb, cfun)
3615 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3616 while (!gsi_end_p (gsi))
3618 gimple *stmt = gsi_stmt (gsi);
3619 enum assignment_mod_result assign_result;
3620 bool modified = false, deleted = false;
3621 tree *t;
3622 unsigned i;
3624 switch (gimple_code (stmt))
3626 case GIMPLE_RETURN:
3627 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3628 if (*t != NULL_TREE)
3629 modified |= sra_modify_expr (t, &gsi, false);
3630 break;
3632 case GIMPLE_ASSIGN:
3633 assign_result = sra_modify_assign (stmt, &gsi);
3634 modified |= assign_result == SRA_AM_MODIFIED;
3635 deleted = assign_result == SRA_AM_REMOVED;
3636 break;
3638 case GIMPLE_CALL:
3639 /* Operands must be processed before the lhs.  */
3640 for (i = 0; i < gimple_call_num_args (stmt); i++)
3642 t = gimple_call_arg_ptr (stmt, i);
3643 modified |= sra_modify_expr (t, &gsi, false);
3646 if (gimple_call_lhs (stmt))
3648 t = gimple_call_lhs_ptr (stmt);
3649 modified |= sra_modify_expr (t, &gsi, true);
3651 break;
3653 case GIMPLE_ASM:
3655 gasm *asm_stmt = as_a <gasm *> (stmt);
3656 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3658 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3659 modified |= sra_modify_expr (t, &gsi, false);
3661 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3663 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3664 modified |= sra_modify_expr (t, &gsi, true);
3667 break;
3669 default:
3670 break;
3673 if (modified)
3675 update_stmt (stmt);
3676 if (maybe_clean_eh_stmt (stmt)
3677 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3678 cfg_changed = true;
/* When the statement was deleted, GSI already points at the next one;
   only advance for statements that survived.  */
3680 if (!deleted)
3681 gsi_next (&gsi);
3685 gsi_commit_edge_inserts ();
3686 return cfg_changed;
3689 /* Generate statements initializing scalar replacements of parts of function
3690 parameters.  The statements are emitted into a fresh sequence that is then
   inserted on the edge out of the entry block.  */
3692 static void
3693 initialize_parameter_reductions (void)
3695 gimple_stmt_iterator gsi;
3696 gimple_seq seq = NULL;
3697 tree parm;
3699 gsi = gsi_start (seq);
3700 for (parm = DECL_ARGUMENTS (current_function_decl);
3701 parm;
3702 parm = DECL_CHAIN (parm))
3704 vec<access_p> *access_vec;
3705 struct access *access;
3707 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3708 continue;
3709 access_vec = get_base_access_vector (parm);
3710 if (!access_vec)
3711 continue;
3713 for (access = (*access_vec)[0];
3714 access;
3715 access = access->next_grp)
3716 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3717 EXPR_LOCATION (parm));
3720 seq = gsi_seq (gsi);
3721 if (seq)
3722 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3725 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3726 it reveals there are components of some aggregates to be scalarized, it runs
3727 the required transformations.  Returns TODO_* flags for the pass manager.  */
3728 static unsigned int
3729 perform_intra_sra (void)
3731 int ret = 0;
3732 sra_initialize ();
3734 if (!find_var_candidates ())
3735 goto out;
3737 if (!scan_function ())
3738 goto out;
3740 if (!analyze_all_variable_accesses ())
3741 goto out;
3743 if (sra_modify_function_body ())
3744 ret = TODO_update_ssa | TODO_cleanup_cfg;
3745 else
3746 ret = TODO_update_ssa;
/* Scalar replacements of parameter pieces must be given their initial
   values on function entry.  */
3747 initialize_parameter_reductions ();
3749 statistics_counter_event (cfun, "Scalar replacements created",
3750 sra_stats.replacements);
3751 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3752 statistics_counter_event (cfun, "Subtree copy stmts",
3753 sra_stats.subtree_copies);
3754 statistics_counter_event (cfun, "Subreplacement stmts",
3755 sra_stats.subreplacements);
3756 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3757 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3758 sra_stats.separate_lhs_rhs_handling);
3760 out:
3761 sra_deinitialize ();
3762 return ret;
3765 /* Perform early intraprocedural SRA.  Sets the global pass mode before
   delegating to perform_intra_sra.  */
3766 static unsigned int
3767 early_intra_sra (void)
3769 sra_mode = SRA_MODE_EARLY_INTRA;
3770 return perform_intra_sra ();
3773 /* Perform "late" intraprocedural SRA.  Sets the global pass mode before
   delegating to perform_intra_sra.  */
3774 static unsigned int
3775 late_intra_sra (void)
3777 sra_mode = SRA_MODE_INTRA;
3778 return perform_intra_sra ();
/* Gate for both intraprocedural SRA passes: enabled by -ftree-sra and the
   tree_sra debug counter.  */
3782 static bool
3783 gate_intra_sra (void)
3785 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
/* Pass descriptor and opt_pass wrapper for the early intraprocedural SRA
   pass ("esra"); executes early_intra_sra.  */
3789 namespace {
3791 const pass_data pass_data_sra_early =
3793 GIMPLE_PASS, /* type */
3794 "esra", /* name */
3795 OPTGROUP_NONE, /* optinfo_flags */
3796 TV_TREE_SRA, /* tv_id */
3797 ( PROP_cfg | PROP_ssa ), /* properties_required */
3798 0, /* properties_provided */
3799 0, /* properties_destroyed */
3800 0, /* todo_flags_start */
3801 TODO_update_ssa, /* todo_flags_finish */
3804 class pass_sra_early : public gimple_opt_pass
3806 public:
3807 pass_sra_early (gcc::context *ctxt)
3808 : gimple_opt_pass (pass_data_sra_early, ctxt)
3811 /* opt_pass methods: */
3812 virtual bool gate (function *) { return gate_intra_sra (); }
3813 virtual unsigned int execute (function *) { return early_intra_sra (); }
3815 }; // class pass_sra_early
3817 } // anon namespace
/* Factory used by the pass manager to instantiate the "esra" pass.  */
3819 gimple_opt_pass *
3820 make_pass_sra_early (gcc::context *ctxt)
3822 return new pass_sra_early (ctxt);
/* Pass descriptor and opt_pass wrapper for the late intraprocedural SRA
   pass ("sra"); executes late_intra_sra.  */
3825 namespace {
3827 const pass_data pass_data_sra =
3829 GIMPLE_PASS, /* type */
3830 "sra", /* name */
3831 OPTGROUP_NONE, /* optinfo_flags */
3832 TV_TREE_SRA, /* tv_id */
3833 ( PROP_cfg | PROP_ssa ), /* properties_required */
3834 0, /* properties_provided */
3835 0, /* properties_destroyed */
3836 TODO_update_address_taken, /* todo_flags_start */
3837 TODO_update_ssa, /* todo_flags_finish */
3840 class pass_sra : public gimple_opt_pass
3842 public:
3843 pass_sra (gcc::context *ctxt)
3844 : gimple_opt_pass (pass_data_sra, ctxt)
3847 /* opt_pass methods: */
3848 virtual bool gate (function *) { return gate_intra_sra (); }
3849 virtual unsigned int execute (function *) { return late_intra_sra (); }
3851 }; // class pass_sra
3853 } // anon namespace
/* Factory used by the pass manager to instantiate the "sra" pass.  */
3855 gimple_opt_pass *
3856 make_pass_sra (gcc::context *ctxt)
3858 return new pass_sra (ctxt);
3862 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3863 parameter. */
3865 static bool
3866 is_unused_scalar_param (tree parm)
3868 tree name;
3869 return (is_gimple_reg (parm)
3870 && (!(name = ssa_default_def (cfun, parm))
3871 || has_zero_uses (name)));
3874 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3875 examine whether there are any direct or otherwise infeasible ones.  If so,
3876 return true, otherwise return false.  PARM must be a gimple register with a
3877 non-NULL default definition.  */
3879 static bool
3880 ptr_parm_has_direct_uses (tree parm)
3882 imm_use_iterator ui;
3883 gimple *stmt;
3884 tree name = ssa_default_def (cfun, parm);
3885 bool ret = false;
3887 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
/* Count the uses of NAME in this statement that are simple, full-width,
   non-volatile dereferences; any remainder is a "direct" use.  */
3889 int uses_ok = 0;
3890 use_operand_p use_p;
3892 if (is_gimple_debug (stmt))
3893 continue;
3895 /* Valid uses include dereferences on the lhs and the rhs.  */
3896 if (gimple_has_lhs (stmt))
3898 tree lhs = gimple_get_lhs (stmt);
3899 while (handled_component_p (lhs))
3900 lhs = TREE_OPERAND (lhs, 0);
3901 if (TREE_CODE (lhs) == MEM_REF
3902 && TREE_OPERAND (lhs, 0) == name
3903 && integer_zerop (TREE_OPERAND (lhs, 1))
3904 && types_compatible_p (TREE_TYPE (lhs),
3905 TREE_TYPE (TREE_TYPE (name)))
3906 && !TREE_THIS_VOLATILE (lhs))
3907 uses_ok++;
3909 if (gimple_assign_single_p (stmt))
3911 tree rhs = gimple_assign_rhs1 (stmt);
3912 while (handled_component_p (rhs))
3913 rhs = TREE_OPERAND (rhs, 0);
3914 if (TREE_CODE (rhs) == MEM_REF
3915 && TREE_OPERAND (rhs, 0) == name
3916 && integer_zerop (TREE_OPERAND (rhs, 1))
3917 && types_compatible_p (TREE_TYPE (rhs),
3918 TREE_TYPE (TREE_TYPE (name)))
3919 && !TREE_THIS_VOLATILE (rhs))
3920 uses_ok++;
3922 else if (is_gimple_call (stmt))
3924 unsigned i;
3925 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3927 tree arg = gimple_call_arg (stmt, i);
3928 while (handled_component_p (arg))
3929 arg = TREE_OPERAND (arg, 0);
3930 if (TREE_CODE (arg) == MEM_REF
3931 && TREE_OPERAND (arg, 0) == name
3932 && integer_zerop (TREE_OPERAND (arg, 1))
3933 && types_compatible_p (TREE_TYPE (arg),
3934 TREE_TYPE (TREE_TYPE (name)))
3935 && !TREE_THIS_VOLATILE (arg))
3936 uses_ok++;
3940 /* If the number of valid uses does not match the number of
3941 uses in this stmt there is an unhandled use.  */
3942 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3943 --uses_ok;
3945 if (uses_ok != 0)
3946 ret = true;
3948 if (ret)
3949 BREAK_FROM_IMM_USE_STMT (ui);
3952 return ret;
3955 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3956 them in candidate_bitmap.  Note that these do not necessarily include
3957 parameters which are unused and thus can be removed.  Return true iff any
3958 such candidate has been found.  Also sets func_param_count as a side
   effect.  */
3960 static bool
3961 find_param_candidates (void)
3963 tree parm;
3964 int count = 0;
3965 bool ret = false;
3966 const char *msg;
3968 for (parm = DECL_ARGUMENTS (current_function_decl);
3969 parm;
3970 parm = DECL_CHAIN (parm))
3972 tree type = TREE_TYPE (parm);
3973 tree_node **slot;
3975 count++;
3977 if (TREE_THIS_VOLATILE (parm)
3978 || TREE_ADDRESSABLE (parm)
3979 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3980 continue;
3982 if (is_unused_scalar_param (parm))
3984 ret = true;
3985 continue;
/* For pointers, the candidate is the pointed-to data; it must be safe to
   dereference in the caller, so reject direct uses of the pointer.  */
3988 if (POINTER_TYPE_P (type))
3990 type = TREE_TYPE (type);
3992 if (TREE_CODE (type) == FUNCTION_TYPE
3993 || TYPE_VOLATILE (type)
3994 || (TREE_CODE (type) == ARRAY_TYPE
3995 && TYPE_NONALIASED_COMPONENT (type))
3996 || !is_gimple_reg (parm)
3997 || is_va_list_type (type)
3998 || ptr_parm_has_direct_uses (parm))
3999 continue;
4001 else if (!AGGREGATE_TYPE_P (type))
4002 continue;
4004 if (!COMPLETE_TYPE_P (type)
4005 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4006 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4007 || (AGGREGATE_TYPE_P (type)
4008 && type_internals_preclude_sra_p (type, &msg)))
4009 continue;
4011 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4012 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4013 *slot = parm;
4015 ret = true;
4016 if (dump_file && (dump_flags & TDF_DETAILS))
4018 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4019 print_generic_expr (dump_file, parm, 0);
4020 fprintf (dump_file, "\n");
4024 func_param_count = count;
4025 return ret;
4028 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4029 maybe_modified. */
4031 static bool
4032 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4033 void *data)
4035 struct access *repr = (struct access *) data;
4037 repr->grp_maybe_modified = 1;
4038 return true;
4041 /* Analyze what representatives (in linked lists accessible from
4042 REPRESENTATIVES) can be modified by side effects of statements in the
4043 current function.  Walks aliased vdefs from each access and lets
   mark_maybe_modified set grp_maybe_modified.  */
4045 static void
4046 analyze_modified_params (vec<access_p> representatives)
4048 int i;
4050 for (i = 0; i < func_param_count; i++)
4052 struct access *repr;
4054 for (repr = representatives[i];
4055 repr;
4056 repr = repr->next_grp)
4058 struct access *access;
4059 bitmap visited;
4060 ao_ref ar;
4062 if (no_accesses_p (repr))
4063 continue;
4064 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4065 || repr->grp_maybe_modified)
4066 continue;
4068 ao_ref_init (&ar, repr->expr);
4069 visited = BITMAP_ALLOC (NULL);
4070 for (access = repr; access; access = access->next_sibling)
4072 /* All accesses are read ones, otherwise grp_maybe_modified would
4073 be trivially set.  */
4074 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4075 mark_maybe_modified, repr, &visited);
4076 if (repr->grp_maybe_modified)
4077 break;
4079 BITMAP_FREE (visited);
4084 /* Propagate distances in bb_dereferences in the opposite direction than the
4085 control flow edges, in each step storing the maximum of the current value
4086 and the minimum of all successors.  These steps are repeated until the table
4087 stabilizes.  Note that BBs which might terminate the function (according to
4088 the final_bbs bitmap) are never updated in this way.  */
4090 static void
4091 propagate_dereference_distances (void)
4093 basic_block bb;
/* Worklist of blocks to (re)process; bb->aux marks membership so a block
   is never queued twice.  */
4095 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4096 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4097 FOR_EACH_BB_FN (bb, cfun)
4099 queue.quick_push (bb);
4100 bb->aux = bb;
4103 while (!queue.is_empty ())
4105 edge_iterator ei;
4106 edge e;
4107 bool change = false;
4108 int i;
4110 bb = queue.pop ();
4111 bb->aux = NULL;
4113 if (bitmap_bit_p (final_bbs, bb->index))
4114 continue;
4116 for (i = 0; i < func_param_count; i++)
4118 int idx = bb->index * func_param_count + i;
4119 bool first = true;
4120 HOST_WIDE_INT inh = 0;
/* INH becomes the minimum of the successors' distances.  */
4122 FOR_EACH_EDGE (e, ei, bb->succs)
4124 int succ_idx = e->dest->index * func_param_count + i;
4126 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4127 continue;
4129 if (first)
4131 first = false;
4132 inh = bb_dereferences [succ_idx];
4134 else if (bb_dereferences [succ_idx] < inh)
4135 inh = bb_dereferences [succ_idx];
4138 if (!first && bb_dereferences[idx] < inh)
4140 bb_dereferences[idx] = inh;
4141 change = true;
/* This block changed, so its predecessors may need another round.  */
4145 if (change && !bitmap_bit_p (final_bbs, bb->index))
4146 FOR_EACH_EDGE (e, ei, bb->preds)
4148 if (e->src->aux)
4149 continue;
4151 e->src->aux = e->src;
4152 queue.quick_push (e->src);
4157 /* Dump a dereferences TABLE with heading STR to file F. */
4159 static void
4160 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4162 basic_block bb;
4164 fprintf (dump_file, "%s", str);
4165 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4166 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4168 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4169 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4171 int i;
4172 for (i = 0; i < func_param_count; i++)
4174 int idx = bb->index * func_param_count + i;
4175 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4178 fprintf (f, "\n");
4180 fprintf (dump_file, "\n");
4183 /* Determine what (parts of) parameters passed by reference that are not
4184 assigned to are not certainly dereferenced in this function and thus the
4185 dereferencing cannot be safely moved to the caller without potentially
4186 introducing a segfault.  Mark such REPRESENTATIVES as
4187 grp_not_necessarilly_dereferenced.
4189 The dereferenced maximum "distance," i.e. the offset + size of the accessed
4190 part is calculated rather than simple booleans are calculated for each
4191 pointer parameter to handle cases when only a fraction of the whole
4192 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4193 an example).
4195 The maximum dereference distances for each pointer parameter and BB are
4196 already stored in bb_dereference.  This routine simply propagates these
4197 values upwards by propagate_dereference_distances and then compares the
4198 distances of individual parameters in the ENTRY BB to the equivalent
4199 distances of each representative of a (fraction of a) parameter.  */
4201 static void
4202 analyze_caller_dereference_legality (vec<access_p> representatives)
4204 int i;
4206 if (dump_file && (dump_flags & TDF_DETAILS))
4207 dump_dereferences_table (dump_file,
4208 "Dereference table before propagation:\n",
4209 bb_dereferences);
4211 propagate_dereference_distances ();
4213 if (dump_file && (dump_flags & TDF_DETAILS))
4214 dump_dereferences_table (dump_file,
4215 "Dereference table after propagation:\n",
4216 bb_dereferences);
4218 for (i = 0; i < func_param_count; i++)
4220 struct access *repr = representatives[i];
4221 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4223 if (!repr || no_accesses_p (repr))
4224 continue;
/* An access extending past the guaranteed entry-BB dereference distance
   is not certainly dereferenced on every path.  */
4228 if ((repr->offset + repr->size) > bb_dereferences[idx])
4229 repr->grp_not_necessarilly_dereferenced = 1;
4230 repr = repr->next_grp;
4232 while (repr);
4236 /* Return the representative access for the parameter declaration PARM if it is
4237 a scalar passed by reference which is not written to and the pointer value
4238 is not used directly.  Thus, if it is legal to dereference it in the caller
4239 and we can rule out modifications through aliases, such a parameter should be
4240 turned into one passed by value.  Return NULL otherwise.  */
4242 static struct access *
4243 unmodified_by_ref_scalar_representative (tree parm)
4245 int i, access_count;
4246 struct access *repr;
4247 vec<access_p> *access_vec;
4249 access_vec = get_base_access_vector (parm);
4250 gcc_assert (access_vec);
4251 repr = (*access_vec)[0];
4252 if (repr->write)
4253 return NULL;
4254 repr->group_representative = repr;
/* Chain all remaining accesses as siblings of the representative; bail out
   on the first write, which disqualifies the parameter.  */
4256 access_count = access_vec->length ();
4257 for (i = 1; i < access_count; i++)
4259 struct access *access = (*access_vec)[i];
4260 if (access->write)
4261 return NULL;
4262 access->group_representative = repr;
4263 access->next_sibling = repr->next_sibling;
4264 repr->next_sibling = access;
4267 repr->grp_read = 1;
4268 repr->grp_scalar_ptr = 1;
4269 return repr;
4272 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4273 associated with. REQ_ALIGN is the minimum required alignment. */
4275 static bool
4276 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4278 unsigned int exp_align;
4279 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4280 is incompatible assign in a call statement (and possibly even in asm
4281 statements). This can be relaxed by using a new temporary but only for
4282 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4283 intraprocedural SRA we deal with this by keeping the old aggregate around,
4284 something we cannot do in IPA-SRA.) */
4285 if (access->write
4286 && (is_gimple_call (access->stmt)
4287 || gimple_code (access->stmt) == GIMPLE_ASM))
4288 return true;
4290 exp_align = get_object_alignment (access->expr);
4291 if (exp_align < req_align)
4292 return true;
4294 return false;
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  /* Sorting makes accesses to the same region adjacent, so each group is a
     consecutive run in the vector.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart. */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters. */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  /* Accesses in one group must also agree on type addressability and
	     alias pointer type, otherwise the group cannot be represented by
	     a single replacement.  */
	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      /* Chain the representative onto the result list.  */
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  /* For a pointer parameter measure the pointed-to aggregate, not the
     pointer itself.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  /* Splitting only pays off when strictly less than the whole aggregate is
     accessed.  */
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  Return the number of components to create, or
   0 when the parameter should be left alone.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  /* For by-reference parameters compare against the pointed-to aggregate
     size rather than the pointer size.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      /* A by-ref group that may be modified or not certainly dereferenced
	 must stay passed by reference, so it costs a whole parameter.  */
      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  /* When optimizing for size, never grow the total parameter size.  */
  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  The values are
   compared with '<' in splice_all_param_accesses, so "stronger" results must
   come later.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.
   REPRESENTATIVES is filled with one entry per parameter, in declaration
   order (NULL for untouchable parameters).  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  /* Record the sentinel so the parameter can be removed later.  */
	  representatives.quick_push (&no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  /* Pointer to a scalar: candidate for by-ref -> by-value
	     conversion.  */
	  repr = unmodified_by_ref_scalar_representative (parm);
	  representatives.quick_push (repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  representatives.quick_push (repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      /* Only ever move RESULT up the enum lattice; see the comment
		 on ipa_splicing_result.  */
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
4539 /* Return the index of BASE in PARMS. Abort if it is not found. */
4541 static inline int
4542 get_param_index (tree base, vec<tree> parms)
4544 int i, len;
4546 len = parms.length ();
4547 for (i = 0; i < len; i++)
4548 if (parms[i] == base)
4549 return i;
4550 gcc_unreachable ();
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
				       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
	{
	  /* NULL means keep the parameter as-is, the no-accesses sentinel
	     means it can be removed entirely.  */
	  struct ipa_parm_adjustment adj;

	  memset (&adj, 0, sizeof (adj));
	  adj.base_index = get_param_index (parm, parms);
	  adj.base = parm;
	  if (!repr)
	    adj.op = IPA_PARM_OP_COPY;
	  else
	    adj.op = IPA_PARM_OP_REMOVE;
	  adj.arg_prefix = "ISRA";
	  adjustments.quick_push (adj);
	}
      else
	{
	  /* One new parameter per access group.  */
	  struct ipa_parm_adjustment adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      memset (&adj, 0, sizeof (adj));
	      gcc_assert (repr->base == parm);
	      adj.base_index = index;
	      adj.base = repr->base;
	      adj.type = repr->type;
	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj.offset = repr->offset;
	      adj.reverse = repr->reverse;
	      /* Groups that may be modified or not provably dereferenced
		 must remain passed by reference.  */
	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			    && (repr->grp_maybe_modified
				|| repr->grp_not_necessarilly_dereferenced));
	      adj.arg_prefix = "ISRA";
	      adjustments.quick_push (adj);
	    }
	}
    }
  parms.release ();
  return adjustments;
}
/* Analyze the collected accesses and produce a plan what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  Also counts
   the expected number of adjustments and updates the pass statistics.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      /* A scalar pointer can only be turned into a by-value
		 parameter when it is provably unmodified and always
		 dereferenced.  */
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		representatives[i] = NULL;
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  /* Not worth splitting: keep the parameter as a copy.  */
		  representatives[i] = NULL;
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  /* Unused parameters are removed; untouched ones just copied.  */
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
4700 /* If a parameter replacement identified by ADJ does not yet exist in the form
4701 of declaration, create it and record it, otherwise return the previously
4702 created one. */
4704 static tree
4705 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4707 tree repl;
4708 if (!adj->new_ssa_base)
4710 char *pretty_name = make_fancy_name (adj->base);
4712 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4713 DECL_NAME (repl) = get_identifier (pretty_name);
4714 DECL_NAMELESS (repl) = 1;
4715 obstack_free (&name_obstack, pretty_name);
4717 adj->new_ssa_base = repl;
4719 else
4720 repl = adj->new_ssa_base;
4721 return repl;
4724 /* Find the first adjustment for a particular parameter BASE in a vector of
4725 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4726 adjustment. */
4728 static struct ipa_parm_adjustment *
4729 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4731 int i, len;
4733 len = adjustments.length ();
4734 for (i = 0; i < len; i++)
4736 struct ipa_parm_adjustment *adj;
4738 adj = &adjustments[i];
4739 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4740 return adj;
4743 return NULL;
4746 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4747 parameter which is to be removed because its value is not used, create a new
4748 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4749 original with it and return it. If there is no need to re-map, return NULL.
4750 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4752 static tree
4753 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4754 ipa_parm_adjustment_vec adjustments)
4756 struct ipa_parm_adjustment *adj;
4757 tree decl, repl, new_name;
4759 if (TREE_CODE (old_name) != SSA_NAME)
4760 return NULL;
4762 decl = SSA_NAME_VAR (old_name);
4763 if (decl == NULL_TREE
4764 || TREE_CODE (decl) != PARM_DECL)
4765 return NULL;
4767 adj = get_adjustment_for_base (adjustments, decl);
4768 if (!adj)
4769 return NULL;
4771 repl = get_replaced_param_substitute (adj);
4772 new_name = make_ssa_name (repl, stmt);
4773 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4774 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4776 if (dump_file)
4778 fprintf (dump_file, "replacing an SSA name of a removed param ");
4779 print_generic_expr (dump_file, old_name, 0);
4780 fprintf (dump_file, " with ");
4781 print_generic_expr (dump_file, new_name, 0);
4782 fprintf (dump_file, "\n");
4785 replace_uses_by (old_name, new_name);
4786 return new_name;
/* If the statement STMT contains any expressions that need to replaced with a
   different one as noted by ADJUSTMENTS, do so.  Handle any potential type
   incompatibilities (GSI is used to accommodate conversion statements and must
   point to the statement).  Return true iff the statement was modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      /* The replacement may have a different type than the original LHS;
	 paper over the mismatch.  */
      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
					    NULL);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  /* Force the (possibly non-gimple) converted RHS into a fresh
	     operand inserted before STMT.  */
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
/* Traverse the function body and all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      /* PHI results of removed parameters need their SSA names re-mapped
	 too.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
	  tree new_lhs, old_lhs = gimple_phi_result (phi);
	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
	  if (new_lhs)
	    {
	      gimple_phi_set_result (phi, new_lhs);
	      release_ssa_name (old_lhs);
	    }
	}

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= ipa_modify_expr (t, false, adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, true, adjustments);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, false, adjustments);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  /* Independently of statement kind, any SSA definition based on a
	     removed parameter must be re-mapped.  */
	  def_operand_p defp;
	  ssa_op_iter iter;
	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	    {
	      tree old_def = DEF_FROM_PTR (defp);
	      if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
								   adjustments))
		{
		  SET_DEF (defp, new_def);
		  release_ssa_name (old_def);
		  modified = true;
		}
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      /* Purging dead EH edges changes the CFG; the caller needs to
		 know.  */
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  Where
   possible, point the debug statements at a DEBUG_EXPR_DECL or a new
   VAR_DECL instead so debug info survives.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  /* Debug binds are inserted at the start of the single successor of the
     entry block, if there is one.  */
  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      /* Copied parameters and non-register bases are unaffected.  */
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
	continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    if (gimple_clobber_p (stmt))
	      {
		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		gsi_remove (&cgsi, true);
		release_defs (stmt);
		continue;
	      }
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    /* Lazily create one DEBUG_EXPR_DECL per adjustment the first
	       time a debug use is seen.  */
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy) =
	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
5059 /* Return false if all callers have at least as many actual arguments as there
5060 are formal parameters in the current function and that their types
5061 match. */
5063 static bool
5064 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5065 void *data ATTRIBUTE_UNUSED)
5067 struct cgraph_edge *cs;
5068 for (cs = node->callers; cs; cs = cs->next_caller)
5069 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5070 return true;
5072 return false;
5075 /* Return false if all callers have vuse attached to a call statement. */
5077 static bool
5078 some_callers_have_no_vuse_p (struct cgraph_node *node,
5079 void *data ATTRIBUTE_UNUSED)
5081 struct cgraph_edge *cs;
5082 for (cs = node->callers; cs; cs = cs->next_caller)
5083 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5084 return true;
5086 return false;
/* Convert all callers of NODE.  DATA points to the vector of IPA-SRA
   adjustments to apply at each call site.  Always returns true so
   call_for_symbol_and_aliases keeps iterating.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
		          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      /* Call-site rewriting has to happen in the caller's function
	 context.  */
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
		 xstrdup_for_dump (cs->caller->name ()),
		 cs->caller->order,
		 xstrdup_for_dump (cs->callee->name ()),
		 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  /* Recompute each caller's inline parameters only once, even when it has
     several edges to NODE.  */
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.
   OLD_DECL is the pre-cloning function declaration; recursive calls still
   referring to it are redirected to NODE and rewritten in place.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
				     &adjustments, false);

  /* Only scan the body for self-calls when one was seen earlier.  */
  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  tree call_fndecl;
	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      /* Redirect the recursive call to the new clone before
		 adjusting its arguments.  */
	      gimple_call_set_fndecl (stmt, node->decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }

  return;
}
/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  Return true iff the CFG has been changed.
   Creates a new clone of NODE and leaves cfun pointing at the clone's
   body.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
						   NULL, false, NULL, NULL,
						   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}
/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  /* Set when at least one caller exists in this compilation unit.  */
  bool has_callers;
  /* Set when some call passes an aggregate argument that is not aligned
     on a byte boundary.  */
  bool bad_arg_alignment;
  /* Set when some caller is a thunk.  */
  bool has_thunk;
};
/* If NODE has a caller, mark that fact in DATA which is pointer to
   ipa_sra_check_caller_data.  Also check all aggregate arguments in all known
   calls if they are unit aligned and if not, set the appropriate flag in DATA
   too.  Returning true stops the call_for_symbol_and_aliases walk early.  */

static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
	{
	  iscc->has_thunk = true;
	  return true;
	}
      gimple *call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
	{
	  tree arg = gimple_call_arg (call_stmt, i);
	  /* Register arguments are trivially aligned.  */
	  if (is_gimple_reg (arg))
	      continue;

	  tree offset;
	  HOST_WIDE_INT bitsize, bitpos;
	  machine_mode mode;
	  int unsignedp, reversep, volatilep = 0;
	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
			       &unsignedp, &reversep, &volatilep);
	  /* A bit-field-like argument position prevents clean splitting.  */
	  if (bitpos % BITS_PER_UNIT)
	    {
	      iscc->bad_arg_alignment = true;
	      return true;
	    }
	}
    }

  return false;
}
5251 /* Return false the function is apparently unsuitable for IPA-SRA based on it's
5252 attributes, return true otherwise. NODE is the cgraph node of the current
5253 function. */
5255 static bool
5256 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5258 if (!node->can_be_local_p ())
5260 if (dump_file)
5261 fprintf (dump_file, "Function not local to this compilation unit.\n");
5262 return false;
5265 if (!node->local.can_change_signature)
5267 if (dump_file)
5268 fprintf (dump_file, "Function can not change signature.\n");
5269 return false;
5272 if (!tree_versionable_function_p (node->decl))
5274 if (dump_file)
5275 fprintf (dump_file, "Function is not versionable.\n");
5276 return false;
5279 if (!opt_for_fn (node->decl, optimize)
5280 || !opt_for_fn (node->decl, flag_ipa_sra))
5282 if (dump_file)
5283 fprintf (dump_file, "Function not optimized.\n");
5284 return false;
5287 if (DECL_VIRTUAL_P (current_function_decl))
5289 if (dump_file)
5290 fprintf (dump_file, "Function is a virtual method.\n");
5291 return false;
5294 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5295 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5297 if (dump_file)
5298 fprintf (dump_file, "Function too big to be made truly local.\n");
5299 return false;
5302 if (cfun->stdarg)
5304 if (dump_file)
5305 fprintf (dump_file, "Function uses stdarg. \n");
5306 return false;
5309 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5310 return false;
5312 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5314 if (dump_file)
5315 fprintf (dump_file, "Always inline function will be inlined "
5316 "anyway. \n");
5317 return false;
5320 struct ipa_sra_check_caller_data iscc;
5321 memset (&iscc, 0, sizeof(iscc));
5322 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5323 if (!iscc.has_callers)
5325 if (dump_file)
5326 fprintf (dump_file,
5327 "Function has no callers in this compilation unit.\n");
5328 return false;
5331 if (iscc.bad_arg_alignment)
5333 if (dump_file)
5334 fprintf (dump_file,
5335 "A function call has an argument with non-unit alignment.\n");
5336 return false;
5339 if (iscc.has_thunk)
5341 if (dump_file)
5342 fprintf (dump_file,
5343 "A has thunk.\n");
5344 return false;
5347 return true;
/* Perform early interprocedural SRA.  Returns the TODO flags for the pass
   manager (0 when nothing was done).  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with no VUSE attached "
		 "to a call stmt.\n");
      goto simple_out;
    }

  /* One dereference-distance slot per (basic block, parameter) pair.  */
  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

  /* Two cleanup levels: "out" also frees the per-BB scan data allocated
     above, "simple_out" only tears down the common SRA structures.  */
 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
namespace {

/* Pass descriptor for the early IPA-SRA GIMPLE pass.  */
const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

/* Early interprocedural SRA pass wrapper; the work happens in
   ipa_early_sra.  */
class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}