gcc/tree-sra.c
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2014 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
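
/* As an illustrative sketch (not taken from the pass itself), given a
   function such as

     struct S { int a; int b; };

     int
     foo (struct S s)
     {
       s.a += s.b;
       return s.a;
     }

   intraprocedural SRA would typically replace the accesses s.a and s.b with
   two independent scalar replacement variables (named e.g. SR.1 and SR.2,
   see create_access_replacement below), so that the aggregate s itself is
   no longer needed.  */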
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-map.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "statistics.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
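
/* For illustration (assuming a 32-bit int), the representative accesses for

     struct { struct { int x; int y; } in; int z; } s;

   could form the tree

     (offset 0,  size 64)  s.in
       (offset 0,  size 32)  s.in.x
       (offset 32, size 32)  s.in.y
     (offset 64, size 32)  s.z

   where both children of s.in lie entirely within it and siblings are
   ordered by offset.  */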
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};
typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "no structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field position too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL,
   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
   BASE must be the top-most VAR_DECL representing the variable, OFFSET must
   be the offset of DECL within BASE.  REF must be the memory reference
   expression for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_to_uhwi (DECL_SIZE (fld));
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of their operand
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot in which to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
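
/* As a worked example (offsets and sizes in bits), accesses at
   (offset 0, size 64), (offset 0, size 32) and (offset 32, size 32) sort as

     (0, 64), (0, 32), (32, 32)

   because equal offsets put the bigger access first.  This is what
   sort_and_splice_var_accesses below relies on when it checks that every
   smaller access falls entirely within the preceding bigger one.  */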
/* Append the name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable of access to
   EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
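
/* For instance (an illustration derived from the cases above, not output of
   the pass), an access to s.in.x would yield the name "s$in$x" and an access
   to a[3].f the name "a$3$f", following the COMPONENT_REF and ARRAY_REF
   handling in make_fancy_name_1.  */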
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to the user (e.g. in warnings).  In all other
   situations, build_ref_for_model or build_ref_for_offset should be used
   instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
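
/* As an example (assuming a 32-bit int), for *RES of type

     struct { int a; int b[4]; }

   and OFFSET 64 (bits) with EXP_TYPE int, the loop above first descends into
   field b (bit position 32, covering offsets 32..159) and then builds the
   ARRAY_REF with index (64 - 32) / 32 == 1, so *RES ends up as
   (*RES).b[1].  */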
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print a message to the dump file stating why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
1864 /* Sort all accesses for the given variable, check for partial overlaps and
1865 return NULL if there are any. If there are none, pick a representative for
1866 each combination of offset and size and create a linked list out of them.
1867 Return the pointer to the first representative and make sure it is the first
1868 one in the vector of accesses. */
1870 static struct access *
1871 sort_and_splice_var_accesses (tree var)
1873 int i, j, access_count;
1874 struct access *res, **prev_acc_ptr = &res;
1875 vec<access_p> *access_vec;
1876 bool first = true;
1877 HOST_WIDE_INT low = -1, high = 0;
1879 access_vec = get_base_access_vector (var);
1880 if (!access_vec)
1881 return NULL;
1882 access_count = access_vec->length ();
1884 /* Sort by <OFFSET, SIZE>. */
1885 access_vec->qsort (compare_access_positions);
1887 i = 0;
1888 while (i < access_count)
1890 struct access *access = (*access_vec)[i];
1891 bool grp_write = access->write;
1892 bool grp_read = !access->write;
1893 bool grp_scalar_write = access->write
1894 && is_gimple_reg_type (access->type);
1895 bool grp_scalar_read = !access->write
1896 && is_gimple_reg_type (access->type);
1897 bool grp_assignment_read = access->grp_assignment_read;
1898 bool grp_assignment_write = access->grp_assignment_write;
1899 bool multiple_scalar_reads = false;
1900 bool total_scalarization = access->grp_total_scalarization;
1901 bool grp_partial_lhs = access->grp_partial_lhs;
1902 bool first_scalar = is_gimple_reg_type (access->type);
1903 bool unscalarizable_region = access->grp_unscalarizable_region;
1905 if (first || access->offset >= high)
1907 first = false;
1908 low = access->offset;
1909 high = access->offset + access->size;
1911 else if (access->offset > low && access->offset + access->size > high)
1912 return NULL;
1913 else
1914 gcc_assert (access->offset >= low
1915 && access->offset + access->size <= high);
1917 j = i + 1;
1918 while (j < access_count)
1920 struct access *ac2 = (*access_vec)[j];
1921 if (ac2->offset != access->offset || ac2->size != access->size)
1922 break;
1923 if (ac2->write)
1925 grp_write = true;
1926 grp_scalar_write = (grp_scalar_write
1927 || is_gimple_reg_type (ac2->type));
1929 else
1931 grp_read = true;
1932 if (is_gimple_reg_type (ac2->type))
1934 if (grp_scalar_read)
1935 multiple_scalar_reads = true;
1936 else
1937 grp_scalar_read = true;
1940 grp_assignment_read |= ac2->grp_assignment_read;
1941 grp_assignment_write |= ac2->grp_assignment_write;
1942 grp_partial_lhs |= ac2->grp_partial_lhs;
1943 unscalarizable_region |= ac2->grp_unscalarizable_region;
1944 total_scalarization |= ac2->grp_total_scalarization;
1945 relink_to_new_repr (access, ac2);
1947 /* If there are both aggregate-type and scalar-type accesses with
1948 this combination of size and offset, the comparison function
1949 should have put the scalars first. */
1950 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1951 ac2->group_representative = access;
1952 j++;
1955 i = j;
1957 access->group_representative = access;
1958 access->grp_write = grp_write;
1959 access->grp_read = grp_read;
1960 access->grp_scalar_read = grp_scalar_read;
1961 access->grp_scalar_write = grp_scalar_write;
1962 access->grp_assignment_read = grp_assignment_read;
1963 access->grp_assignment_write = grp_assignment_write;
1964 access->grp_hint = multiple_scalar_reads || total_scalarization;
1965 access->grp_total_scalarization = total_scalarization;
1966 access->grp_partial_lhs = grp_partial_lhs;
1967 access->grp_unscalarizable_region = unscalarizable_region;
1968 if (access->first_link)
1969 add_access_to_work_queue (access);
1971 *prev_acc_ptr = access;
1972 prev_acc_ptr = &access->next_grp;
1975 gcc_assert (res == (*access_vec)[0]);
1976 return res;
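/* For example (hypothetical, assuming the 32-bit-int struct pair above): if
   P is accessed as P.A (read), P.A (read) and P.B (write), the vector sorts
   to <0,32>, <0,32>, <32,32>.  The two P.A accesses are spliced into one
   group whose representative gets grp_read and, because of the second
   scalar read, grp_hint set, while P.B forms a second group with grp_write
   set.  */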
1979 /* Create a variable for the given ACCESS; the access determines its type, name
1980    and a few other properties.  Return the variable declaration and also store
1981    it in ACCESS->replacement_decl.  */
1983 static tree
1984 create_access_replacement (struct access *access)
1986 tree repl;
1988 if (access->grp_to_be_debug_replaced)
1990 repl = create_tmp_var_raw (access->type, NULL);
1991 DECL_CONTEXT (repl) = current_function_decl;
1993 else
1994 repl = create_tmp_var (access->type, "SR");
1995 if (TREE_CODE (access->type) == COMPLEX_TYPE
1996 || TREE_CODE (access->type) == VECTOR_TYPE)
1998 if (!access->grp_partial_lhs)
1999 DECL_GIMPLE_REG_P (repl) = 1;
2001 else if (access->grp_partial_lhs
2002 && is_gimple_reg_type (access->type))
2003 TREE_ADDRESSABLE (repl) = 1;
2005 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2006 DECL_ARTIFICIAL (repl) = 1;
2007 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2009 if (DECL_NAME (access->base)
2010 && !DECL_IGNORED_P (access->base)
2011 && !DECL_ARTIFICIAL (access->base))
2013 char *pretty_name = make_fancy_name (access->expr);
2014 tree debug_expr = unshare_expr_without_location (access->expr), d;
2015 bool fail = false;
2017 DECL_NAME (repl) = get_identifier (pretty_name);
2018 obstack_free (&name_obstack, pretty_name);
2020 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2021 as DECL_DEBUG_EXPR isn't considered when looking for still
2022 used SSA_NAMEs and thus they could be freed.  All debug info
2023 generation cares about is whether something is constant or variable
2024 and that get_ref_base_and_extent works properly on the
2025 expression. It cannot handle accesses at a non-constant offset
2026 though, so just give up in those cases. */
2027 for (d = debug_expr;
2028 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2029 d = TREE_OPERAND (d, 0))
2030 switch (TREE_CODE (d))
2032 case ARRAY_REF:
2033 case ARRAY_RANGE_REF:
2034 if (TREE_OPERAND (d, 1)
2035 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2036 fail = true;
2037 if (TREE_OPERAND (d, 3)
2038 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2039 fail = true;
2040 /* FALLTHRU */
2041 case COMPONENT_REF:
2042 if (TREE_OPERAND (d, 2)
2043 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2044 fail = true;
2045 break;
2046 case MEM_REF:
2047 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2048 fail = true;
2049 else
2050 d = TREE_OPERAND (d, 0);
2051 break;
2052 default:
2053 break;
2055 if (!fail)
2057 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2058 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2060 if (access->grp_no_warning)
2061 TREE_NO_WARNING (repl) = 1;
2062 else
2063 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2065 else
2066 TREE_NO_WARNING (repl) = 1;
2068 if (dump_file)
2070 if (access->grp_to_be_debug_replaced)
2072 fprintf (dump_file, "Created a debug-only replacement for ");
2073 print_generic_expr (dump_file, access->base, 0);
2074 fprintf (dump_file, " offset: %u, size: %u\n",
2075 (unsigned) access->offset, (unsigned) access->size);
2077 else
2079 fprintf (dump_file, "Created a replacement for ");
2080 print_generic_expr (dump_file, access->base, 0);
2081 fprintf (dump_file, " offset: %u, size: %u: ",
2082 (unsigned) access->offset, (unsigned) access->size);
2083 print_generic_expr (dump_file, repl, 0);
2084 fprintf (dump_file, "\n");
2087 sra_stats.replacements++;
2089 return repl;
2092 /* Return the scalar replacement of ACCESS, which must already exist.  */
2094 static inline tree
2095 get_access_replacement (struct access *access)
2097 gcc_checking_assert (access->replacement_decl);
2098 return access->replacement_decl;
2102 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2103 linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2104 to by it is not "within" the root.  Return false iff some accesses partially
2105 overlap. */
2107 static bool
2108 build_access_subtree (struct access **access)
2110 struct access *root = *access, *last_child = NULL;
2111 HOST_WIDE_INT limit = root->offset + root->size;
2113 *access = (*access)->next_grp;
2114 while (*access && (*access)->offset + (*access)->size <= limit)
2116 if (!last_child)
2117 root->first_child = *access;
2118 else
2119 last_child->next_sibling = *access;
2120 last_child = *access;
2122 if (!build_access_subtree (access))
2123 return false;
2126 if (*access && (*access)->offset < limit)
2127 return false;
2129 return true;
2132 /* Build a tree of access representatives, ACCESS is the pointer to the first
2133 one, others are linked in a list by the next_grp field. Return false iff
2134 some accesses partially overlap. */
2136 static bool
2137 build_access_trees (struct access *access)
2139 while (access)
2141 struct access *root = access;
2143 if (!build_access_subtree (&access))
2144 return false;
2145 root->next_grp = access;
2147 return true;
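/* A sketch of the resulting shape (hypothetical, 32-bit int assumed):

     struct inner { int x; int y; };
     struct outer { struct inner i; int z; };

   with accesses <0,64> (whole I), <0,32> (I.X), <32,32> (I.Y) and <64,32>
   (Z), two trees result: the representative of <0,64> with children <0,32>
   and <32,32>, and the stand-alone representative <64,32>; the roots are
   linked through next_grp.  */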
2150 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2151    array.  */
2153 static bool
2154 expr_with_var_bounded_array_refs_p (tree expr)
2156 while (handled_component_p (expr))
2158 if (TREE_CODE (expr) == ARRAY_REF
2159 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2160 return true;
2161 expr = TREE_OPERAND (expr, 0);
2163 return false;
2166 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2167    they seem beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
2168    sorts of access flags appropriately along the way, notably propagating
2169    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2170    grp_total_scalarization from PARENT to ROOT.
2172 Creating a replacement for a scalar access is considered beneficial if its
2173 grp_hint is set (this means we are either attempting total scalarization or
2174 there is more than one direct read access) or according to the following
2175 table:
2177 Access written to through a scalar type (once or more times)
2179 | Written to in an assignment statement
2181 | | Access read as scalar _once_
2182 | | |
2183 | | | Read in an assignment statement
2184 | | | |
2185 | | | | Scalarize Comment
2186 -----------------------------------------------------------------------------
2187 0 0 0 0 No access for the scalar
2188 0 0 0 1 No access for the scalar
2189 0 0 1 0 No Single read - won't help
2190 0 0 1 1 No The same case
2191 0 1 0 0 No access for the scalar
2192 0 1 0 1 No access for the scalar
2193 0 1 1 0 Yes s = *g; return s.i;
2194 0 1 1 1 Yes The same case as above
2195 1 0 0 0 No Won't help
2196 1 0 0 1 Yes s.i = 1; *g = s;
2197 1 0 1 0 Yes s.i = 5; g = s.i;
2198 1 0 1 1 Yes The same case as above
2199 1 1 0 0 No Won't help.
2200 1 1 0 1 Yes s.i = 1; *g = s;
2201 1 1 1 0 Yes s = *g; return s.i;
2202 1 1 1 1 Yes Any of the above yeses */
2204 static bool
2205 analyze_access_subtree (struct access *root, struct access *parent,
2206 bool allow_replacements)
2208 struct access *child;
2209 HOST_WIDE_INT limit = root->offset + root->size;
2210 HOST_WIDE_INT covered_to = root->offset;
2211 bool scalar = is_gimple_reg_type (root->type);
2212 bool hole = false, sth_created = false;
2214 if (parent)
2216 if (parent->grp_read)
2217 root->grp_read = 1;
2218 if (parent->grp_assignment_read)
2219 root->grp_assignment_read = 1;
2220 if (parent->grp_write)
2221 root->grp_write = 1;
2222 if (parent->grp_assignment_write)
2223 root->grp_assignment_write = 1;
2224 if (parent->grp_total_scalarization)
2225 root->grp_total_scalarization = 1;
2228 if (root->grp_unscalarizable_region)
2229 allow_replacements = false;
2231 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2232 allow_replacements = false;
2234 for (child = root->first_child; child; child = child->next_sibling)
2236 hole |= covered_to < child->offset;
2237 sth_created |= analyze_access_subtree (child, root,
2238 allow_replacements && !scalar);
2240 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2241 root->grp_total_scalarization &= child->grp_total_scalarization;
2242 if (child->grp_covered)
2243 covered_to += child->size;
2244 else
2245 hole = true;
2248 if (allow_replacements && scalar && !root->first_child
2249 && (root->grp_hint
2250 || ((root->grp_scalar_read || root->grp_assignment_read)
2251 && (root->grp_scalar_write || root->grp_assignment_write))))
2253 /* Always create access replacements that cover the whole access.
2254 For integral types this means the precision has to match.
2255 Avoid assumptions based on the integral type kind, too. */
2256 if (INTEGRAL_TYPE_P (root->type)
2257 && (TREE_CODE (root->type) != INTEGER_TYPE
2258 || TYPE_PRECISION (root->type) != root->size)
2259 /* But leave bitfield accesses alone. */
2260 && (TREE_CODE (root->expr) != COMPONENT_REF
2261 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2263 tree rt = root->type;
2264 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2265 && (root->size % BITS_PER_UNIT) == 0);
2266 root->type = build_nonstandard_integer_type (root->size,
2267 TYPE_UNSIGNED (rt));
2268 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2269 root->base, root->offset,
2270 root->type, NULL, false);
2272 if (dump_file && (dump_flags & TDF_DETAILS))
2274 fprintf (dump_file, "Changing the type of a replacement for ");
2275 print_generic_expr (dump_file, root->base, 0);
2276 fprintf (dump_file, " offset: %u, size: %u ",
2277 (unsigned) root->offset, (unsigned) root->size);
2278 fprintf (dump_file, " to an integer.\n");
2282 root->grp_to_be_replaced = 1;
2283 root->replacement_decl = create_access_replacement (root);
2284 sth_created = true;
2285 hole = false;
2287 else
2289 if (allow_replacements
2290 && scalar && !root->first_child
2291 && (root->grp_scalar_write || root->grp_assignment_write)
2292 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2293 DECL_UID (root->base)))
2295 gcc_checking_assert (!root->grp_scalar_read
2296 && !root->grp_assignment_read);
2297 sth_created = true;
2298 if (MAY_HAVE_DEBUG_STMTS)
2300 root->grp_to_be_debug_replaced = 1;
2301 root->replacement_decl = create_access_replacement (root);
2305 if (covered_to < limit)
2306 hole = true;
2307 if (scalar)
2308 root->grp_total_scalarization = 0;
2311 if (!hole || root->grp_total_scalarization)
2312 root->grp_covered = 1;
2313 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2314 root->grp_unscalarized_data = 1; /* not covered and written to */
2315 return sth_created;
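/* An example of the type adjustment above (a sketch): an access whose type
   is an enumeration or _Bool satisfies INTEGRAL_TYPE_P but is not an
   INTEGER_TYPE, so its replacement is instead given the type
   build_nonstandard_integer_type (root->size, TYPE_UNSIGNED (rt)),
   e.g. a plain 32-bit integer for a 32-bit enum field.  This guarantees the
   replacement covers the whole access without relying on the precision of
   the original integral kind.  */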
2318 /* Analyze all access trees linked by next_grp by the means of
2319 analyze_access_subtree. */
2320 static bool
2321 analyze_access_trees (struct access *access)
2323 bool ret = false;
2325 while (access)
2327 if (analyze_access_subtree (access, NULL, true))
2328 ret = true;
2329 access = access->next_grp;
2332 return ret;
2335 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2336    size SIZE would conflict with an already existing one.  If exactly such a
2337    child already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2339 static bool
2340 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2341 HOST_WIDE_INT size, struct access **exact_match)
2343 struct access *child;
2345 for (child = lacc->first_child; child; child = child->next_sibling)
2347 if (child->offset == norm_offset && child->size == size)
2349 *exact_match = child;
2350 return true;
2353 if (child->offset < norm_offset + size
2354 && child->offset + child->size > norm_offset)
2355 return true;
2358 return false;
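/* For instance (hypothetical bit offsets): with an existing child at
   <32,32>, a proposed child at <32,32> is an exact match, a proposed child
   at <48,32> conflicts (48 < 32 + 32 and 48 + 32 > 32), and a proposed
   child at <64,32> causes no conflict.  */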
2361 /* Create a new child access of PARENT, with all properties just like MODEL
2362    except for its offset and with its grp_write true and grp_read false.
2363    Return the new access or NULL if it cannot be created.  Note that this access
2364    is created long after all splicing and sorting; it's not located in any
2365    access vector and is automatically a representative of its group.  */
2367 static struct access *
2368 create_artificial_child_access (struct access *parent, struct access *model,
2369 HOST_WIDE_INT new_offset)
2371 struct access *access;
2372 struct access **child;
2373 tree expr = parent->base;
2375 gcc_assert (!model->grp_unscalarizable_region);
2377 access = (struct access *) pool_alloc (access_pool);
2378 memset (access, 0, sizeof (struct access));
2379 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2380 model->type))
2382 access->grp_no_warning = true;
2383 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2384 new_offset, model, NULL, false);
2387 access->base = parent->base;
2388 access->expr = expr;
2389 access->offset = new_offset;
2390 access->size = model->size;
2391 access->type = model->type;
2392 access->grp_write = true;
2393 access->grp_read = false;
2395 child = &parent->first_child;
2396 while (*child && (*child)->offset < new_offset)
2397 child = &(*child)->next_sibling;
2399 access->next_sibling = *child;
2400 *child = access;
2402 return access;
2406 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2407 true if any new subaccess was created. Additionally, if RACC is a scalar
2408 access but LACC is not, change the type of the latter, if possible. */
2410 static bool
2411 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2413 struct access *rchild;
2414 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2415 bool ret = false;
2417 if (is_gimple_reg_type (lacc->type)
2418 || lacc->grp_unscalarizable_region
2419 || racc->grp_unscalarizable_region)
2420 return false;
2422 if (is_gimple_reg_type (racc->type))
2424 if (!lacc->first_child && !racc->first_child)
2426 tree t = lacc->base;
2428 lacc->type = racc->type;
2429 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2430 lacc->offset, racc->type))
2431 lacc->expr = t;
2432 else
2434 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2435 lacc->base, lacc->offset,
2436 racc, NULL, false);
2437 lacc->grp_no_warning = true;
2440 return false;
2443 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2445 struct access *new_acc = NULL;
2446 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2448 if (rchild->grp_unscalarizable_region)
2449 continue;
2451 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2452 &new_acc))
2454 if (new_acc)
2456 rchild->grp_hint = 1;
2457 new_acc->grp_hint |= new_acc->grp_read;
2458 if (rchild->first_child)
2459 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2461 continue;
2464 rchild->grp_hint = 1;
2465 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2466 if (new_acc)
2468 ret = true;
2469 if (racc->first_child)
2470 propagate_subaccesses_across_link (new_acc, rchild);
2474 return ret;
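/* A small example of the propagation (hypothetical, struct pair as above):
   given candidates P and Q and the statements

     p = q;
     ... = q.a;

   the access <0,32> of Q is propagated across the assign link to P,
   creating an artificial child access of P at the same normalized offset,
   so that the aggregate assignment can later be rewritten as copies between
   the individual scalar replacements.  */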
2477 /* Propagate all subaccesses across assignment links. */
2479 static void
2480 propagate_all_subaccesses (void)
2482 while (work_queue_head)
2484 struct access *racc = pop_access_from_work_queue ();
2485 struct assign_link *link;
2487 gcc_assert (racc->first_link);
2489 for (link = racc->first_link; link; link = link->next)
2491 struct access *lacc = link->lacc;
2493 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2494 continue;
2495 lacc = lacc->group_representative;
2496 if (propagate_subaccesses_across_link (lacc, racc)
2497 && lacc->first_link)
2498 add_access_to_work_queue (lacc);
2503 /* Go through all accesses collected throughout the (intraprocedural) analysis
2504 stage, exclude overlapping ones, identify representatives and build trees
2505 out of them, making decisions about scalarization on the way. Return true
2506 iff there are any to-be-scalarized variables after this stage. */
2508 static bool
2509 analyze_all_variable_accesses (void)
2511 int res = 0;
2512 bitmap tmp = BITMAP_ALLOC (NULL);
2513 bitmap_iterator bi;
2514 unsigned i, max_total_scalarization_size;
2516 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2517 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2519 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2520 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2521 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2523 tree var = candidate (i);
2525 if (TREE_CODE (var) == VAR_DECL
2526 && type_consists_of_records_p (TREE_TYPE (var)))
2528 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2529 <= max_total_scalarization_size)
2531 completely_scalarize_var (var);
2532 if (dump_file && (dump_flags & TDF_DETAILS))
2534 fprintf (dump_file, "Will attempt to totally scalarize ");
2535 print_generic_expr (dump_file, var, 0);
2536 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2539 else if (dump_file && (dump_flags & TDF_DETAILS))
2541 fprintf (dump_file, "Too big to totally scalarize: ");
2542 print_generic_expr (dump_file, var, 0);
2543 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2548 bitmap_copy (tmp, candidate_bitmap);
2549 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2551 tree var = candidate (i);
2552 struct access *access;
2554 access = sort_and_splice_var_accesses (var);
2555 if (!access || !build_access_trees (access))
2556 disqualify_candidate (var,
2557 "No or inhibitingly overlapping accesses.");
2560 propagate_all_subaccesses ();
2562 bitmap_copy (tmp, candidate_bitmap);
2563 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2565 tree var = candidate (i);
2566 struct access *access = get_first_repr_for_decl (var);
2568 if (analyze_access_trees (access))
2570 res++;
2571 if (dump_file && (dump_flags & TDF_DETAILS))
2573 fprintf (dump_file, "\nAccess trees for ");
2574 print_generic_expr (dump_file, var, 0);
2575 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2576 dump_access_tree (dump_file, access);
2577 fprintf (dump_file, "\n");
2580 else
2581 disqualify_candidate (var, "No scalar replacements to be created.");
2584 BITMAP_FREE (tmp);
2586 if (res)
2588 statistics_counter_event (cfun, "Scalarized aggregates", res);
2589 return true;
2591 else
2592 return false;
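/* A worked instance of the total-scalarization size limit above (the
   numbers are hypothetical and target-dependent): with UNITS_PER_WORD == 8,
   BITS_PER_UNIT == 8 and MOVE_RATIO evaluating to 4,
   max_total_scalarization_size is 8 * 8 * 4 == 256 bits, i.e. records of up
   to 32 bytes are considered for total scalarization.  */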
2595 /* Generate statements copying scalar replacements of accesses within a subtree
2596 into or out of AGG. ACCESS, all its children, siblings and their children
2597 are to be processed.  AGG is an aggregate-type expression (it can be a
2598 declaration but does not have to be; it can for example also be a MEM_REF or
2599 a series of handled components).  TOP_OFFSET is the offset of the processed
2600 subtree which has to be subtracted from offsets of individual accesses to
2601 get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2602 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2603 otherwise copy all.  GSI is a statement iterator used to place the new
2604 statements.  WRITE should be true when the statements should write from AGG
2605 to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2606 statements will be added after the current statement in GSI; otherwise they
2607 will be added before it.  */
2609 static void
2610 generate_subtree_copies (struct access *access, tree agg,
2611 HOST_WIDE_INT top_offset,
2612 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2613 gimple_stmt_iterator *gsi, bool write,
2614 bool insert_after, location_t loc)
2618 if (chunk_size && access->offset >= start_offset + chunk_size)
2619 return;
2621 if (access->grp_to_be_replaced
2622 && (chunk_size == 0
2623 || access->offset + access->size > start_offset))
2625 tree expr, repl = get_access_replacement (access);
2626 gimple stmt;
2628 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2629 access, gsi, insert_after);
2631 if (write)
2633 if (access->grp_partial_lhs)
2634 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2635 !insert_after,
2636 insert_after ? GSI_NEW_STMT
2637 : GSI_SAME_STMT);
2638 stmt = gimple_build_assign (repl, expr);
2640 else
2642 TREE_NO_WARNING (repl) = 1;
2643 if (access->grp_partial_lhs)
2644 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2645 !insert_after,
2646 insert_after ? GSI_NEW_STMT
2647 : GSI_SAME_STMT);
2648 stmt = gimple_build_assign (expr, repl);
2650 gimple_set_location (stmt, loc);
2652 if (insert_after)
2653 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2654 else
2655 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2656 update_stmt (stmt);
2657 sra_stats.subtree_copies++;
2659 else if (write
2660 && access->grp_to_be_debug_replaced
2661 && (chunk_size == 0
2662 || access->offset + access->size > start_offset))
2664 gimple ds;
2665 tree drhs = build_debug_ref_for_model (loc, agg,
2666 access->offset - top_offset,
2667 access);
2668 ds = gimple_build_debug_bind (get_access_replacement (access),
2669 drhs, gsi_stmt (*gsi));
2670 if (insert_after)
2671 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2672 else
2673 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2676 if (access->first_child)
2677 generate_subtree_copies (access->first_child, agg, top_offset,
2678 start_offset, chunk_size, gsi,
2679 write, insert_after, loc);
2681 access = access->next_sibling;
2683 while (access);
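/* For illustration (a sketch, assuming the struct pair above has scalar
   replacements named p$a and p$b by make_fancy_name): flushing the subtree
   of P into an aggregate expression AGG with TOP_OFFSET 0 and WRITE false
   emits

     AGG.a = p$a;
     AGG.b = p$b;

   whereas WRITE true emits the same assignments in the opposite
   direction.  */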
2686 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is
2687    the root of the subtree to be processed.  GSI is the statement iterator used
2688 for inserting statements which are added after the current statement if
2689 INSERT_AFTER is true or before it otherwise. */
2691 static void
2692 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2693 bool insert_after, location_t loc)
2696 struct access *child;
2698 if (access->grp_to_be_replaced)
2700 gimple stmt;
2702 stmt = gimple_build_assign (get_access_replacement (access),
2703 build_zero_cst (access->type));
2704 if (insert_after)
2705 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2706 else
2707 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2708 update_stmt (stmt);
2709 gimple_set_location (stmt, loc);
2711 else if (access->grp_to_be_debug_replaced)
2713 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2714 build_zero_cst (access->type),
2715 gsi_stmt (*gsi));
2716 if (insert_after)
2717 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2718 else
2719 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2722 for (child = access->first_child; child; child = child->next_sibling)
2723 init_subtree_with_zero (child, gsi, insert_after, loc);
2726 /* Search for an access representative for the given expression EXPR and
2727 return it or NULL if it cannot be found. */
2729 static struct access *
2730 get_access_for_expr (tree expr)
2732 HOST_WIDE_INT offset, size, max_size;
2733 tree base;
2735 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2736 a different size than the size of its argument and we need the latter
2737 one. */
2738 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2739 expr = TREE_OPERAND (expr, 0);
2741 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2742 if (max_size == -1 || !DECL_P (base))
2743 return NULL;
2745 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2746 return NULL;
2748 return get_var_base_offset_size_access (base, offset, max_size);
2751 /* Replace the expression EXPR with a scalar replacement if there is one and
2752 generate other statements to do type conversion or subtree copying if
2753 necessary. GSI is used to place newly created statements, WRITE is true if
2754 the expression is being written to (it is on the LHS of a statement or an
2755 output in an assembly statement).  */
2757 static bool
2758 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2760 location_t loc;
2761 struct access *access;
2762 tree type, bfr, orig_expr;
2764 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2766 bfr = *expr;
2767 expr = &TREE_OPERAND (*expr, 0);
2769 else
2770 bfr = NULL_TREE;
2772 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2773 expr = &TREE_OPERAND (*expr, 0);
2774 access = get_access_for_expr (*expr);
2775 if (!access)
2776 return false;
2777 type = TREE_TYPE (*expr);
2778 orig_expr = *expr;
2780 loc = gimple_location (gsi_stmt (*gsi));
2781 gimple_stmt_iterator alt_gsi = gsi_none ();
2782 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2784 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2785 gsi = &alt_gsi;
2788 if (access->grp_to_be_replaced)
2790 tree repl = get_access_replacement (access);
2791 /* If we replace a non-register typed access simply use the original
2792 access expression to extract the scalar component afterwards.
2793 This happens if scalarizing a function return value or parameter
2794 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2795 gcc.c-torture/compile/20011217-1.c.
2797 We also want to use this when accessing a complex or vector which can
2798 be accessed as a different type too, potentially creating a need for
2799 type conversion (see PR42196) and when scalarized unions are involved
2800 in assembler statements (see PR42398). */
2801 if (!useless_type_conversion_p (type, access->type))
2803 tree ref;
2805 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2807 if (write)
2809 gimple stmt;
2811 if (access->grp_partial_lhs)
2812 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2813 false, GSI_NEW_STMT);
2814 stmt = gimple_build_assign (repl, ref);
2815 gimple_set_location (stmt, loc);
2816 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2818 else
2820 gimple stmt;
2822 if (access->grp_partial_lhs)
2823 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2824 true, GSI_SAME_STMT);
2825 stmt = gimple_build_assign (ref, repl);
2826 gimple_set_location (stmt, loc);
2827 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2830 else
2831 *expr = repl;
2832 sra_stats.exprs++;
2834 else if (write && access->grp_to_be_debug_replaced)
2836 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2837 NULL_TREE,
2838 gsi_stmt (*gsi));
2839 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2842 if (access->first_child)
2844 HOST_WIDE_INT start_offset, chunk_size;
2845 if (bfr
2846 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2847 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2849 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2850 start_offset = access->offset
2851 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2853 else
2854 start_offset = chunk_size = 0;
2856 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2857 start_offset, chunk_size, gsi, write, write,
2858 loc);
2860 return true;
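/* A before/after sketch (hypothetical GIMPLE, struct pair as above): if P.A
   has the scalar replacement p$a, the statement

     D.1234 = p.a;

   simply becomes

     D.1234 = p$a;

   while for a type mismatch (e.g. a union member viewed through another
   type, see PR42196) a reference built by build_ref_for_model plus a
   separate converting assignment is emitted instead.  */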
2863 /* Where scalar replacements of the RHS have been written to when a replacement
2864    of the LHS of an assignment cannot be directly loaded from a replacement of
2865    the RHS.  */
2866 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2867 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2868 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2870 struct subreplacement_assignment_data
2872 /* Offset of the access representing the lhs of the assignment. */
2873 HOST_WIDE_INT left_offset;
2875 /* LHS and RHS of the original assignment. */
2876 tree assignment_lhs, assignment_rhs;
2878 /* Access representing the rhs of the whole assignment. */
2879 struct access *top_racc;
2881 /* Stmt iterator used for statement insertions after the original assignment.
2882 It points to the main GSI used to traverse a BB during function body
2883 modification. */
2884 gimple_stmt_iterator *new_gsi;
2886 /* Stmt iterator used for statement insertions before the original
2887 assignment. Keeps on pointing to the original statement. */
2888 gimple_stmt_iterator old_gsi;
2890 /* Location of the assignment. */
2891 location_t loc;
2893 /* Keeps the information on whether we have needed to refresh replacements of
2894    the LHS and from which side of the assignment this takes place.  */
2895 enum unscalarized_data_handling refreshed;
2898 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2899    their base aggregate if there are unscalarized data or directly to the LHS
2900    of the original assignment statement otherwise.  */
2902 static void
2903 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2905 tree src;
2906 if (sad->top_racc->grp_unscalarized_data)
2908 src = sad->assignment_rhs;
2909 sad->refreshed = SRA_UDH_RIGHT;
2911 else
2913 src = sad->assignment_lhs;
2914 sad->refreshed = SRA_UDH_LEFT;
2916 generate_subtree_copies (sad->top_racc->first_child, src,
2917 sad->top_racc->offset, 0, 0,
2918 &sad->old_gsi, false, false, sad->loc);
2921 /* Try to generate statements to load all sub-replacements in an access subtree
2922 formed by children of LACC from scalar replacements in the SAD->top_racc
2923 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2924 and load the accesses from it. */
2926 static void
2927 load_assign_lhs_subreplacements (struct access *lacc,
2928 struct subreplacement_assignment_data *sad)
2930 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2932 HOST_WIDE_INT offset;
2933 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2935 if (lacc->grp_to_be_replaced)
2937 struct access *racc;
2938 gimple stmt;
2939 tree rhs;
2941 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2942 if (racc && racc->grp_to_be_replaced)
2944 rhs = get_access_replacement (racc);
2945 if (!useless_type_conversion_p (lacc->type, racc->type))
2946 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2947 lacc->type, rhs);
2949 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2950 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2951 NULL_TREE, true, GSI_SAME_STMT);
2953 else
2955 /* No suitable access on the right hand side, need to load from
2956 the aggregate. See if we have to update it first... */
2957 if (sad->refreshed == SRA_UDH_NONE)
2958 handle_unscalarized_data_in_subtree (sad);
2960 if (sad->refreshed == SRA_UDH_LEFT)
2961 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2962 lacc->offset - sad->left_offset,
2963 lacc, sad->new_gsi, true);
2964 else
2965 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
2966 lacc->offset - sad->left_offset,
2967 lacc, sad->new_gsi, true);
2968 if (lacc->grp_partial_lhs)
2969 rhs = force_gimple_operand_gsi (sad->new_gsi,
2970 rhs, true, NULL_TREE,
2971 false, GSI_NEW_STMT);
2974 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2975 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
2976 gimple_set_location (stmt, sad->loc);
2977 update_stmt (stmt);
2978 sra_stats.subreplacements++;
2980 else
2982 if (sad->refreshed == SRA_UDH_NONE
2983 && lacc->grp_read && !lacc->grp_covered)
2984 handle_unscalarized_data_in_subtree (sad);
2986 if (lacc && lacc->grp_to_be_debug_replaced)
2988 gimple ds;
2989 tree drhs;
2990 struct access *racc = find_access_in_subtree (sad->top_racc,
2991 offset,
2992 lacc->size);
2994 if (racc && racc->grp_to_be_replaced)
2996 if (racc->grp_write)
2997 drhs = get_access_replacement (racc);
2998 else
2999 drhs = NULL;
3001 else if (sad->refreshed == SRA_UDH_LEFT)
3002 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3003 lacc->offset, lacc);
3004 else if (sad->refreshed == SRA_UDH_RIGHT)
3005 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3006 offset, lacc);
3007 else
3008 drhs = NULL_TREE;
3009 if (drhs
3010 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3011 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3012 lacc->type, drhs);
3013 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3014 drhs, gsi_stmt (sad->old_gsi));
3015 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3019 if (lacc->first_child)
3020 load_assign_lhs_subreplacements (lacc, sad);
3024 /* Result code for SRA assignment modification. */
3025 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3026 SRA_AM_MODIFIED, /* stmt changed but not
3027 removed */
3028 SRA_AM_REMOVED }; /* stmt eliminated */
3030 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is the assignment
3031    statement and GSI is the statement iterator pointing at it.  Returns the
3032    same values as sra_modify_assign.  */
3034 static enum assignment_mod_result
3035 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3037 tree lhs = gimple_assign_lhs (stmt);
3038 struct access *acc;
3039 location_t loc;
3041 acc = get_access_for_expr (lhs);
3042 if (!acc)
3043 return SRA_AM_NONE;
3045 if (gimple_clobber_p (stmt))
3047 /* Remove clobbers of fully scalarized variables, otherwise
3048 do nothing. */
3049 if (acc->grp_covered)
3051 unlink_stmt_vdef (stmt);
3052 gsi_remove (gsi, true);
3053 release_defs (stmt);
3054 return SRA_AM_REMOVED;
3056 else
3057 return SRA_AM_NONE;
3060 loc = gimple_location (stmt);
3061 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3063 /* I have never seen this code path trigger but if it can happen the
3064 following should handle it gracefully. */
3065 if (access_has_children_p (acc))
3066 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3067 true, true, loc);
3068 return SRA_AM_MODIFIED;
3071 if (acc->grp_covered)
3073 init_subtree_with_zero (acc, gsi, false, loc);
3074 unlink_stmt_vdef (stmt);
3075 gsi_remove (gsi, true);
3076 release_defs (stmt);
3077 return SRA_AM_REMOVED;
3079 else
3081 init_subtree_with_zero (acc, gsi, true, loc);
3082 return SRA_AM_MODIFIED;
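/* For example (hypothetical): if S of the struct pair type is fully covered
   by its access tree, the statement

     s = {};

   is deleted and replaced by

     s$a = 0;
     s$b = 0;

   inserted before it; if S is only partially scalarized, the
   zero-initializations of the existing replacements are inserted after the
   original statement, which is kept.  */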
3086 /* Create and return a new suitable default definition SSA_NAME for RACC which
3087 is an access describing an uninitialized part of an aggregate that is being
3088 loaded. */
3090 static tree
3091 get_repl_default_def_ssa_name (struct access *racc)
3093 gcc_checking_assert (!racc->grp_to_be_replaced
3094 && !racc->grp_to_be_debug_replaced);
3095 if (!racc->replacement_decl)
3096 racc->replacement_decl = create_access_replacement (racc);
3097 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3100 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3101    bit-field field declaration somewhere in it.  */
3103 static inline bool
3104 contains_vce_or_bfcref_p (const_tree ref)
3106 while (handled_component_p (ref))
3108 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3109 || (TREE_CODE (ref) == COMPONENT_REF
3110 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3111 return true;
3112 ref = TREE_OPERAND (ref, 0);
3115 return false;
3118 /* Examine both sides of the assignment statement pointed to by STMT, replace
3119 them with a scalar replacement if there is one and generate copying of
3120 replacements if scalarized aggregates have been used in the assignment. GSI
3121 is used to hold generated statements for type conversions and subtree
3122 copying. */
3124 static enum assignment_mod_result
3125 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3127 struct access *lacc, *racc;
3128 tree lhs, rhs;
3129 bool modify_this_stmt = false;
3130 bool force_gimple_rhs = false;
3131 location_t loc;
3132 gimple_stmt_iterator orig_gsi = *gsi;
3134 if (!gimple_assign_single_p (stmt))
3135 return SRA_AM_NONE;
3136 lhs = gimple_assign_lhs (stmt);
3137 rhs = gimple_assign_rhs1 (stmt);
3139 if (TREE_CODE (rhs) == CONSTRUCTOR)
3140 return sra_modify_constructor_assign (stmt, gsi);
3142 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3143 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3144 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3146 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3147 gsi, false);
3148 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3149 gsi, true);
3150 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3153 lacc = get_access_for_expr (lhs);
3154 racc = get_access_for_expr (rhs);
3155 if (!lacc && !racc)
3156 return SRA_AM_NONE;
3158 loc = gimple_location (stmt);
3159 if (lacc && lacc->grp_to_be_replaced)
3161 lhs = get_access_replacement (lacc);
3162 gimple_assign_set_lhs (stmt, lhs);
3163 modify_this_stmt = true;
3164 if (lacc->grp_partial_lhs)
3165 force_gimple_rhs = true;
3166 sra_stats.exprs++;
3169 if (racc && racc->grp_to_be_replaced)
3171 rhs = get_access_replacement (racc);
3172 modify_this_stmt = true;
3173 if (racc->grp_partial_lhs)
3174 force_gimple_rhs = true;
3175 sra_stats.exprs++;
3177 else if (racc
3178 && !racc->grp_unscalarized_data
3179 && TREE_CODE (lhs) == SSA_NAME
3180 && !access_has_replacements_p (racc))
3182 rhs = get_repl_default_def_ssa_name (racc);
3183 modify_this_stmt = true;
3184 sra_stats.exprs++;
3187 if (modify_this_stmt)
3189 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3191 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3192 ??? This should move to fold_stmt which we simply should
3193 call after building a VIEW_CONVERT_EXPR here. */
3194 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3195 && !contains_bitfld_component_ref_p (lhs))
3197 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3198 gimple_assign_set_lhs (stmt, lhs);
3200 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3201 && !contains_vce_or_bfcref_p (rhs))
3202 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3204 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3206 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3207 rhs);
3208 if (is_gimple_reg_type (TREE_TYPE (lhs))
3209 && TREE_CODE (lhs) != SSA_NAME)
3210 force_gimple_rhs = true;
3215 if (lacc && lacc->grp_to_be_debug_replaced)
3217 tree dlhs = get_access_replacement (lacc);
3218 tree drhs = unshare_expr (rhs);
3219 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3221 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3222 && !contains_vce_or_bfcref_p (drhs))
3223 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3224 if (drhs
3225 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3226 TREE_TYPE (drhs)))
3227 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3228 TREE_TYPE (dlhs), drhs);
3230 gimple ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3231 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3234 /* From this point on, the function deals with assignments in between
3235 aggregates when at least one has scalar reductions of some of its
3236 components.  There are three possible scenarios: 1) both the LHS and RHS have
3237 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3239 In the first case, we would like to load the LHS components from RHS
3240 components whenever possible. If that is not possible, we would like to
3241 read it directly from the RHS (after updating it by storing in it its own
3242 components). If there are some necessary unscalarized data in the LHS,
3243 those will be loaded by the original assignment too. If neither of these
3244 cases happen, the original statement can be removed. Most of this is done
3245 by load_assign_lhs_subreplacements.
3247 In the second case, we would like to store all RHS scalarized components
3248 directly into LHS and if they cover the aggregate completely, remove the
3249 statement too. In the third case, we want the LHS components to be loaded
3250 directly from the RHS (DSE will remove the original statement if it
3251 becomes redundant).
3253 This is a bit complex but manageable when types match and when unions do
3254 not cause confusion in a way that we cannot really load a component of LHS
3255 from the RHS or vice versa (the access representing this level can have
3256 subaccesses that are accessible only through a different union field at a
3257 higher level - different from the one used in the examined expression).
3258 Unions are fun.
3260 Therefore, I specially handle a fourth case, happening when there is a
3261 specific type cast or it is impossible to locate a scalarized subaccess on
3262 the other side of the expression.  If that happens, I simply "refresh" the
3263 RHS by storing its scalarized components into it, leave the original
3264 statement there to do the copying and then load the scalar replacements of
3265 the LHS.  This is what the first branch does.  */
3267 if (modify_this_stmt
3268 || gimple_has_volatile_ops (stmt)
3269 || contains_vce_or_bfcref_p (rhs)
3270 || contains_vce_or_bfcref_p (lhs)
3271 || stmt_ends_bb_p (stmt))
3273 if (access_has_children_p (racc))
3274 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3275 gsi, false, false, loc);
3276 if (access_has_children_p (lacc))
3278 gimple_stmt_iterator alt_gsi = gsi_none ();
3279 if (stmt_ends_bb_p (stmt))
3281 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3282 gsi = &alt_gsi;
3284 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3285 gsi, true, true, loc);
3287 sra_stats.separate_lhs_rhs_handling++;
3289 /* This gimplification must be done after generate_subtree_copies,
3290 lest we insert the subtree copies in the middle of the gimplified
3291 sequence. */
3292 if (force_gimple_rhs)
3293 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3294 true, GSI_SAME_STMT);
3295 if (gimple_assign_rhs1 (stmt) != rhs)
3297 modify_this_stmt = true;
3298 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3299 gcc_assert (stmt == gsi_stmt (orig_gsi));
3302 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3304 else
3306 if (access_has_children_p (lacc)
3307 && access_has_children_p (racc)
3308 /* When an access represents an unscalarizable region, it usually
3309 represents accesses with variable offset and thus must not be used
3310 to generate new memory accesses. */
3311 && !lacc->grp_unscalarizable_region
3312 && !racc->grp_unscalarizable_region)
3314 struct subreplacement_assignment_data sad;
3316 sad.left_offset = lacc->offset;
3317 sad.assignment_lhs = lhs;
3318 sad.assignment_rhs = rhs;
3319 sad.top_racc = racc;
3320 sad.old_gsi = *gsi;
3321 sad.new_gsi = gsi;
3322 sad.loc = gimple_location (stmt);
3323 sad.refreshed = SRA_UDH_NONE;
3325 if (lacc->grp_read && !lacc->grp_covered)
3326 handle_unscalarized_data_in_subtree (&sad);
3328 load_assign_lhs_subreplacements (lacc, &sad);
3329 if (sad.refreshed != SRA_UDH_RIGHT)
3331 gsi_next (gsi);
3332 unlink_stmt_vdef (stmt);
3333 gsi_remove (&sad.old_gsi, true);
3334 release_defs (stmt);
3335 sra_stats.deleted++;
3336 return SRA_AM_REMOVED;
3339 else
3341 if (access_has_children_p (racc)
3342 && !racc->grp_unscalarized_data)
3344 if (dump_file)
3346 fprintf (dump_file, "Removing load: ");
3347 print_gimple_stmt (dump_file, stmt, 0, 0);
3349 generate_subtree_copies (racc->first_child, lhs,
3350 racc->offset, 0, 0, gsi,
3351 false, false, loc);
3352 gcc_assert (stmt == gsi_stmt (*gsi));
3353 unlink_stmt_vdef (stmt);
3354 gsi_remove (gsi, true);
3355 release_defs (stmt);
3356 sra_stats.deleted++;
3357 return SRA_AM_REMOVED;
3359 /* Restore the aggregate RHS from its components so the
3360 prevailing aggregate copy does the right thing. */
3361 if (access_has_children_p (racc))
3362 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3363 gsi, false, false, loc);
3364 /* Re-load the components of the aggregate copy destination.
3365 But use the RHS aggregate to load from to expose more
3366 optimization opportunities. */
3367 if (access_has_children_p (lacc))
3368 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3369 0, 0, gsi, true, true, loc);
3372 return SRA_AM_NONE;
3376 /* Traverse the function body and perform all modifications as decided in
3377    analyze_all_variable_accesses.  Return true iff the CFG has been
3378    changed.  */
3380 static bool
3381 sra_modify_function_body (void)
3383 bool cfg_changed = false;
3384 basic_block bb;
3386 FOR_EACH_BB_FN (bb, cfun)
3388 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3389 while (!gsi_end_p (gsi))
3391 gimple stmt = gsi_stmt (gsi);
3392 enum assignment_mod_result assign_result;
3393 bool modified = false, deleted = false;
3394 tree *t;
3395 unsigned i;
3397 switch (gimple_code (stmt))
3399 case GIMPLE_RETURN:
3400 t = gimple_return_retval_ptr (stmt);
3401 if (*t != NULL_TREE)
3402 modified |= sra_modify_expr (t, &gsi, false);
3403 break;
3405 case GIMPLE_ASSIGN:
3406 assign_result = sra_modify_assign (stmt, &gsi);
3407 modified |= assign_result == SRA_AM_MODIFIED;
3408 deleted = assign_result == SRA_AM_REMOVED;
3409 break;
3411 case GIMPLE_CALL:
3412 /* Operands must be processed before the lhs. */
3413 for (i = 0; i < gimple_call_num_args (stmt); i++)
3415 t = gimple_call_arg_ptr (stmt, i);
3416 modified |= sra_modify_expr (t, &gsi, false);
3419 if (gimple_call_lhs (stmt))
3421 t = gimple_call_lhs_ptr (stmt);
3422 modified |= sra_modify_expr (t, &gsi, true);
3424 break;
3426 case GIMPLE_ASM:
3427 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3429 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3430 modified |= sra_modify_expr (t, &gsi, false);
3432 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3434 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3435 modified |= sra_modify_expr (t, &gsi, true);
3437 break;
3439 default:
3440 break;
3443 if (modified)
3445 update_stmt (stmt);
3446 if (maybe_clean_eh_stmt (stmt)
3447 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3448 cfg_changed = true;
3450 if (!deleted)
3451 gsi_next (&gsi);
3455 gsi_commit_edge_inserts ();
3456 return cfg_changed;
3459 /* Generate statements initializing scalar replacements of parts of function
3460 parameters. */
3462 static void
3463 initialize_parameter_reductions (void)
3465 gimple_stmt_iterator gsi;
3466 gimple_seq seq = NULL;
3467 tree parm;
3469 gsi = gsi_start (seq);
3470 for (parm = DECL_ARGUMENTS (current_function_decl);
3471 parm;
3472 parm = DECL_CHAIN (parm))
3474 vec<access_p> *access_vec;
3475 struct access *access;
3477 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3478 continue;
3479 access_vec = get_base_access_vector (parm);
3480 if (!access_vec)
3481 continue;
3483 for (access = (*access_vec)[0];
3484 access;
3485 access = access->next_grp)
3486 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3487 EXPR_LOCATION (parm));
3490 seq = gsi_seq (gsi);
3491 if (seq)
3492 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3495 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3496 it reveals there are components of some aggregates to be scalarized, it runs
3497 the required transformations. */
3498 static unsigned int
3499 perform_intra_sra (void)
3501 int ret = 0;
3502 sra_initialize ();
3504 if (!find_var_candidates ())
3505 goto out;
3507 if (!scan_function ())
3508 goto out;
3510 if (!analyze_all_variable_accesses ())
3511 goto out;
3513 if (sra_modify_function_body ())
3514 ret = TODO_update_ssa | TODO_cleanup_cfg;
3515 else
3516 ret = TODO_update_ssa;
3517 initialize_parameter_reductions ();
3519 statistics_counter_event (cfun, "Scalar replacements created",
3520 sra_stats.replacements);
3521 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3522 statistics_counter_event (cfun, "Subtree copy stmts",
3523 sra_stats.subtree_copies);
3524 statistics_counter_event (cfun, "Subreplacement stmts",
3525 sra_stats.subreplacements);
3526 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3527 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3528 sra_stats.separate_lhs_rhs_handling);
3530 out:
3531 sra_deinitialize ();
3532 return ret;
3535 /* Perform early intraprocedural SRA. */
3536 static unsigned int
3537 early_intra_sra (void)
3539 sra_mode = SRA_MODE_EARLY_INTRA;
3540 return perform_intra_sra ();
3543 /* Perform "late" intraprocedural SRA. */
3544 static unsigned int
3545 late_intra_sra (void)
3547 sra_mode = SRA_MODE_INTRA;
3548 return perform_intra_sra ();
3552 static bool
3553 gate_intra_sra (void)
3555 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3559 namespace {
3561 const pass_data pass_data_sra_early =
3563 GIMPLE_PASS, /* type */
3564 "esra", /* name */
3565 OPTGROUP_NONE, /* optinfo_flags */
3566 TV_TREE_SRA, /* tv_id */
3567 ( PROP_cfg | PROP_ssa ), /* properties_required */
3568 0, /* properties_provided */
3569 0, /* properties_destroyed */
3570 0, /* todo_flags_start */
3571 TODO_update_ssa, /* todo_flags_finish */
3574 class pass_sra_early : public gimple_opt_pass
3576 public:
3577 pass_sra_early (gcc::context *ctxt)
3578 : gimple_opt_pass (pass_data_sra_early, ctxt)
3581 /* opt_pass methods: */
3582 virtual bool gate (function *) { return gate_intra_sra (); }
3583 virtual unsigned int execute (function *) { return early_intra_sra (); }
3585 }; // class pass_sra_early
3587 } // anon namespace
3589 gimple_opt_pass *
3590 make_pass_sra_early (gcc::context *ctxt)
3592 return new pass_sra_early (ctxt);
3595 namespace {
3597 const pass_data pass_data_sra =
3599 GIMPLE_PASS, /* type */
3600 "sra", /* name */
3601 OPTGROUP_NONE, /* optinfo_flags */
3602 TV_TREE_SRA, /* tv_id */
3603 ( PROP_cfg | PROP_ssa ), /* properties_required */
3604 0, /* properties_provided */
3605 0, /* properties_destroyed */
3606 TODO_update_address_taken, /* todo_flags_start */
3607 TODO_update_ssa, /* todo_flags_finish */
3610 class pass_sra : public gimple_opt_pass
3612 public:
3613 pass_sra (gcc::context *ctxt)
3614 : gimple_opt_pass (pass_data_sra, ctxt)
3617 /* opt_pass methods: */
3618 virtual bool gate (function *) { return gate_intra_sra (); }
3619 virtual unsigned int execute (function *) { return late_intra_sra (); }
3621 }; // class pass_sra
3623 } // anon namespace
3625 gimple_opt_pass *
3626 make_pass_sra (gcc::context *ctxt)
3628 return new pass_sra (ctxt);
3632 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3633    parameter.  */
3635 static bool
3636 is_unused_scalar_param (tree parm)
3638 tree name;
3639 return (is_gimple_reg (parm)
3640 && (!(name = ssa_default_def (cfun, parm))
3641 || has_zero_uses (name)));
3644 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3645 examine whether there are any direct or otherwise infeasible ones. If so,
3646 return true, otherwise return false. PARM must be a gimple register with a
3647 non-NULL default definition. */
3649 static bool
3650 ptr_parm_has_direct_uses (tree parm)
3652 imm_use_iterator ui;
3653 gimple stmt;
3654 tree name = ssa_default_def (cfun, parm);
3655 bool ret = false;
3657 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3659 int uses_ok = 0;
3660 use_operand_p use_p;
3662 if (is_gimple_debug (stmt))
3663 continue;
3665 /* Valid uses include dereferences on the lhs and the rhs. */
3666 if (gimple_has_lhs (stmt))
3668 tree lhs = gimple_get_lhs (stmt);
3669 while (handled_component_p (lhs))
3670 lhs = TREE_OPERAND (lhs, 0);
3671 if (TREE_CODE (lhs) == MEM_REF
3672 && TREE_OPERAND (lhs, 0) == name
3673 && integer_zerop (TREE_OPERAND (lhs, 1))
3674 && types_compatible_p (TREE_TYPE (lhs),
3675 TREE_TYPE (TREE_TYPE (name)))
3676 && !TREE_THIS_VOLATILE (lhs))
3677 uses_ok++;
3679 if (gimple_assign_single_p (stmt))
3681 tree rhs = gimple_assign_rhs1 (stmt);
3682 while (handled_component_p (rhs))
3683 rhs = TREE_OPERAND (rhs, 0);
3684 if (TREE_CODE (rhs) == MEM_REF
3685 && TREE_OPERAND (rhs, 0) == name
3686 && integer_zerop (TREE_OPERAND (rhs, 1))
3687 && types_compatible_p (TREE_TYPE (rhs),
3688 TREE_TYPE (TREE_TYPE (name)))
3689 && !TREE_THIS_VOLATILE (rhs))
3690 uses_ok++;
3692 else if (is_gimple_call (stmt))
3694 unsigned i;
3695 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3697 tree arg = gimple_call_arg (stmt, i);
3698 while (handled_component_p (arg))
3699 arg = TREE_OPERAND (arg, 0);
3700 if (TREE_CODE (arg) == MEM_REF
3701 && TREE_OPERAND (arg, 0) == name
3702 && integer_zerop (TREE_OPERAND (arg, 1))
3703 && types_compatible_p (TREE_TYPE (arg),
3704 TREE_TYPE (TREE_TYPE (name)))
3705 && !TREE_THIS_VOLATILE (arg))
3706 uses_ok++;
3710 /* If the number of valid uses does not match the number of
3711 uses in this stmt there is an unhandled use. */
3712 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3713 --uses_ok;
3715 if (uses_ok != 0)
3716 ret = true;
3718 if (ret)
3719 BREAK_FROM_IMM_USE_STMT (ui);
3722 return ret;
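/* To illustrate (hypothetical user code):

     int
     g (int *p)
     {
       int i = *p;	/* Counted as a valid dereference.  */
       int *q = p;	/* A direct use - makes this function return true.  */
       return i + *q;
     }

   The copy to Q uses P itself rather than dereferencing it at offset zero,
   so P cannot be turned into one or more scalar parameters by IPA-SRA.  */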
3725 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3726 them in candidate_bitmap. Note that these do not necessarily include
3727 parameters which are unused and thus can be removed.  Return true iff any
3728 such candidate has been found. */
3730 static bool
3731 find_param_candidates (void)
3733 tree parm;
3734 int count = 0;
3735 bool ret = false;
3736 const char *msg;
3738 for (parm = DECL_ARGUMENTS (current_function_decl);
3739 parm;
3740 parm = DECL_CHAIN (parm))
3742 tree type = TREE_TYPE (parm);
3743 tree_node **slot;
3745 count++;
3747 if (TREE_THIS_VOLATILE (parm)
3748 || TREE_ADDRESSABLE (parm)
3749 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3750 continue;
3752 if (is_unused_scalar_param (parm))
3754 ret = true;
3755 continue;
3758 if (POINTER_TYPE_P (type))
3760 type = TREE_TYPE (type);
3762 if (TREE_CODE (type) == FUNCTION_TYPE
3763 || TYPE_VOLATILE (type)
3764 || (TREE_CODE (type) == ARRAY_TYPE
3765 && TYPE_NONALIASED_COMPONENT (type))
3766 || !is_gimple_reg (parm)
3767 || is_va_list_type (type)
3768 || ptr_parm_has_direct_uses (parm))
3769 continue;
3771 else if (!AGGREGATE_TYPE_P (type))
3772 continue;
3774 if (!COMPLETE_TYPE_P (type)
3775 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3776 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3777 || (AGGREGATE_TYPE_P (type)
3778 && type_internals_preclude_sra_p (type, &msg)))
3779 continue;
3781 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3782 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3783 *slot = parm;
3785 ret = true;
3786 if (dump_file && (dump_flags & TDF_DETAILS))
3788 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3789 print_generic_expr (dump_file, parm, 0);
3790 fprintf (dump_file, "\n");
3794 func_param_count = count;
3795 return ret;
3798 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3799 maybe_modified. */
3801 static bool
3802 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3803 void *data)
3805 struct access *repr = (struct access *) data;
3807 repr->grp_maybe_modified = 1;
3808 return true;
3811 /* Analyze which representatives (in linked lists accessible from
3812    REPRESENTATIVES) can be modified by side effects of statements in the
3813    current function.  */
3815 static void
3816 analyze_modified_params (vec<access_p> representatives)
3818 int i;
3820 for (i = 0; i < func_param_count; i++)
3822 struct access *repr;
3824 for (repr = representatives[i];
3825 repr;
3826 repr = repr->next_grp)
3828 struct access *access;
3829 bitmap visited;
3830 ao_ref ar;
3832 if (no_accesses_p (repr))
3833 continue;
3834 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3835 || repr->grp_maybe_modified)
3836 continue;
3838 ao_ref_init (&ar, repr->expr);
3839 visited = BITMAP_ALLOC (NULL);
3840 for (access = repr; access; access = access->next_sibling)
3842 /* All accesses are read ones, otherwise grp_maybe_modified would
3843 be trivially set. */
3844 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3845 mark_maybe_modified, repr, &visited);
3846 if (repr->grp_maybe_modified)
3847 break;
3849 BITMAP_FREE (visited);
3854 /* Propagate distances in bb_dereferences in the opposite direction to the
3855 control flow edges, in each step storing the maximum of the current value
3856 and the minimum over all successors. These steps are repeated until the
3857 table stabilizes. Note that BBs which might terminate the function
3858 (according to the final_bbs bitmap) are never updated in this way. */
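/* A small hypothetical instance of the propagation step: suppose BB1 has two
   successors, BB2 with a recorded distance of 32 for some parameter and BB3
   with 64.  Only min (32, 64) = 32 bits are certainly dereferenced on every
   path leaving BB1, so BB1 inherits max (its current value, 32).  */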
3860 static void
3861 propagate_dereference_distances (void)
3863 basic_block bb;
3865 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3866 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3867 FOR_EACH_BB_FN (bb, cfun)
3869 queue.quick_push (bb);
3870 bb->aux = bb;
3873 while (!queue.is_empty ())
3875 edge_iterator ei;
3876 edge e;
3877 bool change = false;
3878 int i;
3880 bb = queue.pop ();
3881 bb->aux = NULL;
3883 if (bitmap_bit_p (final_bbs, bb->index))
3884 continue;
3886 for (i = 0; i < func_param_count; i++)
3888 int idx = bb->index * func_param_count + i;
3889 bool first = true;
3890 HOST_WIDE_INT inh = 0;
3892 FOR_EACH_EDGE (e, ei, bb->succs)
3894 int succ_idx = e->dest->index * func_param_count + i;
3896 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3897 continue;
3899 if (first)
3901 first = false;
3902 inh = bb_dereferences [succ_idx];
3904 else if (bb_dereferences [succ_idx] < inh)
3905 inh = bb_dereferences [succ_idx];
3908 if (!first && bb_dereferences[idx] < inh)
3910 bb_dereferences[idx] = inh;
3911 change = true;
3915 if (change && !bitmap_bit_p (final_bbs, bb->index))
3916 FOR_EACH_EDGE (e, ei, bb->preds)
3918 if (e->src->aux)
3919 continue;
3921 e->src->aux = e->src;
3922 queue.quick_push (e->src);
3927 /* Dump a dereferences TABLE with heading STR to file F. */
3929 static void
3930 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3932 basic_block bb;
3934 fprintf (f, "%s", str);
3935 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3936 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3938 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3939 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3941 int i;
3942 for (i = 0; i < func_param_count; i++)
3944 int idx = bb->index * func_param_count + i;
3945 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3948 fprintf (f, "\n");
3950 fprintf (f, "\n");
3953 /* Determine which (parts of) parameters passed by reference that are not
3954 assigned to are not certainly dereferenced in this function, and thus the
3955 dereferencing cannot be safely moved to the caller without potentially
3956 introducing a segfault. Mark such REPRESENTATIVES as
3957 grp_not_necessarilly_dereferenced.
3959 The maximum dereference "distance," i.e. the offset + size of the accessed
3960 part, is calculated for each pointer parameter rather than a simple
3961 boolean, to handle cases when only a fraction of the whole
3962 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3963 an example).
3965 The maximum dereference distances for each pointer parameter and BB are
3966 already stored in bb_dereferences. This routine simply propagates these
3967 values upwards by propagate_dereference_distances and then compares the
3968 distances of individual parameters in the ENTRY BB to the equivalent
3969 distances of each representative of a (fraction of a) parameter. */
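/* A hypothetical check: if a representative covers offset 32 and size 32
   (i.e. a distance of 64) but the ENTRY BB entry for that parameter only
   guarantees a distance of 32, then 64 > 32 and the representative is
   marked grp_not_necessarilly_dereferenced below.  */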
3971 static void
3972 analyze_caller_dereference_legality (vec<access_p> representatives)
3974 int i;
3976 if (dump_file && (dump_flags & TDF_DETAILS))
3977 dump_dereferences_table (dump_file,
3978 "Dereference table before propagation:\n",
3979 bb_dereferences);
3981 propagate_dereference_distances ();
3983 if (dump_file && (dump_flags & TDF_DETAILS))
3984 dump_dereferences_table (dump_file,
3985 "Dereference table after propagation:\n",
3986 bb_dereferences);
3988 for (i = 0; i < func_param_count; i++)
3990 struct access *repr = representatives[i];
3991 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
3993 if (!repr || no_accesses_p (repr))
3994 continue;
3998 if ((repr->offset + repr->size) > bb_dereferences[idx])
3999 repr->grp_not_necessarilly_dereferenced = 1;
4000 repr = repr->next_grp;
4002 while (repr);
4006 /* Return the representative access for the parameter declaration PARM if it is
4007 a scalar passed by reference which is not written to and the pointer value
4008 is not used directly. Thus, if it is legal to dereference it in the caller
4009 and we can rule out modifications through aliases, such a parameter should be
4010 turned into one passed by value. Return NULL otherwise. */
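/* A sketch of the intended transformation, with hypothetical names:

     int foo (int *p) { return *p + 1; }

   may effectively become

     int foo.isra.0 (int p_val) { return p_val + 1; }

   with each caller performing the dereference itself, provided the later
   analyses rule out indirect modification of *P and prove that the
   dereference is legal in every caller.  */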
4012 static struct access *
4013 unmodified_by_ref_scalar_representative (tree parm)
4015 int i, access_count;
4016 struct access *repr;
4017 vec<access_p> *access_vec;
4019 access_vec = get_base_access_vector (parm);
4020 gcc_assert (access_vec);
4021 repr = (*access_vec)[0];
4022 if (repr->write)
4023 return NULL;
4024 repr->group_representative = repr;
4026 access_count = access_vec->length ();
4027 for (i = 1; i < access_count; i++)
4029 struct access *access = (*access_vec)[i];
4030 if (access->write)
4031 return NULL;
4032 access->group_representative = repr;
4033 access->next_sibling = repr->next_sibling;
4034 repr->next_sibling = access;
4037 repr->grp_read = 1;
4038 repr->grp_scalar_ptr = 1;
4039 return repr;
4042 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4043 associated with. REQ_ALIGN is the minimum required alignment. */
4045 static bool
4046 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4048 unsigned int exp_align;
4049 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4050 is an incompatible assignment in a call statement (and possibly even in asm
4051 statements). This can be relaxed by using a new temporary but only for
4052 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4053 intraprocedural SRA we deal with this by keeping the old aggregate around,
4054 something we cannot do in IPA-SRA.) */
4055 if (access->write
4056 && (is_gimple_call (access->stmt)
4057 || gimple_code (access->stmt) == GIMPLE_ASM))
4058 return true;
4060 exp_align = get_object_alignment (access->expr);
4061 if (exp_align < req_align)
4062 return true;
4064 return false;
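/* A hypothetical example of the write-in-call situation rejected above:

     void foo (struct S *p) { p->f = bar (); }

   Here the store to p->f is the LHS of a call statement, so replacing it
   with a scalar could require an assignment of an incompatible type
   directly from the call, which is why such an access precludes
   IPA-SRA.  */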
4068 /* Sort collected accesses for parameter PARM, identify representatives for
4069 each accessed region and link them together. Return NULL if there are
4070 different but overlapping accesses; return the special pointer value meaning
4071 there are no accesses for this parameter if that is the case; and return the
4072 first representative otherwise. Set *RO_GRP if there is a group of accesses
4073 with only read (i.e. no write) accesses. */
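/* The "all or nothing" rule below in concrete (hypothetical) terms, with
   offsets and sizes in bits: accesses covering [0, 32) and [32, 64) are
   disjoint and form two separate groups, while accesses covering [0, 64)
   and [32, 64) overlap only partially, so the function gives up and
   returns NULL.  */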
4075 static struct access *
4076 splice_param_accesses (tree parm, bool *ro_grp)
4078 int i, j, access_count, group_count;
4079 int agg_size, total_size = 0;
4080 struct access *access, *res, **prev_acc_ptr = &res;
4081 vec<access_p> *access_vec;
4083 access_vec = get_base_access_vector (parm);
4084 if (!access_vec)
4085 return &no_accesses_representant;
4086 access_count = access_vec->length ();
4088 access_vec->qsort (compare_access_positions);
4090 i = 0;
4091 total_size = 0;
4092 group_count = 0;
4093 while (i < access_count)
4095 bool modification;
4096 tree a1_alias_type;
4097 access = (*access_vec)[i];
4098 modification = access->write;
4099 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4100 return NULL;
4101 a1_alias_type = reference_alias_ptr_type (access->expr);
4103 /* Access is about to become group representative unless we find some
4104 nasty overlap which would preclude us from breaking this parameter
4105 apart. */
4107 j = i + 1;
4108 while (j < access_count)
4110 struct access *ac2 = (*access_vec)[j];
4111 if (ac2->offset != access->offset)
4113 /* All or nothing law for parameters. */
4114 if (access->offset + access->size > ac2->offset)
4115 return NULL;
4116 else
4117 break;
4119 else if (ac2->size != access->size)
4120 return NULL;
4122 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4123 || (ac2->type != access->type
4124 && (TREE_ADDRESSABLE (ac2->type)
4125 || TREE_ADDRESSABLE (access->type)))
4126 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4127 return NULL;
4129 modification |= ac2->write;
4130 ac2->group_representative = access;
4131 ac2->next_sibling = access->next_sibling;
4132 access->next_sibling = ac2;
4133 j++;
4136 group_count++;
4137 access->grp_maybe_modified = modification;
4138 if (!modification)
4139 *ro_grp = true;
4140 *prev_acc_ptr = access;
4141 prev_acc_ptr = &access->next_grp;
4142 total_size += access->size;
4143 i = j;
4146 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4147 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4148 else
4149 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4150 if (total_size >= agg_size)
4151 return NULL;
4153 gcc_assert (group_count > 0);
4154 return res;
4157 /* Decide whether parameters with representative accesses given by REPR should
4158 be reduced into components. */
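/* A hypothetical size calculation: for a 128-bit by-value aggregate of
   which only two 32-bit fields are accessed, total_size is 64.  If
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR is 2, parm_size_limit is 2 * 128 = 256,
   and since 64 < 128 and 64 <= 256 the parameter is split into two
   components.  */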
4160 static int
4161 decide_one_param_reduction (struct access *repr)
4163 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4164 bool by_ref;
4165 tree parm;
4167 parm = repr->base;
4168 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4169 gcc_assert (cur_parm_size > 0);
4171 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4173 by_ref = true;
4174 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4176 else
4178 by_ref = false;
4179 agg_size = cur_parm_size;
4182 if (dump_file)
4184 struct access *acc;
4185 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4186 print_generic_expr (dump_file, parm, 0);
4187 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4188 for (acc = repr; acc; acc = acc->next_grp)
4189 dump_access (dump_file, acc, true);
4192 total_size = 0;
4193 new_param_count = 0;
4195 for (; repr; repr = repr->next_grp)
4197 gcc_assert (parm == repr->base);
4199 /* Taking the address of a non-addressable field is verboten. */
4200 if (by_ref && repr->non_addressable)
4201 return 0;
4203 /* Do not decompose a non-BLKmode param in a way that would
4204 create BLKmode params. Especially for by-reference passing
4205 (thus, pointer-type param) this is hardly worthwhile. */
4206 if (DECL_MODE (parm) != BLKmode
4207 && TYPE_MODE (repr->type) == BLKmode)
4208 return 0;
4210 if (!by_ref || (!repr->grp_maybe_modified
4211 && !repr->grp_not_necessarilly_dereferenced))
4212 total_size += repr->size;
4213 else
4214 total_size += cur_parm_size;
4216 new_param_count++;
4219 gcc_assert (new_param_count > 0);
4221 if (optimize_function_for_size_p (cfun))
4222 parm_size_limit = cur_parm_size;
4223 else
4224 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4225 * cur_parm_size);
4227 if (total_size < agg_size
4228 && total_size <= parm_size_limit)
4230 if (dump_file)
4231 fprintf (dump_file, " ....will be split into %i components\n",
4232 new_param_count);
4233 return new_param_count;
4235 else
4236 return 0;
4239 /* The order of the following enumerators is important; we need to do extra
4240 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4241 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4242 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
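/* The ordering matters because splice_all_param_accesses upgrades its result
   with comparisons such as "result < MODIF_BY_REF_ACCESSES", so enumerators
   appearing later must compare greater than those appearing earlier.  */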
4244 /* Identify representatives of all accesses to all candidate parameters for
4245 IPA-SRA. Return result based on what representatives have been found. */
4247 static enum ipa_splicing_result
4248 splice_all_param_accesses (vec<access_p> &representatives)
4250 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4251 tree parm;
4252 struct access *repr;
4254 representatives.create (func_param_count);
4256 for (parm = DECL_ARGUMENTS (current_function_decl);
4257 parm;
4258 parm = DECL_CHAIN (parm))
4260 if (is_unused_scalar_param (parm))
4262 representatives.quick_push (&no_accesses_representant);
4263 if (result == NO_GOOD_ACCESS)
4264 result = UNUSED_PARAMS;
4266 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4267 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4268 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4270 repr = unmodified_by_ref_scalar_representative (parm);
4271 representatives.quick_push (repr);
4272 if (repr)
4273 result = UNMODIF_BY_REF_ACCESSES;
4275 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4277 bool ro_grp = false;
4278 repr = splice_param_accesses (parm, &ro_grp);
4279 representatives.quick_push (repr);
4281 if (repr && !no_accesses_p (repr))
4283 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4285 if (ro_grp)
4286 result = UNMODIF_BY_REF_ACCESSES;
4287 else if (result < MODIF_BY_REF_ACCESSES)
4288 result = MODIF_BY_REF_ACCESSES;
4290 else if (result < BY_VAL_ACCESSES)
4291 result = BY_VAL_ACCESSES;
4293 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4294 result = UNUSED_PARAMS;
4296 else
4297 representatives.quick_push (NULL);
4300 if (result == NO_GOOD_ACCESS)
4302 representatives.release ();
4303 return NO_GOOD_ACCESS;
4306 return result;
4309 /* Return the index of BASE in PARMS. Abort if it is not found. */
4311 static inline int
4312 get_param_index (tree base, vec<tree> parms)
4314 int i, len;
4316 len = parms.length ();
4317 for (i = 0; i < len; i++)
4318 if (parms[i] == base)
4319 return i;
4320 gcc_unreachable ();
4323 /* Convert the decisions made at the representative level into compact
4324 parameter adjustments. REPRESENTATIVES are pointers to the first
4325 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4326 final number of adjustments. */
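/* For instance (hypothetically): a parameter split into two components
   produces two adjustment entries carrying the component types and bit
   offsets; an unused parameter produces a single IPA_PARM_OP_REMOVE entry;
   and a parameter left untouched produces a single IPA_PARM_OP_COPY
   entry.  */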
4328 static ipa_parm_adjustment_vec
4329 turn_representatives_into_adjustments (vec<access_p> representatives,
4330 int adjustments_count)
4332 vec<tree> parms;
4333 ipa_parm_adjustment_vec adjustments;
4334 tree parm;
4335 int i;
4337 gcc_assert (adjustments_count > 0);
4338 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4339 adjustments.create (adjustments_count);
4340 parm = DECL_ARGUMENTS (current_function_decl);
4341 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4343 struct access *repr = representatives[i];
4345 if (!repr || no_accesses_p (repr))
4347 struct ipa_parm_adjustment adj;
4349 memset (&adj, 0, sizeof (adj));
4350 adj.base_index = get_param_index (parm, parms);
4351 adj.base = parm;
4352 if (!repr)
4353 adj.op = IPA_PARM_OP_COPY;
4354 else
4355 adj.op = IPA_PARM_OP_REMOVE;
4356 adj.arg_prefix = "ISRA";
4357 adjustments.quick_push (adj);
4359 else
4361 struct ipa_parm_adjustment adj;
4362 int index = get_param_index (parm, parms);
4364 for (; repr; repr = repr->next_grp)
4366 memset (&adj, 0, sizeof (adj));
4367 gcc_assert (repr->base == parm);
4368 adj.base_index = index;
4369 adj.base = repr->base;
4370 adj.type = repr->type;
4371 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4372 adj.offset = repr->offset;
4373 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4374 && (repr->grp_maybe_modified
4375 || repr->grp_not_necessarilly_dereferenced));
4376 adj.arg_prefix = "ISRA";
4377 adjustments.quick_push (adj);
4381 parms.release ();
4382 return adjustments;
4385 /* Analyze the collected accesses and produce a plan for what to do with the
4386 parameters in the form of adjustments, an empty vector meaning do nothing. */
4388 static ipa_parm_adjustment_vec
4389 analyze_all_param_acesses (void)
4391 enum ipa_splicing_result repr_state;
4392 bool proceed = false;
4393 int i, adjustments_count = 0;
4394 vec<access_p> representatives;
4395 ipa_parm_adjustment_vec adjustments;
4397 repr_state = splice_all_param_accesses (representatives);
4398 if (repr_state == NO_GOOD_ACCESS)
4399 return ipa_parm_adjustment_vec ();
4401 /* If there are any parameters passed by reference which are not modified
4402 directly, we need to check whether they can be modified indirectly. */
4403 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4405 analyze_caller_dereference_legality (representatives);
4406 analyze_modified_params (representatives);
4409 for (i = 0; i < func_param_count; i++)
4411 struct access *repr = representatives[i];
4413 if (repr && !no_accesses_p (repr))
4415 if (repr->grp_scalar_ptr)
4417 adjustments_count++;
4418 if (repr->grp_not_necessarilly_dereferenced
4419 || repr->grp_maybe_modified)
4420 representatives[i] = NULL;
4421 else
4423 proceed = true;
4424 sra_stats.scalar_by_ref_to_by_val++;
4427 else
4429 int new_components = decide_one_param_reduction (repr);
4431 if (new_components == 0)
4433 representatives[i] = NULL;
4434 adjustments_count++;
4436 else
4438 adjustments_count += new_components;
4439 sra_stats.aggregate_params_reduced++;
4440 sra_stats.param_reductions_created += new_components;
4441 proceed = true;
4445 else
4447 if (no_accesses_p (repr))
4449 proceed = true;
4450 sra_stats.deleted_unused_parameters++;
4452 adjustments_count++;
4456 if (!proceed && dump_file)
4457 fprintf (dump_file, "NOT proceeding to change params.\n");
4459 if (proceed)
4460 adjustments = turn_representatives_into_adjustments (representatives,
4461 adjustments_count);
4462 else
4463 adjustments = ipa_parm_adjustment_vec ();
4465 representatives.release ();
4466 return adjustments;
4469 /* If a parameter replacement identified by ADJ does not yet exist in the form
4470 of a declaration, create it and record it, otherwise return the previously
4471 created one. */
4473 static tree
4474 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4476 tree repl;
4477 if (!adj->new_ssa_base)
4479 char *pretty_name = make_fancy_name (adj->base);
4481 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4482 DECL_NAME (repl) = get_identifier (pretty_name);
4483 obstack_free (&name_obstack, pretty_name);
4485 adj->new_ssa_base = repl;
4487 else
4488 repl = adj->new_ssa_base;
4489 return repl;
4492 /* Find the first adjustment for a particular parameter BASE in a vector of
4493 ADJUSTMENTS which is not a plain copy (IPA_PARM_OP_COPY). Return NULL if
4494 there is no such adjustment. */
4496 static struct ipa_parm_adjustment *
4497 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4499 int i, len;
4501 len = adjustments.length ();
4502 for (i = 0; i < len; i++)
4504 struct ipa_parm_adjustment *adj;
4506 adj = &adjustments[i];
4507 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4508 return adj;
4511 return NULL;
4514 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4515 removed because its value is not used, replace the SSA_NAME, together with
4516 all of its uses, with one relating to a created VAR_DECL and return true.
4517 ADJUSTMENTS is the vector of adjustments. */
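/* A hypothetical before/after: if parameter I is being removed, a
   definition such as

     i_1 = PHI <i_2(3), i_4(4)>

   has its result replaced by a fresh SSA name based on a new "ISR"
   temporary, and all uses of i_1 are redirected to it, so nothing refers
   to the removed PARM_DECL any more.  */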
4519 static bool
4520 replace_removed_params_ssa_names (gimple stmt,
4521 ipa_parm_adjustment_vec adjustments)
4523 struct ipa_parm_adjustment *adj;
4524 tree lhs, decl, repl, name;
4526 if (gimple_code (stmt) == GIMPLE_PHI)
4527 lhs = gimple_phi_result (stmt);
4528 else if (is_gimple_assign (stmt))
4529 lhs = gimple_assign_lhs (stmt);
4530 else if (is_gimple_call (stmt))
4531 lhs = gimple_call_lhs (stmt);
4532 else
4533 gcc_unreachable ();
4535 if (TREE_CODE (lhs) != SSA_NAME)
4536 return false;
4538 decl = SSA_NAME_VAR (lhs);
4539 if (decl == NULL_TREE
4540 || TREE_CODE (decl) != PARM_DECL)
4541 return false;
4543 adj = get_adjustment_for_base (adjustments, decl);
4544 if (!adj)
4545 return false;
4547 repl = get_replaced_param_substitute (adj);
4548 name = make_ssa_name (repl, stmt);
4550 if (dump_file)
4552 fprintf (dump_file, "replacing an SSA name of a removed param ");
4553 print_generic_expr (dump_file, lhs, 0);
4554 fprintf (dump_file, " with ");
4555 print_generic_expr (dump_file, name, 0);
4556 fprintf (dump_file, "\n");
4559 if (is_gimple_assign (stmt))
4560 gimple_assign_set_lhs (stmt, name);
4561 else if (is_gimple_call (stmt))
4562 gimple_call_set_lhs (stmt, name);
4563 else
4564 gimple_phi_set_result (stmt, name);
4566 replace_uses_by (lhs, name);
4567 release_ssa_name (lhs);
4568 return true;
4571 /* If the statement STMT contains any expressions that need to be replaced
4572 with different ones as noted by ADJUSTMENTS, do so. Handle any potential type
4573 incompatibilities (GSI is used to accommodate conversion statements and must
4574 point to the statement). Return true iff the statement was modified. */
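/* One hypothetical incompatibility handled here: an assignment between two
   single-field structures that has been rewritten into an assignment
   between their scalar replacements may end up with mismatched types; the
   RHS is then wrapped in a VIEW_CONVERT_EXPR, while a CONSTRUCTOR on the
   RHS is replaced by a zero constant when the LHS has a register type
   (see PR 42714 below).  */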
4576 static bool
4577 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4578 ipa_parm_adjustment_vec adjustments)
4580 tree *lhs_p, *rhs_p;
4581 bool any;
4583 if (!gimple_assign_single_p (stmt))
4584 return false;
4586 rhs_p = gimple_assign_rhs1_ptr (stmt);
4587 lhs_p = gimple_assign_lhs_ptr (stmt);
4589 any = ipa_modify_expr (rhs_p, false, adjustments);
4590 any |= ipa_modify_expr (lhs_p, false, adjustments);
4591 if (any)
4593 tree new_rhs = NULL_TREE;
4595 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4597 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4599 /* V_C_Es of constructors can cause trouble (PR 42714). */
4600 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4601 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4602 else
4603 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4604 NULL);
4606 else
4607 new_rhs = fold_build1_loc (gimple_location (stmt),
4608 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4609 *rhs_p);
4611 else if (REFERENCE_CLASS_P (*rhs_p)
4612 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4613 && !is_gimple_reg (*lhs_p))
4614 /* This can happen when an assignment between two single-field
4615 structures is turned into an assignment between two pointers to
4616 scalars (PR 42237). */
4617 new_rhs = *rhs_p;
4619 if (new_rhs)
4621 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4622 true, GSI_SAME_STMT);
4624 gimple_assign_set_rhs_from_tree (gsi, tmp);
4627 return true;
4630 return false;
4633 /* Traverse the function body and perform all modifications as described in
4634 ADJUSTMENTS. Return true iff the CFG has been changed. */
4636 bool
4637 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4639 bool cfg_changed = false;
4640 basic_block bb;
4642 FOR_EACH_BB_FN (bb, cfun)
4644 gimple_stmt_iterator gsi;
4646 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4647 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4649 gsi = gsi_start_bb (bb);
4650 while (!gsi_end_p (gsi))
4652 gimple stmt = gsi_stmt (gsi);
4653 bool modified = false;
4654 tree *t;
4655 unsigned i;
4657 switch (gimple_code (stmt))
4659 case GIMPLE_RETURN:
4660 t = gimple_return_retval_ptr (stmt);
4661 if (*t != NULL_TREE)
4662 modified |= ipa_modify_expr (t, true, adjustments);
4663 break;
4665 case GIMPLE_ASSIGN:
4666 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4667 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4668 break;
4670 case GIMPLE_CALL:
4671 /* Operands must be processed before the lhs. */
4672 for (i = 0; i < gimple_call_num_args (stmt); i++)
4674 t = gimple_call_arg_ptr (stmt, i);
4675 modified |= ipa_modify_expr (t, true, adjustments);
4678 if (gimple_call_lhs (stmt))
4680 t = gimple_call_lhs_ptr (stmt);
4681 modified |= ipa_modify_expr (t, false, adjustments);
4682 modified |= replace_removed_params_ssa_names (stmt,
4683 adjustments);
4685 break;
4687 case GIMPLE_ASM:
4688 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4690 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4691 modified |= ipa_modify_expr (t, true, adjustments);
4693 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4695 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4696 modified |= ipa_modify_expr (t, false, adjustments);
4698 break;
4700 default:
4701 break;
4704 if (modified)
4706 update_stmt (stmt);
4707 if (maybe_clean_eh_stmt (stmt)
4708 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4709 cfg_changed = true;
4711 gsi_next (&gsi);
4715 return cfg_changed;
4718 /* Call gimple_debug_bind_reset_value on all debug statements describing
4719 gimple register parameters that are being removed or replaced. */
4721 static void
4722 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4724 int i, len;
4725 gimple_stmt_iterator *gsip = NULL, gsi;
4727 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4729 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4730 gsip = &gsi;
4732 len = adjustments.length ();
4733 for (i = 0; i < len; i++)
4735 struct ipa_parm_adjustment *adj;
4736 imm_use_iterator ui;
4737 gimple stmt, def_temp;
4738 tree name, vexpr, copy = NULL_TREE;
4739 use_operand_p use_p;
4741 adj = &adjustments[i];
4742 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4743 continue;
4744 name = ssa_default_def (cfun, adj->base);
4745 vexpr = NULL;
4746 if (name)
4747 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4749 if (gimple_clobber_p (stmt))
4751 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4752 unlink_stmt_vdef (stmt);
4753 gsi_remove (&cgsi, true);
4754 release_defs (stmt);
4755 continue;
4757 /* All other users must have been removed by
4758 ipa_sra_modify_function_body. */
4759 gcc_assert (is_gimple_debug (stmt));
4760 if (vexpr == NULL && gsip != NULL)
4762 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4763 vexpr = make_node (DEBUG_EXPR_DECL);
4764 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4765 NULL);
4766 DECL_ARTIFICIAL (vexpr) = 1;
4767 TREE_TYPE (vexpr) = TREE_TYPE (name);
4768 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4769 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4771 if (vexpr)
4773 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4774 SET_USE (use_p, vexpr);
4776 else
4777 gimple_debug_bind_reset_value (stmt);
4778 update_stmt (stmt);
4780 /* Create a VAR_DECL for debug info purposes. */
4781 if (!DECL_IGNORED_P (adj->base))
4783 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4784 VAR_DECL, DECL_NAME (adj->base),
4785 TREE_TYPE (adj->base));
4786 if (DECL_PT_UID_SET_P (adj->base))
4787 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4788 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4789 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4790 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4791 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4792 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4793 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4794 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4795 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4796 SET_DECL_RTL (copy, 0);
4797 TREE_USED (copy) = 1;
4798 DECL_CONTEXT (copy) = current_function_decl;
4799 add_local_decl (cfun, copy);
4800 DECL_CHAIN (copy) =
4801 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4802 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4804 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4806 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4807 if (vexpr)
4808 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4809 else
4810 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4811 NULL);
4812 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4817 /* Return false if all callers have at least as many actual arguments as there
4818 are formal parameters in the current function and their types match;
4819 return true otherwise. */
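/* A hypothetical mismatch: a caller compiled against an old-style,
   unprototyped declaration may pass fewer arguments, or arguments of
   different types, than the definition expects; such call statements
   cannot be reliably adjusted, so IPA-SRA has to be skipped.  */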
4821 static bool
4822 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4823 void *data ATTRIBUTE_UNUSED)
4825 struct cgraph_edge *cs;
4826 for (cs = node->callers; cs; cs = cs->next_caller)
4827 if (!callsite_arguments_match_p (cs->call_stmt))
4828 return true;
4830 return false;
4833 /* Convert all callers of NODE. */
4835 static bool
4836 convert_callers_for_node (struct cgraph_node *node,
4837 void *data)
4839 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4840 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4841 struct cgraph_edge *cs;
4843 for (cs = node->callers; cs; cs = cs->next_caller)
4845 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4847 if (dump_file)
4848 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4849 xstrdup (cs->caller->name ()),
4850 cs->caller->order,
4851 xstrdup (cs->callee->name ()),
4852 cs->callee->order);
4854 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4856 pop_cfun ();
4859 for (cs = node->callers; cs; cs = cs->next_caller)
4860 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4861 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4862 compute_inline_parameters (cs->caller, true);
4863 BITMAP_FREE (recomputed_callers);
4865 return true;
4868 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4870 static void
4871 convert_callers (struct cgraph_node *node, tree old_decl,
4872 ipa_parm_adjustment_vec adjustments)
4874 basic_block this_block;
4876 node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
4877 &adjustments, false);
4879 if (!encountered_recursive_call)
4880 return;
4882 FOR_EACH_BB_FN (this_block, cfun)
4884 gimple_stmt_iterator gsi;
4886 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4888 gimple stmt = gsi_stmt (gsi);
4889 tree call_fndecl;
4890 if (gimple_code (stmt) != GIMPLE_CALL)
4891 continue;
4892 call_fndecl = gimple_call_fndecl (stmt);
4893 if (call_fndecl == old_decl)
4895 if (dump_file)
4896 fprintf (dump_file, "Adjusting recursive call");
4897 gimple_call_set_fndecl (stmt, node->decl);
4898 ipa_modify_call_arguments (NULL, stmt, adjustments);
4903 return;
4906 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4907 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4909 static bool
4910 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4912 struct cgraph_node *new_node;
4913 bool cfg_changed;
4915 cgraph_edge::rebuild_edges ();
4916 free_dominance_info (CDI_DOMINATORS);
4917 pop_cfun ();
4919 /* This must be done after rebuilding cgraph edges for node above.
4920 Otherwise any recursive calls to node that are recorded in
4921 redirect_callers will be corrupted. */
4922 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4923 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4924 NULL, false, NULL, NULL,
4925 "isra");
4926 redirect_callers.release ();
4928 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4929 ipa_modify_formal_parameters (current_function_decl, adjustments);
4930 cfg_changed = ipa_sra_modify_function_body (adjustments);
4931 sra_ipa_reset_debug_stmts (adjustments);
4932 convert_callers (new_node, node->decl, adjustments);
4933 new_node->make_local ();
4934 return cfg_changed;
4937 /* If NODE has a caller, return true. */
4939 static bool
4940 has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4942 if (node->callers)
4943 return true;
4944 return false;
4947 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4948 its attributes, return true otherwise. NODE is the cgraph node of the
4949 current function. */
4951 static bool
4952 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4954 if (!node->can_be_local_p ())
4956 if (dump_file)
4957 fprintf (dump_file, "Function not local to this compilation unit.\n");
4958 return false;
4961 if (!node->local.can_change_signature)
4963 if (dump_file)
4964 fprintf (dump_file, "Function can not change signature.\n");
4965 return false;
4968 if (!tree_versionable_function_p (node->decl))
4970 if (dump_file)
4971 fprintf (dump_file, "Function is not versionable.\n");
4972 return false;
4975 if (!opt_for_fn (node->decl, optimize)
4976 || !opt_for_fn (node->decl, flag_ipa_sra))
4978 if (dump_file)
4979 fprintf (dump_file, "Function not optimized.\n");
4980 return false;
4983 if (DECL_VIRTUAL_P (current_function_decl))
4985 if (dump_file)
4986 fprintf (dump_file, "Function is a virtual method.\n");
4987 return false;
4990 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4991 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4993 if (dump_file)
4994 fprintf (dump_file, "Function too big to be made truly local.\n");
4995 return false;
4998 if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
5000 if (dump_file)
5001 fprintf (dump_file,
5002 "Function has no callers in this compilation unit.\n");
5003 return false;
5006 if (cfun->stdarg)
5008 if (dump_file)
5009 fprintf (dump_file, "Function uses stdarg.\n");
5010 return false;
5013 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5014 return false;
5016 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5018 if (dump_file)
5019 fprintf (dump_file, "Always inline function will be inlined "
5020 "anyway. \n");
5021 return false;
5024 return true;
5027 /* Perform early interprocedural SRA. */
5029 static unsigned int
5030 ipa_early_sra (void)
5032 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5033 ipa_parm_adjustment_vec adjustments;
5034 int ret = 0;
5036 if (!ipa_sra_preliminary_function_checks (node))
5037 return 0;
5039 sra_initialize ();
5040 sra_mode = SRA_MODE_EARLY_IPA;
5042 if (!find_param_candidates ())
5044 if (dump_file)
5045 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5046 goto simple_out;
5049 if (node->call_for_symbol_thunks_and_aliases
5050 (some_callers_have_mismatched_arguments_p, NULL, true))
5052 if (dump_file)
5053 fprintf (dump_file, "There are callers with an insufficient number of "
5054 "arguments or arguments with type mismatches.\n");
5055 goto simple_out;
5058 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5059 func_param_count
5060 * last_basic_block_for_fn (cfun));
5061 final_bbs = BITMAP_ALLOC (NULL);
5063 scan_function ();
5064 if (encountered_apply_args)
5066 if (dump_file)
5067 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5068 goto out;
5071 if (encountered_unchangable_recursive_call)
5073 if (dump_file)
5074 fprintf (dump_file, "Function calls itself with an insufficient "
5075 "number of arguments.\n");
5076 goto out;
5079 adjustments = analyze_all_param_acesses ();
5080 if (!adjustments.exists ())
5081 goto out;
5082 if (dump_file)
5083 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5085 if (modify_function (node, adjustments))
5086 ret = TODO_update_ssa | TODO_cleanup_cfg;
5087 else
5088 ret = TODO_update_ssa;
5089 adjustments.release ();
5091 statistics_counter_event (cfun, "Unused parameters deleted",
5092 sra_stats.deleted_unused_parameters);
5093 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5094 sra_stats.scalar_by_ref_to_by_val);
5095 statistics_counter_event (cfun, "Aggregate parameters broken up",
5096 sra_stats.aggregate_params_reduced);
5097 statistics_counter_event (cfun, "Aggregate parameter components created",
5098 sra_stats.param_reductions_created);
5100 out:
5101 BITMAP_FREE (final_bbs);
5102 free (bb_dereferences);
5103 simple_out:
5104 sra_deinitialize ();
5105 return ret;
5108 namespace {
5110 const pass_data pass_data_early_ipa_sra =
5112 GIMPLE_PASS, /* type */
5113 "eipa_sra", /* name */
5114 OPTGROUP_NONE, /* optinfo_flags */
5115 TV_IPA_SRA, /* tv_id */
5116 0, /* properties_required */
5117 0, /* properties_provided */
5118 0, /* properties_destroyed */
5119 0, /* todo_flags_start */
5120 TODO_dump_symtab, /* todo_flags_finish */
5123 class pass_early_ipa_sra : public gimple_opt_pass
5125 public:
5126 pass_early_ipa_sra (gcc::context *ctxt)
5127 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5130 /* opt_pass methods: */
5131 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5132 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5134 }; // class pass_early_ipa_sra
5136 } // anon namespace
5138 gimple_opt_pass *
5139 make_pass_early_ipa_sra (gcc::context *ctxt)
5141 return new pass_early_ipa_sra (ctxt);