/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2013 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
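/* As a rough illustration of the overall effect (the "$" names follow the
   convention of make_fancy_name below; the actual transformation happens on
   GIMPLE, not on C source), a function such as

     struct pair { int x; int y; };

     int
     sum_pair (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   is conceptually rewritten by intraprocedural SRA into

     int p$x, p$y;
     p$x = 1;
     p$y = 2;
     return p$x + p$y;

   after which the aggregate itself is dead and the scalar optimizers can
   e.g. propagate the constants.  */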
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "ipa-prop.h"
#include "statistics.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;
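/* To sketch the structure defined above (sizes and offsets are in bits and
   assume a 32-bit int), for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; } s;

   with accesses to s.in, s.in.a, s.in.b and s.c, the analysis phase would
   build roughly

     <base 's', offset  0, size 64>     representative of s.in
       <base 's', offset  0, size 32>     s.in.a, via first_child
       <base 's', offset 32, size 32>     s.in.b, via next_sibling
     <base 's', offset 64, size 32>     s.c, via next_grp of the s.in root

   where each inner access lies entirely within its parent.  */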
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static struct pointer_map_t *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : typed_noop_remove <tree_node>
{
  typedef tree_node value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const value_type *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const value_type *a, const compare_type *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table <uid_decl_hasher> candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates.find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(vec<access_p> **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates.create (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  vec<access_p> *access_vec = (vec<access_p> *) *value;

  vec_free (access_vec);
  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  candidates.dispose ();
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates.clear_slot (candidates.find_slot_with_hash (decl,
                                                           DECL_UID (decl),
                                                           NO_INSERT));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!host_integerp (DECL_SIZE (fld), 1))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!host_integerp (bit_position (fld), 0))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  vec<access_p> *v;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    v = (vec<access_p> *) *slot;
  else
    vec_alloc (v, 32);

  v->safe_push (access);

  *((vec<access_p> **)
      pointer_map_insert (base_access_vec, base)) = v;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
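/* For instance, the predicate above would accept

     struct ok { int i; struct { float f; } nested; };

   but reject both of the following, the first because an array field is an
   aggregate that is not itself a record, the second because of the
   DECL_BIT_FIELD check:

     struct no1 { int arr[4]; };
     struct no2 { int bits : 3; };  */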
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
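/* For example, accesses to the same variable would be ordered by the
   comparison above as

     <offset 0, size 64>                    bigger size first
     <offset 0, size 32, type int>          scalar before aggregate
     <offset 0, size 32, aggregate type>
     <offset 32, size 32>                   then higher offsets

   so that accesses with equal offset and size form contiguous runs headed
   by the most suitable representative type.  */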
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for the replacement variable, based on the
   access expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
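/* The resulting names mirror the access path, e.g. an access expression
   rec.fld[3].x would yield the fancy name "rec$fld$3$x", which then shows
   up in dumps and in the DECL_NAME of the replacement variable.  */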
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align < TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !host_integerp (tr_pos, 1))
                continue;
              pos = TREE_INT_CST_LOW (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = TREE_INT_CST_LOW (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
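/* As an example of the above (again with bit offsets and a 32-bit int),
   given a variable s of type

     struct s { int a; struct { int b; int c; } in; };

   a request for offset 64 with exp_type int would rewrite *RES to s.in.c
   and return true, whereas offset 16 would fail because it points into the
   middle of field a.  */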
/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!host_integerp (TYPE_SIZE (type), 1))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
         we also want to schedule it rather late.  Thus we ignore it in
         the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates.find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
1938 /* Create a variable for the given ACCESS which determines the type, name and a
1939 few other properties. Return the variable declaration; the caller is
1940 expected to store it in ACCESS->replacement_decl. */
1942 static tree
1943 create_access_replacement (struct access *access)
1945 tree repl;
1947 if (access->grp_to_be_debug_replaced)
1949 repl = create_tmp_var_raw (access->type, NULL);
1950 DECL_CONTEXT (repl) = current_function_decl;
1952 else
1953 repl = create_tmp_var (access->type, "SR");
1954 if (TREE_CODE (access->type) == COMPLEX_TYPE
1955 || TREE_CODE (access->type) == VECTOR_TYPE)
1957 if (!access->grp_partial_lhs)
1958 DECL_GIMPLE_REG_P (repl) = 1;
1960 else if (access->grp_partial_lhs
1961 && is_gimple_reg_type (access->type))
1962 TREE_ADDRESSABLE (repl) = 1;
1964 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1965 DECL_ARTIFICIAL (repl) = 1;
1966 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1968 if (DECL_NAME (access->base)
1969 && !DECL_IGNORED_P (access->base)
1970 && !DECL_ARTIFICIAL (access->base))
1972 char *pretty_name = make_fancy_name (access->expr);
1973 tree debug_expr = unshare_expr_without_location (access->expr), d;
1974 bool fail = false;
1976 DECL_NAME (repl) = get_identifier (pretty_name);
1977 obstack_free (&name_obstack, pretty_name);
1979 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1980 as DECL_DEBUG_EXPR isn't considered when looking for still
1981 used SSA_NAMEs and thus they could be freed. All debug info
1982 generation cares about is whether something is constant or variable
1983 and that get_ref_base_and_extent works properly on the
1984 expression. It cannot handle accesses at a non-constant offset
1985 though, so just give up in those cases. */
1986 for (d = debug_expr;
1987 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1988 d = TREE_OPERAND (d, 0))
1989 switch (TREE_CODE (d))
1991 case ARRAY_REF:
1992 case ARRAY_RANGE_REF:
1993 if (TREE_OPERAND (d, 1)
1994 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1995 fail = true;
1996 if (TREE_OPERAND (d, 3)
1997 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1998 fail = true;
1999 /* FALLTHRU */
2000 case COMPONENT_REF:
2001 if (TREE_OPERAND (d, 2)
2002 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2003 fail = true;
2004 break;
2005 case MEM_REF:
2006 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2007 fail = true;
2008 else
2009 d = TREE_OPERAND (d, 0);
2010 break;
2011 default:
2012 break;
2014 if (!fail)
2016 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2017 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2019 if (access->grp_no_warning)
2020 TREE_NO_WARNING (repl) = 1;
2021 else
2022 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2024 else
2025 TREE_NO_WARNING (repl) = 1;
2027 if (dump_file)
2029 if (access->grp_to_be_debug_replaced)
2031 fprintf (dump_file, "Created a debug-only replacement for ");
2032 print_generic_expr (dump_file, access->base, 0);
2033 fprintf (dump_file, " offset: %u, size: %u\n",
2034 (unsigned) access->offset, (unsigned) access->size);
2036 else
2038 fprintf (dump_file, "Created a replacement for ");
2039 print_generic_expr (dump_file, access->base, 0);
2040 fprintf (dump_file, " offset: %u, size: %u: ",
2041 (unsigned) access->offset, (unsigned) access->size);
2042 print_generic_expr (dump_file, repl, 0);
2043 fprintf (dump_file, "\n");
2046 sra_stats.replacements++;
2048 return repl;
2051 /* Return the scalar replacement of ACCESS, which must already exist. */
2053 static inline tree
2054 get_access_replacement (struct access *access)
2056 gcc_checking_assert (access->replacement_decl);
2057 return access->replacement_decl;
2061 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2062 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2063 to by it is not "within" the root. Return false iff some accesses partially
2064 overlap. */
2066 static bool
2067 build_access_subtree (struct access **access)
2069 struct access *root = *access, *last_child = NULL;
2070 HOST_WIDE_INT limit = root->offset + root->size;
2072 *access = (*access)->next_grp;
2073 while (*access && (*access)->offset + (*access)->size <= limit)
2075 if (!last_child)
2076 root->first_child = *access;
2077 else
2078 last_child->next_sibling = *access;
2079 last_child = *access;
2081 if (!build_access_subtree (access))
2082 return false;
2085 if (*access && (*access)->offset < limit)
2086 return false;
2088 return true;
2091 /* Build a tree of access representatives, ACCESS is the pointer to the first
2092 one, others are linked in a list by the next_grp field. Return false iff
2093 some accesses partially overlap. */
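/* A made-up illustration (32-bit int): representatives <offset 0, size 64>,
   <0,32> and <32,32> of one variable form a single tree in which <0,32>
   and <32,32> are siblings and children of the root <0,64>, because both
   lie entirely within it.  */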
2095 static bool
2096 build_access_trees (struct access *access)
2098 while (access)
2100 struct access *root = access;
2102 if (!build_access_subtree (&access))
2103 return false;
2104 root->next_grp = access;
2106 return true;
2109 /* Return true if EXPR contains some ARRAY_REFs into an array with a
2110 variable lower bound. */
2112 static bool
2113 expr_with_var_bounded_array_refs_p (tree expr)
2115 while (handled_component_p (expr))
2117 if (TREE_CODE (expr) == ARRAY_REF
2118 && !host_integerp (array_ref_low_bound (expr), 0))
2119 return true;
2120 expr = TREE_OPERAND (expr, 0);
2122 return false;
2125 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2126 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2127 sorts of access flags appropriately along the way, notably propagating
2128 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2129 grp_total_scalarization from PARENT down to ROOT.
2131 Creating a replacement for a scalar access is considered beneficial if its
2132 grp_hint is set (this means we are either attempting total scalarization or
2133 there is more than one direct read access) or according to the following
2134 table:
2136 Access written to through a scalar type (once or more times)
2138 | Written to in an assignment statement
2140 | | Access read as scalar _once_
2141 | | |
2142 | | | Read in an assignment statement
2143 | | | |
2144 | | | | Scalarize Comment
2145 -----------------------------------------------------------------------------
2146 0 0 0 0 No access for the scalar
2147 0 0 0 1 No access for the scalar
2148 0 0 1 0 No Single read - won't help
2149 0 0 1 1 No The same case
2150 0 1 0 0 No access for the scalar
2151 0 1 0 1 No access for the scalar
2152 0 1 1 0 Yes s = *g; return s.i;
2153 0 1 1 1 Yes The same case as above
2154 1 0 0 0 No Won't help
2155 1 0 0 1 Yes s.i = 1; *g = s;
2156 1 0 1 0 Yes s.i = 5; g = s.i;
2157 1 0 1 1 Yes The same case as above
2158 1 1 0 0 No Won't help.
2159 1 1 0 1 Yes s.i = 1; *g = s;
2160 1 1 1 0 Yes s = *g; return s.i;
2161 1 1 1 1 Yes Any of the above yeses */
2163 static bool
2164 analyze_access_subtree (struct access *root, struct access *parent,
2165 bool allow_replacements)
2167 struct access *child;
2168 HOST_WIDE_INT limit = root->offset + root->size;
2169 HOST_WIDE_INT covered_to = root->offset;
2170 bool scalar = is_gimple_reg_type (root->type);
2171 bool hole = false, sth_created = false;
2173 if (parent)
2175 if (parent->grp_read)
2176 root->grp_read = 1;
2177 if (parent->grp_assignment_read)
2178 root->grp_assignment_read = 1;
2179 if (parent->grp_write)
2180 root->grp_write = 1;
2181 if (parent->grp_assignment_write)
2182 root->grp_assignment_write = 1;
2183 if (parent->grp_total_scalarization)
2184 root->grp_total_scalarization = 1;
2187 if (root->grp_unscalarizable_region)
2188 allow_replacements = false;
2190 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2191 allow_replacements = false;
2193 for (child = root->first_child; child; child = child->next_sibling)
2195 hole |= covered_to < child->offset;
2196 sth_created |= analyze_access_subtree (child, root,
2197 allow_replacements && !scalar);
2199 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2200 root->grp_total_scalarization &= child->grp_total_scalarization;
2201 if (child->grp_covered)
2202 covered_to += child->size;
2203 else
2204 hole = true;
2207 if (allow_replacements && scalar && !root->first_child
2208 && (root->grp_hint
2209 || ((root->grp_scalar_read || root->grp_assignment_read)
2210 && (root->grp_scalar_write || root->grp_assignment_write))))
2212 /* Always create access replacements that cover the whole access.
2213 For integral types this means the precision has to match.
2214 Avoid assumptions based on the integral type kind, too. */
2215 if (INTEGRAL_TYPE_P (root->type)
2216 && (TREE_CODE (root->type) != INTEGER_TYPE
2217 || TYPE_PRECISION (root->type) != root->size)
2218 /* But leave bitfield accesses alone. */
2219 && (TREE_CODE (root->expr) != COMPONENT_REF
2220 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2222 tree rt = root->type;
2223 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2224 && (root->size % BITS_PER_UNIT) == 0);
2225 root->type = build_nonstandard_integer_type (root->size,
2226 TYPE_UNSIGNED (rt));
2227 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2228 root->base, root->offset,
2229 root->type, NULL, false);
2231 if (dump_file && (dump_flags & TDF_DETAILS))
2233 fprintf (dump_file, "Changing the type of a replacement for ");
2234 print_generic_expr (dump_file, root->base, 0);
2235 fprintf (dump_file, " offset: %u, size: %u ",
2236 (unsigned) root->offset, (unsigned) root->size);
2237 fprintf (dump_file, " to an integer.\n");
2241 root->grp_to_be_replaced = 1;
2242 root->replacement_decl = create_access_replacement (root);
2243 sth_created = true;
2244 hole = false;
2246 else
2248 if (allow_replacements
2249 && scalar && !root->first_child
2250 && (root->grp_scalar_write || root->grp_assignment_write)
2251 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2252 DECL_UID (root->base)))
2254 gcc_checking_assert (!root->grp_scalar_read
2255 && !root->grp_assignment_read);
2256 sth_created = true;
2257 if (MAY_HAVE_DEBUG_STMTS)
2259 root->grp_to_be_debug_replaced = 1;
2260 root->replacement_decl = create_access_replacement (root);
2264 if (covered_to < limit)
2265 hole = true;
2266 if (scalar)
2267 root->grp_total_scalarization = 0;
2270 if (!hole || root->grp_total_scalarization)
2271 root->grp_covered = 1;
2272 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2273 root->grp_unscalarized_data = 1; /* not covered and written to */
2274 return sth_created;
2277 /* Analyze all access trees linked by next_grp by means of
2278 analyze_access_subtree. */
2279 static bool
2280 analyze_access_trees (struct access *access)
2282 bool ret = false;
2284 while (access)
2286 if (analyze_access_subtree (access, NULL, true))
2287 ret = true;
2288 access = access->next_grp;
2291 return ret;
2294 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2295 size SIZE would conflict with an already existing one. If exactly such a
2296 child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2298 static bool
2299 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2300 HOST_WIDE_INT size, struct access **exact_match)
2302 struct access *child;
2304 for (child = lacc->first_child; child; child = child->next_sibling)
2306 if (child->offset == norm_offset && child->size == size)
2308 *exact_match = child;
2309 return true;
2312 if (child->offset < norm_offset + size
2313 && child->offset + child->size > norm_offset)
2314 return true;
2317 return false;
2320 /* Create a new child access of PARENT, with all properties just like MODEL
2321 except for its offset and with its grp_write true and grp_read false.
2322 Return the new access or NULL if it cannot be created. Note that this access
2323 is created long after all splicing and sorting; it is not located in any
2324 access vector and is automatically a representative of its group. */
2326 static struct access *
2327 create_artificial_child_access (struct access *parent, struct access *model,
2328 HOST_WIDE_INT new_offset)
2330 struct access *access;
2331 struct access **child;
2332 tree expr = parent->base;
2334 gcc_assert (!model->grp_unscalarizable_region);
2336 access = (struct access *) pool_alloc (access_pool);
2337 memset (access, 0, sizeof (struct access));
2338 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2339 model->type))
2341 access->grp_no_warning = true;
2342 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2343 new_offset, model, NULL, false);
2346 access->base = parent->base;
2347 access->expr = expr;
2348 access->offset = new_offset;
2349 access->size = model->size;
2350 access->type = model->type;
2351 access->grp_write = true;
2352 access->grp_read = false;
2354 child = &parent->first_child;
2355 while (*child && (*child)->offset < new_offset)
2356 child = &(*child)->next_sibling;
2358 access->next_sibling = *child;
2359 *child = access;
2361 return access;
2365 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2366 true if any new subaccess was created. Additionally, if RACC is a scalar
2367 access but LACC is not, change the type of the latter, if possible. */
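/* A sketch of the propagation (made-up declarations, 32-bit int):

     struct S { int i; int j; } l, r;
     l = r;       // assign link from the access of r to that of l
     ... r.i ...  // gives r a subaccess <offset 0, size 32>

   Propagating across the link creates an artificial child <0,32> under
   l's access as well, so the aggregate copy can later be rewritten as
   copies of individual scalar replacements.  */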
2369 static bool
2370 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2372 struct access *rchild;
2373 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2374 bool ret = false;
2376 if (is_gimple_reg_type (lacc->type)
2377 || lacc->grp_unscalarizable_region
2378 || racc->grp_unscalarizable_region)
2379 return false;
2381 if (is_gimple_reg_type (racc->type))
2383 if (!lacc->first_child && !racc->first_child)
2385 tree t = lacc->base;
2387 lacc->type = racc->type;
2388 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2389 lacc->offset, racc->type))
2390 lacc->expr = t;
2391 else
2393 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2394 lacc->base, lacc->offset,
2395 racc, NULL, false);
2396 lacc->grp_no_warning = true;
2399 return false;
2402 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2404 struct access *new_acc = NULL;
2405 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2407 if (rchild->grp_unscalarizable_region)
2408 continue;
2410 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2411 &new_acc))
2413 if (new_acc)
2415 rchild->grp_hint = 1;
2416 new_acc->grp_hint |= new_acc->grp_read;
2417 if (rchild->first_child)
2418 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2420 continue;
2423 rchild->grp_hint = 1;
2424 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2425 if (new_acc)
2427 ret = true;
2428 if (racc->first_child)
2429 propagate_subaccesses_across_link (new_acc, rchild);
2433 return ret;
2436 /* Propagate all subaccesses across assignment links. */
2438 static void
2439 propagate_all_subaccesses (void)
2441 while (work_queue_head)
2443 struct access *racc = pop_access_from_work_queue ();
2444 struct assign_link *link;
2446 gcc_assert (racc->first_link);
2448 for (link = racc->first_link; link; link = link->next)
2450 struct access *lacc = link->lacc;
2452 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2453 continue;
2454 lacc = lacc->group_representative;
2455 if (propagate_subaccesses_across_link (lacc, racc)
2456 && lacc->first_link)
2457 add_access_to_work_queue (lacc);
2462 /* Go through all accesses collected throughout the (intraprocedural) analysis
2463 stage, exclude overlapping ones, identify representatives and build trees
2464 out of them, making decisions about scalarization on the way. Return true
2465 iff there are any to-be-scalarized variables after this stage. */
2467 static bool
2468 analyze_all_variable_accesses (void)
2470 int res = 0;
2471 bitmap tmp = BITMAP_ALLOC (NULL);
2472 bitmap_iterator bi;
2473 unsigned i, max_total_scalarization_size;
2475 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2476 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2478 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2479 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2480 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2482 tree var = candidate (i);
2484 if (TREE_CODE (var) == VAR_DECL
2485 && type_consists_of_records_p (TREE_TYPE (var)))
2487 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2488 <= max_total_scalarization_size)
2490 completely_scalarize_var (var);
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, "Will attempt to totally scalarize ");
2494 print_generic_expr (dump_file, var, 0);
2495 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2498 else if (dump_file && (dump_flags & TDF_DETAILS))
2500 fprintf (dump_file, "Too big to totally scalarize: ");
2501 print_generic_expr (dump_file, var, 0);
2502 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2507 bitmap_copy (tmp, candidate_bitmap);
2508 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2510 tree var = candidate (i);
2511 struct access *access;
2513 access = sort_and_splice_var_accesses (var);
2514 if (!access || !build_access_trees (access))
2515 disqualify_candidate (var,
2516 "No or inhibitingly overlapping accesses.");
2519 propagate_all_subaccesses ();
2521 bitmap_copy (tmp, candidate_bitmap);
2522 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2524 tree var = candidate (i);
2525 struct access *access = get_first_repr_for_decl (var);
2527 if (analyze_access_trees (access))
2529 res++;
2530 if (dump_file && (dump_flags & TDF_DETAILS))
2532 fprintf (dump_file, "\nAccess trees for ");
2533 print_generic_expr (dump_file, var, 0);
2534 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2535 dump_access_tree (dump_file, access);
2536 fprintf (dump_file, "\n");
2539 else
2540 disqualify_candidate (var, "No scalar replacements to be created.");
2543 BITMAP_FREE (tmp);
2545 if (res)
2547 statistics_counter_event (cfun, "Scalarized aggregates", res);
2548 return true;
2550 else
2551 return false;
2554 /* Generate statements copying scalar replacements of accesses within a subtree
2555 into or out of AGG. ACCESS, all its children, siblings and their children
2556 are to be processed. AGG is an aggregate type expression (can be a
2557 declaration but does not have to be; it can for example also be a MEM_REF or
2558 a series of handled components). TOP_OFFSET is the offset of the processed
2559 subtree which has to be subtracted from offsets of individual accesses to
2560 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2561 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2562 otherwise copy all. GSI is a statement iterator used to place the new
2563 statements. WRITE should be true when the statements should write from AGG
2564 to the replacements and false vice versa. If INSERT_AFTER is true, new
2565 statements will be added after the current statement in GSI; otherwise they
2566 will be added before it. */
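/* As a sketch (hypothetical replacement names SR_i and SR_f), for an
   aggregate s with scalarized components s.i and s.f, WRITE == true
   produces

     SR_i = s.i;
     SR_f = s.f;

   whereas WRITE == false produces the stores

     s.i = SR_i;
     s.f = SR_f;  */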
2568 static void
2569 generate_subtree_copies (struct access *access, tree agg,
2570 HOST_WIDE_INT top_offset,
2571 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2572 gimple_stmt_iterator *gsi, bool write,
2573 bool insert_after, location_t loc)
2577 if (chunk_size && access->offset >= start_offset + chunk_size)
2578 return;
2580 if (access->grp_to_be_replaced
2581 && (chunk_size == 0
2582 || access->offset + access->size > start_offset))
2584 tree expr, repl = get_access_replacement (access);
2585 gimple stmt;
2587 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2588 access, gsi, insert_after);
2590 if (write)
2592 if (access->grp_partial_lhs)
2593 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2594 !insert_after,
2595 insert_after ? GSI_NEW_STMT
2596 : GSI_SAME_STMT);
2597 stmt = gimple_build_assign (repl, expr);
2599 else
2601 TREE_NO_WARNING (repl) = 1;
2602 if (access->grp_partial_lhs)
2603 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2604 !insert_after,
2605 insert_after ? GSI_NEW_STMT
2606 : GSI_SAME_STMT);
2607 stmt = gimple_build_assign (expr, repl);
2609 gimple_set_location (stmt, loc);
2611 if (insert_after)
2612 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2613 else
2614 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2615 update_stmt (stmt);
2616 sra_stats.subtree_copies++;
2618 else if (write
2619 && access->grp_to_be_debug_replaced
2620 && (chunk_size == 0
2621 || access->offset + access->size > start_offset))
2623 gimple ds;
2624 tree drhs = build_debug_ref_for_model (loc, agg,
2625 access->offset - top_offset,
2626 access);
2627 ds = gimple_build_debug_bind (get_access_replacement (access),
2628 drhs, gsi_stmt (*gsi));
2629 if (insert_after)
2630 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2631 else
2632 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2635 if (access->first_child)
2636 generate_subtree_copies (access->first_child, agg, top_offset,
2637 start_offset, chunk_size, gsi,
2638 write, insert_after, loc);
2640 access = access->next_sibling;
2642 while (access);
2645 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2646 the root of the subtree to be processed. GSI is the statement iterator used
2647 for inserting statements which are added after the current statement if
2648 INSERT_AFTER is true or before it otherwise. */
2650 static void
2651 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2652 bool insert_after, location_t loc)
2655 struct access *child;
2657 if (access->grp_to_be_replaced)
2659 gimple stmt;
2661 stmt = gimple_build_assign (get_access_replacement (access),
2662 build_zero_cst (access->type));
2663 if (insert_after)
2664 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2665 else
2666 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2667 update_stmt (stmt);
2668 gimple_set_location (stmt, loc);
2670 else if (access->grp_to_be_debug_replaced)
2672 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2673 build_zero_cst (access->type),
2674 gsi_stmt (*gsi));
2675 if (insert_after)
2676 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2677 else
2678 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2681 for (child = access->first_child; child; child = child->next_sibling)
2682 init_subtree_with_zero (child, gsi, insert_after, loc);
2685 /* Search for an access representative for the given expression EXPR and
2686 return it or NULL if it cannot be found. */
2688 static struct access *
2689 get_access_for_expr (tree expr)
2691 HOST_WIDE_INT offset, size, max_size;
2692 tree base;
2694 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2695 a different size than the size of its argument and we need the latter
2696 one. */
2697 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2698 expr = TREE_OPERAND (expr, 0);
2700 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2701 if (max_size == -1 || !DECL_P (base))
2702 return NULL;
2704 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2705 return NULL;
2707 return get_var_base_offset_size_access (base, offset, max_size);
2710 /* Replace the expression EXPR with a scalar replacement if there is one and
2711 generate other statements to do type conversion or subtree copying if
2712 necessary. GSI is used to place newly created statements, WRITE is true if
2713 the expression is being written to (it is on a LHS of a statement or output
2714 in an assembly statement). */
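/* For example (hypothetical replacement name SR_i), a use such as

     ... = s.i;

   where s.i has a scalar replacement simply becomes

     ... = SR_i;

   When the types do not match, the replacement is instead flushed back to
   the original location before the statement (for reads) or reloaded from
   it after the statement (for writes).  */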
2716 static bool
2717 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2719 location_t loc;
2720 struct access *access;
2721 tree type, bfr;
2723 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2725 bfr = *expr;
2726 expr = &TREE_OPERAND (*expr, 0);
2728 else
2729 bfr = NULL_TREE;
2731 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2732 expr = &TREE_OPERAND (*expr, 0);
2733 access = get_access_for_expr (*expr);
2734 if (!access)
2735 return false;
2736 type = TREE_TYPE (*expr);
2738 loc = gimple_location (gsi_stmt (*gsi));
2739 if (access->grp_to_be_replaced)
2741 tree repl = get_access_replacement (access);
2742 /* If we replace a non-register typed access simply use the original
2743 access expression to extract the scalar component afterwards.
2744 This happens if scalarizing a function return value or parameter
2745 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2746 gcc.c-torture/compile/20011217-1.c.
2748 We also want to use this when accessing a complex or vector which can
2749 be accessed as a different type too, potentially creating a need for
2750 type conversion (see PR42196) and when scalarized unions are involved
2751 in assembler statements (see PR42398). */
2752 if (!useless_type_conversion_p (type, access->type))
2754 tree ref;
2756 ref = build_ref_for_model (loc, access->base, access->offset, access,
2757 NULL, false);
2759 if (write)
2761 gimple stmt;
2763 if (access->grp_partial_lhs)
2764 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2765 false, GSI_NEW_STMT);
2766 stmt = gimple_build_assign (repl, ref);
2767 gimple_set_location (stmt, loc);
2768 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2770 else
2772 gimple stmt;
2774 if (access->grp_partial_lhs)
2775 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2776 true, GSI_SAME_STMT);
2777 stmt = gimple_build_assign (ref, repl);
2778 gimple_set_location (stmt, loc);
2779 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2782 else
2783 *expr = repl;
2784 sra_stats.exprs++;
2786 else if (write && access->grp_to_be_debug_replaced)
2788 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2789 NULL_TREE,
2790 gsi_stmt (*gsi));
2791 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2794 if (access->first_child)
2796 HOST_WIDE_INT start_offset, chunk_size;
2797 if (bfr
2798 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2799 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2801 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2802 start_offset = access->offset
2803 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2805 else
2806 start_offset = chunk_size = 0;
2808 generate_subtree_copies (access->first_child, access->base, 0,
2809 start_offset, chunk_size, gsi, write, write,
2810 loc);
2812 return true;
2815 /* Where scalar replacements of the RHS have been written to when a replacement
2816 of a LHS of an assignment cannot be directly loaded from a replacement of
2817 the RHS. */
2818 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2819 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2820 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2822 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2823 base aggregate if there are unscalarized data or directly to the LHS of the
2824 statement that is pointed to by GSI otherwise. */
2826 static enum unscalarized_data_handling
2827 handle_unscalarized_data_in_subtree (struct access *top_racc,
2828 gimple_stmt_iterator *gsi)
2830 if (top_racc->grp_unscalarized_data)
2832 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2833 gsi, false, false,
2834 gimple_location (gsi_stmt (*gsi)));
2835 return SRA_UDH_RIGHT;
2837 else
2839 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2840 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2841 0, 0, gsi, false, false,
2842 gimple_location (gsi_stmt (*gsi)));
2843 return SRA_UDH_LEFT;
2848 /* Try to generate statements to load all sub-replacements in an access subtree
2849 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2850 If that is not possible, refresh the TOP_RACC base aggregate and load the
2851 accesses from it. LEFT_OFFSET is the offset of the whole left subtree being
2852 copied. NEW_GSI is a stmt iterator used for statement insertions after the
2853 original assignment, OLD_GSI is used to insert statements before the
2854 assignment. *REFRESHED keeps the information whether we have needed to
2855 refresh replacements of the LHS and from which side of the assignment this
2856 takes place. */
2858 static void
2859 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2860 HOST_WIDE_INT left_offset,
2861 gimple_stmt_iterator *old_gsi,
2862 gimple_stmt_iterator *new_gsi,
2863 enum unscalarized_data_handling *refreshed)
2865 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2866 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2868 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2870 if (lacc->grp_to_be_replaced)
2872 struct access *racc;
2873 gimple stmt;
2874 tree rhs;
2876 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2877 if (racc && racc->grp_to_be_replaced)
2879 rhs = get_access_replacement (racc);
2880 if (!useless_type_conversion_p (lacc->type, racc->type))
2881 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2883 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2884 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2885 true, GSI_SAME_STMT);
2887 else
2889 /* No suitable access on the right hand side, need to load from
2890 the aggregate. See if we have to update it first... */
2891 if (*refreshed == SRA_UDH_NONE)
2892 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2893 old_gsi);
2895 if (*refreshed == SRA_UDH_LEFT)
2896 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2897 new_gsi, true);
2898 else
2899 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2900 new_gsi, true);
2901 if (lacc->grp_partial_lhs)
2902 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2903 false, GSI_NEW_STMT);
2906 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2907 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2908 gimple_set_location (stmt, loc);
2909 update_stmt (stmt);
2910 sra_stats.subreplacements++;
2912 else
2914 if (*refreshed == SRA_UDH_NONE
2915 && lacc->grp_read && !lacc->grp_covered)
2916 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2917 old_gsi);
2918 if (lacc && lacc->grp_to_be_debug_replaced)
2920 gimple ds;
2921 tree drhs;
2922 struct access *racc = find_access_in_subtree (top_racc, offset,
2923 lacc->size);
2925 if (racc && racc->grp_to_be_replaced)
2927 if (racc->grp_write)
2928 drhs = get_access_replacement (racc);
2929 else
2930 drhs = NULL;
2932 else if (*refreshed == SRA_UDH_LEFT)
2933 drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2934 lacc);
2935 else if (*refreshed == SRA_UDH_RIGHT)
2936 drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2937 lacc);
2938 else
2939 drhs = NULL_TREE;
2940 ds = gimple_build_debug_bind (get_access_replacement (lacc),
2941 drhs, gsi_stmt (*old_gsi));
2942 gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2946 if (lacc->first_child)
2947 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2948 old_gsi, new_gsi, refreshed);
2952 /* Result code for SRA assignment modification. */
2953 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2954 SRA_AM_MODIFIED, /* stmt changed but not
2955 removed */
2956 SRA_AM_REMOVED }; /* stmt eliminated */
2958 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2959 to the assignment and GSI is the statement iterator pointing at it. Returns
2960 the same values as sra_modify_assign. */
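/* For instance (hypothetical replacement names), if s is fully covered by
   replacements, the statement

     s = {};

   is deleted and replaced by zero-initializations such as

     SR_i = 0;
     SR_f = 0.0;  */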
2962 static enum assignment_mod_result
2963 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2965 tree lhs = gimple_assign_lhs (*stmt);
2966 struct access *acc;
2967 location_t loc;
2969 acc = get_access_for_expr (lhs);
2970 if (!acc)
2971 return SRA_AM_NONE;
2973 if (gimple_clobber_p (*stmt))
2975 /* Remove clobbers of fully scalarized variables, otherwise
2976 do nothing. */
2977 if (acc->grp_covered)
2979 unlink_stmt_vdef (*stmt);
2980 gsi_remove (gsi, true);
2981 release_defs (*stmt);
2982 return SRA_AM_REMOVED;
2984 else
2985 return SRA_AM_NONE;
2988 loc = gimple_location (*stmt);
2989 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2991 /* I have never seen this code path trigger but if it can happen the
2992 following should handle it gracefully. */
2993 if (access_has_children_p (acc))
2994 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2995 true, true, loc);
2996 return SRA_AM_MODIFIED;
2999 if (acc->grp_covered)
3001 init_subtree_with_zero (acc, gsi, false, loc);
3002 unlink_stmt_vdef (*stmt);
3003 gsi_remove (gsi, true);
3004 release_defs (*stmt);
3005 return SRA_AM_REMOVED;
3007 else
3009 init_subtree_with_zero (acc, gsi, true, loc);
3010 return SRA_AM_MODIFIED;
3014 /* Create and return a new suitable default definition SSA_NAME for RACC which
3015 is an access describing an uninitialized part of an aggregate that is being
3016 loaded. */
3018 static tree
3019 get_repl_default_def_ssa_name (struct access *racc)
3021 gcc_checking_assert (!racc->grp_to_be_replaced
3022 && !racc->grp_to_be_debug_replaced);
3023 if (!racc->replacement_decl)
3024 racc->replacement_decl = create_access_replacement (racc);
3025 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3028 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3029 bit-field FIELD_DECL somewhere in it. */
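/* Two made-up references for which this predicate holds:

     struct B { int bits : 3; } b;
     ... b.bits ...                       // COMPONENT_REF of a bit-field
     ... VIEW_CONVERT_EXPR<float>(x) ...  // where x is, say, an int  */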
3031 static inline bool
3032 contains_vce_or_bfcref_p (const_tree ref)
3034 while (handled_component_p (ref))
3036 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3037 || (TREE_CODE (ref) == COMPONENT_REF
3038 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3039 return true;
3040 ref = TREE_OPERAND (ref, 0);
3043 return false;
3046 /* Examine both sides of the assignment statement pointed to by STMT, replace
3047 them with a scalar replacement if there is one and generate copying of
3048 replacements if scalarized aggregates have been used in the assignment. GSI
3049 is used to hold generated statements for type conversions and subtree
3050 copying. */
3052 static enum assignment_mod_result
3053 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3055 struct access *lacc, *racc;
3056 tree lhs, rhs;
3057 bool modify_this_stmt = false;
3058 bool force_gimple_rhs = false;
3059 location_t loc;
3060 gimple_stmt_iterator orig_gsi = *gsi;
3062 if (!gimple_assign_single_p (*stmt))
3063 return SRA_AM_NONE;
3064 lhs = gimple_assign_lhs (*stmt);
3065 rhs = gimple_assign_rhs1 (*stmt);
3067 if (TREE_CODE (rhs) == CONSTRUCTOR)
3068 return sra_modify_constructor_assign (stmt, gsi);
3070 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3071 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3072 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3074 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3075 gsi, false);
3076 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3077 gsi, true);
3078 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3081 lacc = get_access_for_expr (lhs);
3082 racc = get_access_for_expr (rhs);
3083 if (!lacc && !racc)
3084 return SRA_AM_NONE;
3086 loc = gimple_location (*stmt);
3087 if (lacc && lacc->grp_to_be_replaced)
3089 lhs = get_access_replacement (lacc);
3090 gimple_assign_set_lhs (*stmt, lhs);
3091 modify_this_stmt = true;
3092 if (lacc->grp_partial_lhs)
3093 force_gimple_rhs = true;
3094 sra_stats.exprs++;
3097 if (racc && racc->grp_to_be_replaced)
3099 rhs = get_access_replacement (racc);
3100 modify_this_stmt = true;
3101 if (racc->grp_partial_lhs)
3102 force_gimple_rhs = true;
3103 sra_stats.exprs++;
3105 else if (racc
3106 && !racc->grp_unscalarized_data
3107 && TREE_CODE (lhs) == SSA_NAME
3108 && !access_has_replacements_p (racc))
3110 rhs = get_repl_default_def_ssa_name (racc);
3111 modify_this_stmt = true;
3112 sra_stats.exprs++;
3115 if (modify_this_stmt)
3117 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3119 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3120 ??? This should move to fold_stmt which we simply should
3121 call after building a VIEW_CONVERT_EXPR here. */
3122 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3123 && !contains_bitfld_component_ref_p (lhs))
3125 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3126 gimple_assign_set_lhs (*stmt, lhs);
3128 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3129 && !contains_vce_or_bfcref_p (rhs))
3130 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3132 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3134 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3135 rhs);
3136 if (is_gimple_reg_type (TREE_TYPE (lhs))
3137 && TREE_CODE (lhs) != SSA_NAME)
3138 force_gimple_rhs = true;
3143 if (lacc && lacc->grp_to_be_debug_replaced)
3145 tree dlhs = get_access_replacement (lacc);
3146 tree drhs = unshare_expr (rhs);
3147 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3149 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3150 && !contains_vce_or_bfcref_p (drhs))
3151 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3152 if (drhs
3153 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3154 TREE_TYPE (drhs)))
3155 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3156 TREE_TYPE (dlhs), drhs);
3158 gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3159 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3162 /* From this point on, the function deals with assignments in between
3163 aggregates when at least one has scalar reductions of some of its
3164 components. There are three possible scenarios: 1) both the LHS and the RHS
3165 have to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
3167 In the first case, we would like to load the LHS components from RHS
3168 components whenever possible. If that is not possible, we would like to
3169 read it directly from the RHS (after updating it by storing in it its own
3170 components). If there are some necessary unscalarized data in the LHS,
3171 those will be loaded by the original assignment too. If neither of these
3172 cases happen, the original statement can be removed. Most of this is done
3173 by load_assign_lhs_subreplacements.
3175 In the second case, we would like to store all RHS scalarized components
3176 directly into LHS and if they cover the aggregate completely, remove the
3177 statement too. In the third case, we want the LHS components to be loaded
3178 directly from the RHS (DSE will remove the original statement if it
3179 becomes redundant).
3181 This is a bit complex but manageable when types match and when unions do
3182 not cause confusion in a way that we cannot really load a component of LHS
3183 from the RHS or vice versa (the access representing this level can have
3184 subaccesses that are accessible only through a different union field at a
3185 higher level - different from the one used in the examined expression).
3186 Unions are fun.
3188 Therefore, I specially handle a fourth case, happening when there is a
3189 specific type cast or it is impossible to locate a scalarized subaccess on
3190 the other side of the expression. If that happens, I simply "refresh" the
3191 RHS by storing in it its scalarized components, leave the original statement
3192 there to do the copying and then load the scalar replacements of the LHS.
3193 This is what the first branch does. */
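/* For instance (hypothetical replacement names), in scenario 2) above,
   where only the RHS components of

     l = r;

   are scalarized, the copies

     l.i = SR_i;
     l.f = SR_f;

   are emitted, and when the replacements cover all of r, the original
   aggregate copy can be removed.  */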
3195 if (modify_this_stmt
3196 || gimple_has_volatile_ops (*stmt)
3197 || contains_vce_or_bfcref_p (rhs)
3198 || contains_vce_or_bfcref_p (lhs))
3200 if (access_has_children_p (racc))
3201 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3202 gsi, false, false, loc);
3203 if (access_has_children_p (lacc))
3204 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3205 gsi, true, true, loc);
3206 sra_stats.separate_lhs_rhs_handling++;
3208 /* This gimplification must be done after generate_subtree_copies,
3209 lest we insert the subtree copies in the middle of the gimplified
3210 sequence. */
3211 if (force_gimple_rhs)
3212 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3213 true, GSI_SAME_STMT);
3214 if (gimple_assign_rhs1 (*stmt) != rhs)
3216 modify_this_stmt = true;
3217 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3218 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3221 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3223 else
3225 if (access_has_children_p (lacc)
3226 && access_has_children_p (racc)
3227 /* When an access represents an unscalarizable region, it usually
3228 represents accesses with variable offset and thus must not be used
3229 to generate new memory accesses. */
3230 && !lacc->grp_unscalarizable_region
3231 && !racc->grp_unscalarizable_region)
3233 gimple_stmt_iterator orig_gsi = *gsi;
3234 enum unscalarized_data_handling refreshed;
3236 if (lacc->grp_read && !lacc->grp_covered)
3237 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3238 else
3239 refreshed = SRA_UDH_NONE;
3241 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3242 &orig_gsi, gsi, &refreshed);
3243 if (refreshed != SRA_UDH_RIGHT)
3245 gsi_next (gsi);
3246 unlink_stmt_vdef (*stmt);
3247 gsi_remove (&orig_gsi, true);
3248 release_defs (*stmt);
3249 sra_stats.deleted++;
3250 return SRA_AM_REMOVED;
3253 else
3255 if (access_has_children_p (racc)
3256 && !racc->grp_unscalarized_data)
3258 if (dump_file)
3260 fprintf (dump_file, "Removing load: ");
3261 print_gimple_stmt (dump_file, *stmt, 0, 0);
3263 generate_subtree_copies (racc->first_child, lhs,
3264 racc->offset, 0, 0, gsi,
3265 false, false, loc);
3266 gcc_assert (*stmt == gsi_stmt (*gsi));
3267 unlink_stmt_vdef (*stmt);
3268 gsi_remove (gsi, true);
3269 release_defs (*stmt);
3270 sra_stats.deleted++;
3271 return SRA_AM_REMOVED;
3273 /* Restore the aggregate RHS from its components so the
3274 prevailing aggregate copy does the right thing. */
3275 if (access_has_children_p (racc))
3276 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3277 gsi, false, false, loc);
3278 /* Re-load the components of the aggregate copy destination.
3279 But use the RHS aggregate to load from to expose more
3280 optimization opportunities. */
3281 if (access_has_children_p (lacc))
3282 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3283 0, 0, gsi, true, true, loc);
3286 return SRA_AM_NONE;
3290 /* Traverse the function body and carry out all modifications as decided in
3291 analyze_all_variable_accesses. Return true iff the CFG has been
3292 changed. */
3294 static bool
3295 sra_modify_function_body (void)
3297 bool cfg_changed = false;
3298 basic_block bb;
3300 FOR_EACH_BB (bb)
3302 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3303 while (!gsi_end_p (gsi))
3305 gimple stmt = gsi_stmt (gsi);
3306 enum assignment_mod_result assign_result;
3307 bool modified = false, deleted = false;
3308 tree *t;
3309 unsigned i;
3311 switch (gimple_code (stmt))
3313 case GIMPLE_RETURN:
3314 t = gimple_return_retval_ptr (stmt);
3315 if (*t != NULL_TREE)
3316 modified |= sra_modify_expr (t, &gsi, false);
3317 break;
3319 case GIMPLE_ASSIGN:
3320 assign_result = sra_modify_assign (&stmt, &gsi);
3321 modified |= assign_result == SRA_AM_MODIFIED;
3322 deleted = assign_result == SRA_AM_REMOVED;
3323 break;
3325 case GIMPLE_CALL:
3326 /* Operands must be processed before the lhs. */
3327 for (i = 0; i < gimple_call_num_args (stmt); i++)
3329 t = gimple_call_arg_ptr (stmt, i);
3330 modified |= sra_modify_expr (t, &gsi, false);
3333 if (gimple_call_lhs (stmt))
3335 t = gimple_call_lhs_ptr (stmt);
3336 modified |= sra_modify_expr (t, &gsi, true);
3338 break;
3340 case GIMPLE_ASM:
3341 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3343 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3344 modified |= sra_modify_expr (t, &gsi, false);
3346 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3348 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3349 modified |= sra_modify_expr (t, &gsi, true);
3351 break;
3353 default:
3354 break;
3357 if (modified)
3359 update_stmt (stmt);
3360 if (maybe_clean_eh_stmt (stmt)
3361 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3362 cfg_changed = true;
3364 if (!deleted)
3365 gsi_next (&gsi);
3369 return cfg_changed;
3372 /* Generate statements initializing scalar replacements of parts of function
3373 parameters. */
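/* A sketch with hypothetical names: for a parameter p whose components
   p.i and p.f were scalarized, the statements

     SR_i = p.i;
     SR_f = p.f;

   are inserted on the single edge leaving the entry block, so the
   replacements hold the incoming values before the body runs.  */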
3375 static void
3376 initialize_parameter_reductions (void)
3378 gimple_stmt_iterator gsi;
3379 gimple_seq seq = NULL;
3380 tree parm;
3382 gsi = gsi_start (seq);
3383 for (parm = DECL_ARGUMENTS (current_function_decl);
3384 parm;
3385 parm = DECL_CHAIN (parm))
3387 vec<access_p> *access_vec;
3388 struct access *access;
3390 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3391 continue;
3392 access_vec = get_base_access_vector (parm);
3393 if (!access_vec)
3394 continue;
3396 for (access = (*access_vec)[0];
3397 access;
3398 access = access->next_grp)
3399 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3400 EXPR_LOCATION (parm));
3403 seq = gsi_seq (gsi);
3404 if (seq)
3405 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3408 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3409 it reveals there are components of some aggregates to be scalarized, it runs
3410 the required transformations. */
3411 static unsigned int
3412 perform_intra_sra (void)
3414 int ret = 0;
3415 sra_initialize ();
3417 if (!find_var_candidates ())
3418 goto out;
3420 if (!scan_function ())
3421 goto out;
3423 if (!analyze_all_variable_accesses ())
3424 goto out;
3426 if (sra_modify_function_body ())
3427 ret = TODO_update_ssa | TODO_cleanup_cfg;
3428 else
3429 ret = TODO_update_ssa;
3430 initialize_parameter_reductions ();
3432 statistics_counter_event (cfun, "Scalar replacements created",
3433 sra_stats.replacements);
3434 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3435 statistics_counter_event (cfun, "Subtree copy stmts",
3436 sra_stats.subtree_copies);
3437 statistics_counter_event (cfun, "Subreplacement stmts",
3438 sra_stats.subreplacements);
3439 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3440 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3441 sra_stats.separate_lhs_rhs_handling);
3443 out:
3444 sra_deinitialize ();
3445 return ret;
3448 /* Perform early intraprocedural SRA. */
3449 static unsigned int
3450 early_intra_sra (void)
3452 sra_mode = SRA_MODE_EARLY_INTRA;
3453 return perform_intra_sra ();
3456 /* Perform "late" intraprocedural SRA. */
3457 static unsigned int
3458 late_intra_sra (void)
3460 sra_mode = SRA_MODE_INTRA;
3461 return perform_intra_sra ();
3465 static bool
3466 gate_intra_sra (void)
3468 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3472 namespace {
3474 const pass_data pass_data_sra_early =
3476 GIMPLE_PASS, /* type */
3477 "esra", /* name */
3478 OPTGROUP_NONE, /* optinfo_flags */
3479 true, /* has_gate */
3480 true, /* has_execute */
3481 TV_TREE_SRA, /* tv_id */
3482 ( PROP_cfg | PROP_ssa ), /* properties_required */
3483 0, /* properties_provided */
3484 0, /* properties_destroyed */
3485 0, /* todo_flags_start */
3486 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3489 class pass_sra_early : public gimple_opt_pass
3491 public:
3492 pass_sra_early (gcc::context *ctxt)
3493 : gimple_opt_pass (pass_data_sra_early, ctxt)
3496 /* opt_pass methods: */
3497 bool gate () { return gate_intra_sra (); }
3498 unsigned int execute () { return early_intra_sra (); }
3500 }; // class pass_sra_early
3502 } // anon namespace
3504 gimple_opt_pass *
3505 make_pass_sra_early (gcc::context *ctxt)
3507 return new pass_sra_early (ctxt);
3510 namespace {
3512 const pass_data pass_data_sra =
3514 GIMPLE_PASS, /* type */
3515 "sra", /* name */
3516 OPTGROUP_NONE, /* optinfo_flags */
3517 true, /* has_gate */
3518 true, /* has_execute */
3519 TV_TREE_SRA, /* tv_id */
3520 ( PROP_cfg | PROP_ssa ), /* properties_required */
3521 0, /* properties_provided */
3522 0, /* properties_destroyed */
3523 TODO_update_address_taken, /* todo_flags_start */
3524 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3527 class pass_sra : public gimple_opt_pass
3529 public:
3530 pass_sra (gcc::context *ctxt)
3531 : gimple_opt_pass (pass_data_sra, ctxt)
3534 /* opt_pass methods: */
3535 bool gate () { return gate_intra_sra (); }
3536 unsigned int execute () { return late_intra_sra (); }
3538 }; // class pass_sra
3540 } // anon namespace
3542 gimple_opt_pass *
3543 make_pass_sra (gcc::context *ctxt)
3545 return new pass_sra (ctxt);
3549 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3550 parameter. */
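/* For example, in

     int f (int unused, int x) { return x; }

   the parameter "unused" either has no default-definition SSA name at all
   or that name has zero uses, so this predicate returns true.  */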
3552 static bool
3553 is_unused_scalar_param (tree parm)
3555 tree name;
3556 return (is_gimple_reg (parm)
3557 && (!(name = ssa_default_def (cfun, parm))
3558 || has_zero_uses (name)));
3561 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3562 examine whether there are any direct or otherwise infeasible ones. If so,
3563 return true, otherwise return false. PARM must be a gimple register with a
3564 non-NULL default definition. */
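/* A made-up example of the distinction:

     int f (int *p, int **q)
     {
       int r = *p;   // dereference at offset 0: acceptable
       *q = p;       // the pointer value itself escapes: a direct use
       return r;
     }

   Because of the escape, p has a direct use and IPA-SRA must leave it
   alone.  */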
3566 static bool
3567 ptr_parm_has_direct_uses (tree parm)
3569 imm_use_iterator ui;
3570 gimple stmt;
3571 tree name = ssa_default_def (cfun, parm);
3572 bool ret = false;
3574 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3576 int uses_ok = 0;
3577 use_operand_p use_p;
3579 if (is_gimple_debug (stmt))
3580 continue;
3582 /* Valid uses include dereferences on the lhs and the rhs. */
3583 if (gimple_has_lhs (stmt))
3585 tree lhs = gimple_get_lhs (stmt);
3586 while (handled_component_p (lhs))
3587 lhs = TREE_OPERAND (lhs, 0);
3588 if (TREE_CODE (lhs) == MEM_REF
3589 && TREE_OPERAND (lhs, 0) == name
3590 && integer_zerop (TREE_OPERAND (lhs, 1))
3591 && types_compatible_p (TREE_TYPE (lhs),
3592 TREE_TYPE (TREE_TYPE (name)))
3593 && !TREE_THIS_VOLATILE (lhs))
3594 uses_ok++;
3596 if (gimple_assign_single_p (stmt))
3598 tree rhs = gimple_assign_rhs1 (stmt);
3599 while (handled_component_p (rhs))
3600 rhs = TREE_OPERAND (rhs, 0);
3601 if (TREE_CODE (rhs) == MEM_REF
3602 && TREE_OPERAND (rhs, 0) == name
3603 && integer_zerop (TREE_OPERAND (rhs, 1))
3604 && types_compatible_p (TREE_TYPE (rhs),
3605 TREE_TYPE (TREE_TYPE (name)))
3606 && !TREE_THIS_VOLATILE (rhs))
3607 uses_ok++;
3609 else if (is_gimple_call (stmt))
3611 unsigned i;
3612 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3614 tree arg = gimple_call_arg (stmt, i);
3615 while (handled_component_p (arg))
3616 arg = TREE_OPERAND (arg, 0);
3617 if (TREE_CODE (arg) == MEM_REF
3618 && TREE_OPERAND (arg, 0) == name
3619 && integer_zerop (TREE_OPERAND (arg, 1))
3620 && types_compatible_p (TREE_TYPE (arg),
3621 TREE_TYPE (TREE_TYPE (name)))
3622 && !TREE_THIS_VOLATILE (arg))
3623 uses_ok++;
3627 /* If the number of valid uses does not match the number of
3628 uses in this stmt there is an unhandled use. */
3629 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3630 --uses_ok;
3632 if (uses_ok != 0)
3633 ret = true;
3635 if (ret)
3636 BREAK_FROM_IMM_USE_STMT (ui);
3639 return ret;
3642 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3643 them in candidate_bitmap. Note that these do not necessarily include
3644 parameters which are unused and thus can be removed. Return true iff any
3645 such candidate has been found. */
3647 static bool
3648 find_param_candidates (void)
3650 tree parm;
3651 int count = 0;
3652 bool ret = false;
3653 const char *msg;
3655 for (parm = DECL_ARGUMENTS (current_function_decl);
3656 parm;
3657 parm = DECL_CHAIN (parm))
3659 tree type = TREE_TYPE (parm);
3660 tree_node **slot;
3662 count++;
3664 if (TREE_THIS_VOLATILE (parm)
3665 || TREE_ADDRESSABLE (parm)
3666 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3667 continue;
3669 if (is_unused_scalar_param (parm))
3671 ret = true;
3672 continue;
3675 if (POINTER_TYPE_P (type))
3677 type = TREE_TYPE (type);
3679 if (TREE_CODE (type) == FUNCTION_TYPE
3680 || TYPE_VOLATILE (type)
3681 || (TREE_CODE (type) == ARRAY_TYPE
3682 && TYPE_NONALIASED_COMPONENT (type))
3683 || !is_gimple_reg (parm)
3684 || is_va_list_type (type)
3685 || ptr_parm_has_direct_uses (parm))
3686 continue;
3688 else if (!AGGREGATE_TYPE_P (type))
3689 continue;
3691 if (!COMPLETE_TYPE_P (type)
3692 || !host_integerp (TYPE_SIZE (type), 1)
3693 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3694 || (AGGREGATE_TYPE_P (type)
3695 && type_internals_preclude_sra_p (type, &msg)))
3696 continue;
3698 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3699 slot = candidates.find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3700 *slot = parm;
3702 ret = true;
3703 if (dump_file && (dump_flags & TDF_DETAILS))
3705 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3706 print_generic_expr (dump_file, parm, 0);
3707 fprintf (dump_file, "\n");
3711 func_param_count = count;
3712 return ret;
3715 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3716 maybe_modified. */
3718 static bool
3719 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3720 void *data)
3722 struct access *repr = (struct access *) data;
3724 repr->grp_maybe_modified = 1;
3725 return true;
3728 /* Analyze what representatives (in linked lists accessible from
3729 REPRESENTATIVES) can be modified by side effects of statements in the
3730 current function. */
3732 static void
3733 analyze_modified_params (vec<access_p> representatives)
3735 int i;
3737 for (i = 0; i < func_param_count; i++)
3739 struct access *repr;
3741 for (repr = representatives[i];
3742 repr;
3743 repr = repr->next_grp)
3745 struct access *access;
3746 bitmap visited;
3747 ao_ref ar;
3749 if (no_accesses_p (repr))
3750 continue;
3751 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3752 || repr->grp_maybe_modified)
3753 continue;
3755 ao_ref_init (&ar, repr->expr);
3756 visited = BITMAP_ALLOC (NULL);
3757 for (access = repr; access; access = access->next_sibling)
3759 /* All accesses are read ones, otherwise grp_maybe_modified would
3760 be trivially set. */
3761 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3762 mark_maybe_modified, repr, &visited);
3763 if (repr->grp_maybe_modified)
3764 break;
3766 BITMAP_FREE (visited);
3771 /* Propagate distances in bb_dereferences in the direction opposite to the
3772 control flow edges, in each step storing the maximum of the current value
3773 and the minimum of all successors. These steps are repeated until the table
3774 stabilizes. Note that BBs which might terminate the function (according to
3775 the final_bbs bitmap) are never updated in this way. */
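/* A worked illustration for a single pointer parameter on a diamond:

        bb2
       /   \
     bb3   bb4
       \   /
        bb5

   If only bb3 dereferences the parameter (say to distance 32), bb2
   inherits min (32, 0) = 0 from its successors and stays at 0; the
   dereference is not guaranteed on every path, so it could not be moved
   to the caller.  Only if bb4 (or bb2 itself) also dereferences the
   parameter does a nonzero distance propagate up to the entry block.  */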
3777 static void
3778 propagate_dereference_distances (void)
3780 vec<basic_block> queue;
3781 basic_block bb;
3783 queue.create (last_basic_block_for_function (cfun));
3784 queue.quick_push (ENTRY_BLOCK_PTR);
3785 FOR_EACH_BB (bb)
3787 queue.quick_push (bb);
3788 bb->aux = bb;
3791 while (!queue.is_empty ())
3793 edge_iterator ei;
3794 edge e;
3795 bool change = false;
3796 int i;
3798 bb = queue.pop ();
3799 bb->aux = NULL;
3801 if (bitmap_bit_p (final_bbs, bb->index))
3802 continue;
3804 for (i = 0; i < func_param_count; i++)
3806 int idx = bb->index * func_param_count + i;
3807 bool first = true;
3808 HOST_WIDE_INT inh = 0;
3810 FOR_EACH_EDGE (e, ei, bb->succs)
3812 int succ_idx = e->dest->index * func_param_count + i;
3814 if (e->src == EXIT_BLOCK_PTR)
3815 continue;
3817 if (first)
3819 first = false;
3820 inh = bb_dereferences [succ_idx];
3822 else if (bb_dereferences [succ_idx] < inh)
3823 inh = bb_dereferences [succ_idx];
3826 if (!first && bb_dereferences[idx] < inh)
3828 bb_dereferences[idx] = inh;
3829 change = true;
3833 if (change && !bitmap_bit_p (final_bbs, bb->index))
3834 FOR_EACH_EDGE (e, ei, bb->preds)
3836 if (e->src->aux)
3837 continue;
3839 e->src->aux = e->src;
3840 queue.quick_push (e->src);
3844 queue.release ();
3847 /* Dump a dereferences TABLE with heading STR to file F. */
3849 static void
3850 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3852 basic_block bb;
3854 fprintf (f, "%s", str);
3855 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3857 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3858 if (bb != EXIT_BLOCK_PTR)
3860 int i;
3861 for (i = 0; i < func_param_count; i++)
3863 int idx = bb->index * func_param_count + i;
3864 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3867 fprintf (f, "\n");
3869 fprintf (f, "\n");
3872 /* Determine which (parts of) parameters passed by reference and not
3873 assigned to are not certainly dereferenced in this function, in which case
3874 the dereferencing cannot be safely moved to the caller without potentially
3875 introducing a segfault. Mark such REPRESENTATIVES as
3876 grp_not_necessarilly_dereferenced.
3878 The maximum dereferenced "distance," i.e. the offset plus the size of the
3879 accessed part, is calculated for each pointer parameter rather than a
3880 simple boolean, to handle cases when only a fraction of the whole
3881 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3882 an example).
3884 The maximum dereference distances for each pointer parameter and BB are
3885 already stored in bb_dereferences. This routine simply propagates these
3886 values upwards by propagate_dereference_distances and then compares the
3887 distances of individual parameters in the ENTRY BB to the equivalent
3888 distances of each representative of a (fraction of a) parameter. */
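/* A hypothetical instance of the problem, in the spirit of the
   ipa-sra-2.c test referenced above: when a caller allocates only the
   leading part of the pointed-to structure and the callee touches the
   tail only on a path guarded by a flag, the propagated entry-BB
   distance stays below offset + size of the tail's representative.
   That representative is then marked grp_not_necessarilly_dereferenced
   and its dereference is not moved into the callers.  */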
3890 static void
3891 analyze_caller_dereference_legality (vec<access_p> representatives)
3893 int i;
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3896 dump_dereferences_table (dump_file,
3897 "Dereference table before propagation:\n",
3898 bb_dereferences);
3900 propagate_dereference_distances ();
3902 if (dump_file && (dump_flags & TDF_DETAILS))
3903 dump_dereferences_table (dump_file,
3904 "Dereference table after propagation:\n",
3905 bb_dereferences);
3907 for (i = 0; i < func_param_count; i++)
3909 struct access *repr = representatives[i];
3910 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3912 if (!repr || no_accesses_p (repr))
3913 continue;
3917 if ((repr->offset + repr->size) > bb_dereferences[idx])
3918 repr->grp_not_necessarilly_dereferenced = 1;
3919 repr = repr->next_grp;
3921 while (repr);
3925 /* Return the representative access for the parameter declaration PARM if it is
3926 a scalar passed by reference which is not written to and the pointer value
3927 is not used directly. Thus, if it is legal to dereference it in the caller
3928 and we can rule out modifications through aliases, such a parameter should
3929 be turned into one passed by value. Return NULL otherwise. */
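/* For example (hypothetical):

     int foo (int *p)
     {
       return *p + *p;
     }

   Both accesses are reads and the pointer value itself is never used
   directly, so a single representative with grp_scalar_ptr set is
   returned and foo becomes a candidate for taking an int by value.  */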
3931 static struct access *
3932 unmodified_by_ref_scalar_representative (tree parm)
3934 int i, access_count;
3935 struct access *repr;
3936 vec<access_p> *access_vec;
3938 access_vec = get_base_access_vector (parm);
3939 gcc_assert (access_vec);
3940 repr = (*access_vec)[0];
3941 if (repr->write)
3942 return NULL;
3943 repr->group_representative = repr;
3945 access_count = access_vec->length ();
3946 for (i = 1; i < access_count; i++)
3948 struct access *access = (*access_vec)[i];
3949 if (access->write)
3950 return NULL;
3951 access->group_representative = repr;
3952 access->next_sibling = repr->next_sibling;
3953 repr->next_sibling = access;
3956 repr->grp_read = 1;
3957 repr->grp_scalar_ptr = 1;
3958 return repr;
3961 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3962 associated with. REQ_ALIGN is the minimum required alignment. */
3964 static bool
3965 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3967 unsigned int exp_align;
3968 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3969 is an incompatible assignment in a call statement (and possibly even in asm
3970 statements). This can be relaxed by using a new temporary but only for
3971 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3972 intraprocedural SRA we deal with this by keeping the old aggregate around,
3973 something we cannot do in IPA-SRA.) */
3974 if (access->write
3975 && (is_gimple_call (access->stmt)
3976 || gimple_code (access->stmt) == GIMPLE_ASM))
3977 return true;
3979 exp_align = get_object_alignment (access->expr);
3980 if (exp_align < req_align)
3981 return true;
3983 return false;
3987 /* Sort the collected accesses for parameter PARM, identify a representative
3988 for each accessed region and link the representatives together. Return NULL
3989 if there are different but overlapping accesses, return the special pointer
3990 value no_accesses_representant if there are no accesses for this parameter
3991 at all, and return the first representative otherwise. Set *RO_GRP if there
3992 is a group consisting only of read (i.e. no write) accesses. */
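/* Overlap illustration with hypothetical offset/size pairs in bits:
   accesses <0, 64> and <32, 64> have different offsets and 0 + 64 > 32,
   so they partially overlap and NULL is returned; <0, 64> and <0, 32>
   fail as well, because accesses at equal offsets must have equal
   sizes.  The pair <0, 32> and <32, 32> is fine and yields two
   representatives.  */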
3994 static struct access *
3995 splice_param_accesses (tree parm, bool *ro_grp)
3997 int i, j, access_count, group_count;
3998 int agg_size, total_size = 0;
3999 struct access *access, *res, **prev_acc_ptr = &res;
4000 vec<access_p> *access_vec;
4002 access_vec = get_base_access_vector (parm);
4003 if (!access_vec)
4004 return &no_accesses_representant;
4005 access_count = access_vec->length ();
4007 access_vec->qsort (compare_access_positions);
4009 i = 0;
4010 total_size = 0;
4011 group_count = 0;
4012 while (i < access_count)
4014 bool modification;
4015 tree a1_alias_type;
4016 access = (*access_vec)[i];
4017 modification = access->write;
4018 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4019 return NULL;
4020 a1_alias_type = reference_alias_ptr_type (access->expr);
4022 /* Access is about to become group representative unless we find some
4023 nasty overlap which would preclude us from breaking this parameter
4024 apart. */
4026 j = i + 1;
4027 while (j < access_count)
4029 struct access *ac2 = (*access_vec)[j];
4030 if (ac2->offset != access->offset)
4032 /* All or nothing law for parameters. */
4033 if (access->offset + access->size > ac2->offset)
4034 return NULL;
4035 else
4036 break;
4038 else if (ac2->size != access->size)
4039 return NULL;
4041 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4042 || (ac2->type != access->type
4043 && (TREE_ADDRESSABLE (ac2->type)
4044 || TREE_ADDRESSABLE (access->type)))
4045 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4046 return NULL;
4048 modification |= ac2->write;
4049 ac2->group_representative = access;
4050 ac2->next_sibling = access->next_sibling;
4051 access->next_sibling = ac2;
4052 j++;
4055 group_count++;
4056 access->grp_maybe_modified = modification;
4057 if (!modification)
4058 *ro_grp = true;
4059 *prev_acc_ptr = access;
4060 prev_acc_ptr = &access->next_grp;
4061 total_size += access->size;
4062 i = j;
4065 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4066 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4067 else
4068 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4069 if (total_size >= agg_size)
4070 return NULL;
4072 gcc_assert (group_count > 0);
4073 return res;
4076 /* Decide whether the parameter with representative accesses given by REPR
4077 should be reduced into components. Return the number of new components,
4078 or zero if the parameter is to be left intact. */
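/* A sizing sketch with hypothetical numbers (sizes are in bits, as
   returned by TYPE_SIZE): for "struct S *p" with S 256 bits wide and
   64-bit pointers, cur_parm_size is 64 and agg_size is 256.  When not
   optimizing for size, total_size of the components may grow up to
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR * 64 bits but must stay below 256,
   i.e. the replacements may outgrow the pointer they replace but never
   the whole aggregate.  */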
4079 static int
4080 decide_one_param_reduction (struct access *repr)
4082 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4083 bool by_ref;
4084 tree parm;
4086 parm = repr->base;
4087 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4088 gcc_assert (cur_parm_size > 0);
4090 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4092 by_ref = true;
4093 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4095 else
4097 by_ref = false;
4098 agg_size = cur_parm_size;
4101 if (dump_file)
4103 struct access *acc;
4104 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4105 print_generic_expr (dump_file, parm, 0);
4106 fprintf (dump_file, " (UID: %u):\n", DECL_UID (parm));
4107 for (acc = repr; acc; acc = acc->next_grp)
4108 dump_access (dump_file, acc, true);
4111 total_size = 0;
4112 new_param_count = 0;
4114 for (; repr; repr = repr->next_grp)
4116 gcc_assert (parm == repr->base);
4118 /* Taking the address of a non-addressable field is verboten. */
4119 if (by_ref && repr->non_addressable)
4120 return 0;
4122 /* Do not decompose a non-BLKmode param in a way that would
4123 create BLKmode params. Especially for by-reference passing
4124 (thus, pointer-type param) this is hardly worthwhile. */
4125 if (DECL_MODE (parm) != BLKmode
4126 && TYPE_MODE (repr->type) == BLKmode)
4127 return 0;
4129 if (!by_ref || (!repr->grp_maybe_modified
4130 && !repr->grp_not_necessarilly_dereferenced))
4131 total_size += repr->size;
4132 else
4133 total_size += cur_parm_size;
4135 new_param_count++;
4138 gcc_assert (new_param_count > 0);
4140 if (optimize_function_for_size_p (cfun))
4141 parm_size_limit = cur_parm_size;
4142 else
4143 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4144 * cur_parm_size);
4146 if (total_size < agg_size
4147 && total_size <= parm_size_limit)
4149 if (dump_file)
4150 fprintf (dump_file, " ....will be split into %i components\n",
4151 new_param_count);
4152 return new_param_count;
4154 else
4155 return 0;
4158 /* The order of the following enumeration values is important; we need to do
4159 extra work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4160 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4161 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4163 /* Identify representatives of all accesses to all candidate parameters for
4164 IPA-SRA. Return result based on what representatives have been found. */
4166 static enum ipa_splicing_result
4167 splice_all_param_accesses (vec<access_p> &representatives)
4169 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4170 tree parm;
4171 struct access *repr;
4173 representatives.create (func_param_count);
4175 for (parm = DECL_ARGUMENTS (current_function_decl);
4176 parm;
4177 parm = DECL_CHAIN (parm))
4179 if (is_unused_scalar_param (parm))
4181 representatives.quick_push (&no_accesses_representant);
4182 if (result == NO_GOOD_ACCESS)
4183 result = UNUSED_PARAMS;
4185 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4186 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4187 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4189 repr = unmodified_by_ref_scalar_representative (parm);
4190 representatives.quick_push (repr);
4191 if (repr)
4192 result = UNMODIF_BY_REF_ACCESSES;
4194 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4196 bool ro_grp = false;
4197 repr = splice_param_accesses (parm, &ro_grp);
4198 representatives.quick_push (repr);
4200 if (repr && !no_accesses_p (repr))
4202 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4204 if (ro_grp)
4205 result = UNMODIF_BY_REF_ACCESSES;
4206 else if (result < MODIF_BY_REF_ACCESSES)
4207 result = MODIF_BY_REF_ACCESSES;
4209 else if (result < BY_VAL_ACCESSES)
4210 result = BY_VAL_ACCESSES;
4212 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4213 result = UNUSED_PARAMS;
4215 else
4216 representatives.quick_push (NULL);
4219 if (result == NO_GOOD_ACCESS)
4221 representatives.release ();
4222 return NO_GOOD_ACCESS;
4225 return result;
4228 /* Return the index of BASE in PARMS. Abort if it is not found. */
4230 static inline int
4231 get_param_index (tree base, vec<tree> parms)
4233 int i, len;
4235 len = parms.length ();
4236 for (i = 0; i < len; i++)
4237 if (parms[i] == base)
4238 return i;
4239 gcc_unreachable ();
4242 /* Convert the decisions made at the representative level into compact
4243 parameter adjustments. REPRESENTATIVES are pointers to the first
4244 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4245 expected final number of adjustments. */
4247 static ipa_parm_adjustment_vec
4248 turn_representatives_into_adjustments (vec<access_p> representatives,
4249 int adjustments_count)
4251 vec<tree> parms;
4252 ipa_parm_adjustment_vec adjustments;
4253 tree parm;
4254 int i;
4256 gcc_assert (adjustments_count > 0);
4257 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4258 adjustments.create (adjustments_count);
4259 parm = DECL_ARGUMENTS (current_function_decl);
4260 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4262 struct access *repr = representatives[i];
4264 if (!repr || no_accesses_p (repr))
4266 struct ipa_parm_adjustment adj;
4268 memset (&adj, 0, sizeof (adj));
4269 adj.base_index = get_param_index (parm, parms);
4270 adj.base = parm;
4271 if (!repr)
4272 adj.copy_param = 1;
4273 else
4274 adj.remove_param = 1;
4275 adjustments.quick_push (adj);
4277 else
4279 struct ipa_parm_adjustment adj;
4280 int index = get_param_index (parm, parms);
4282 for (; repr; repr = repr->next_grp)
4284 memset (&adj, 0, sizeof (adj));
4285 gcc_assert (repr->base == parm);
4286 adj.base_index = index;
4287 adj.base = repr->base;
4288 adj.type = repr->type;
4289 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4290 adj.offset = repr->offset;
4291 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4292 && (repr->grp_maybe_modified
4293 || repr->grp_not_necessarilly_dereferenced));
4294 adjustments.quick_push (adj);
4298 parms.release ();
4299 return adjustments;
4302 /* Analyze the collected accesses and produce a plan for what to do with the
4303 parameters in the form of adjustments; an empty vector means that nothing
4304 is to be changed. */
4305 static ipa_parm_adjustment_vec
4306 analyze_all_param_acesses (void)
4308 enum ipa_splicing_result repr_state;
4309 bool proceed = false;
4310 int i, adjustments_count = 0;
4311 vec<access_p> representatives;
4312 ipa_parm_adjustment_vec adjustments;
4314 repr_state = splice_all_param_accesses (representatives);
4315 if (repr_state == NO_GOOD_ACCESS)
4316 return ipa_parm_adjustment_vec ();
4318 /* If there are any parameters passed by reference which are not modified
4319 directly, we need to check whether they can be modified indirectly. */
4320 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4322 analyze_caller_dereference_legality (representatives);
4323 analyze_modified_params (representatives);
4326 for (i = 0; i < func_param_count; i++)
4328 struct access *repr = representatives[i];
4330 if (repr && !no_accesses_p (repr))
4332 if (repr->grp_scalar_ptr)
4334 adjustments_count++;
4335 if (repr->grp_not_necessarilly_dereferenced
4336 || repr->grp_maybe_modified)
4337 representatives[i] = NULL;
4338 else
4340 proceed = true;
4341 sra_stats.scalar_by_ref_to_by_val++;
4344 else
4346 int new_components = decide_one_param_reduction (repr);
4348 if (new_components == 0)
4350 representatives[i] = NULL;
4351 adjustments_count++;
4353 else
4355 adjustments_count += new_components;
4356 sra_stats.aggregate_params_reduced++;
4357 sra_stats.param_reductions_created += new_components;
4358 proceed = true;
4362 else
4364 if (no_accesses_p (repr))
4366 proceed = true;
4367 sra_stats.deleted_unused_parameters++;
4369 adjustments_count++;
4373 if (!proceed && dump_file)
4374 fprintf (dump_file, "NOT proceeding to change params.\n");
4376 if (proceed)
4377 adjustments = turn_representatives_into_adjustments (representatives,
4378 adjustments_count);
4379 else
4380 adjustments = ipa_parm_adjustment_vec ();
4382 representatives.release ();
4383 return adjustments;
4386 /* If a parameter replacement identified by ADJ does not yet exist in the form
4387 of a declaration, create and record it; otherwise return the previously
4388 created one. */
4390 static tree
4391 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4393 tree repl;
4394 if (!adj->new_ssa_base)
4396 char *pretty_name = make_fancy_name (adj->base);
4398 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4399 DECL_NAME (repl) = get_identifier (pretty_name);
4400 obstack_free (&name_obstack, pretty_name);
4402 adj->new_ssa_base = repl;
4404 else
4405 repl = adj->new_ssa_base;
4406 return repl;
4409 /* Find the first adjustment for a particular parameter BASE in a vector of
4410 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4411 adjustment. */
4413 static struct ipa_parm_adjustment *
4414 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4416 int i, len;
4418 len = adjustments.length ();
4419 for (i = 0; i < len; i++)
4421 struct ipa_parm_adjustment *adj;
4423 adj = &adjustments[i];
4424 if (!adj->copy_param && adj->base == base)
4425 return adj;
4428 return NULL;
4431 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4432 removed because its value is not used, replace the SSA_NAME, together with
4433 all of its uses, with one relating to a newly created VAR_DECL and return
4434 true. ADJUSTMENTS is the vector of adjustments. */
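/* A hypothetical sketch in GIMPLE terms: for a removed parameter i, a
   remaining definition such as

     i_1 = PHI <i_2(D), i_3>

   gets its result replaced by an SSA name based on the "ISR" temporary
   from get_replaced_param_substitute, and all uses of i_1 follow, so
   that releasing the old name cannot leave dangling uses behind.  */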
4436 static bool
4437 replace_removed_params_ssa_names (gimple stmt,
4438 ipa_parm_adjustment_vec adjustments)
4440 struct ipa_parm_adjustment *adj;
4441 tree lhs, decl, repl, name;
4443 if (gimple_code (stmt) == GIMPLE_PHI)
4444 lhs = gimple_phi_result (stmt);
4445 else if (is_gimple_assign (stmt))
4446 lhs = gimple_assign_lhs (stmt);
4447 else if (is_gimple_call (stmt))
4448 lhs = gimple_call_lhs (stmt);
4449 else
4450 gcc_unreachable ();
4452 if (TREE_CODE (lhs) != SSA_NAME)
4453 return false;
4455 decl = SSA_NAME_VAR (lhs);
4456 if (decl == NULL_TREE
4457 || TREE_CODE (decl) != PARM_DECL)
4458 return false;
4460 adj = get_adjustment_for_base (adjustments, decl);
4461 if (!adj)
4462 return false;
4464 repl = get_replaced_param_substitute (adj);
4465 name = make_ssa_name (repl, stmt);
4467 if (dump_file)
4469 fprintf (dump_file, "replacing an SSA name of a removed param ");
4470 print_generic_expr (dump_file, lhs, 0);
4471 fprintf (dump_file, " with ");
4472 print_generic_expr (dump_file, name, 0);
4473 fprintf (dump_file, "\n");
4476 if (is_gimple_assign (stmt))
4477 gimple_assign_set_lhs (stmt, name);
4478 else if (is_gimple_call (stmt))
4479 gimple_call_set_lhs (stmt, name);
4480 else
4481 gimple_phi_set_result (stmt, name);
4483 replace_uses_by (lhs, name);
4484 release_ssa_name (lhs);
4485 return true;
4488 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4489 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether
4490 the function should care about type incompatibility between the current and
4491 new expressions. If it is false, the function will leave incompatibility
4492 issues to the caller. Return true iff the expression was modified. */
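/* For example (hypothetical): when an adjustment reduces parameter s to
   its component at offset 0, a matching read of s.f is rewritten to the
   component's new declaration (cand->reduction), wrapped in a MEM_REF
   if the component is still passed by reference and in a
   VIEW_CONVERT_EXPR if CONVERT is set and the types are not considered
   compatible.  */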
4495 static bool
4496 sra_ipa_modify_expr (tree *expr, bool convert,
4497 ipa_parm_adjustment_vec adjustments)
4499 int i, len;
4500 struct ipa_parm_adjustment *adj, *cand = NULL;
4501 HOST_WIDE_INT offset, size, max_size;
4502 tree base, src;
4504 len = adjustments.length ();
4506 if (TREE_CODE (*expr) == BIT_FIELD_REF
4507 || TREE_CODE (*expr) == IMAGPART_EXPR
4508 || TREE_CODE (*expr) == REALPART_EXPR)
4510 expr = &TREE_OPERAND (*expr, 0);
4511 convert = true;
4514 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4515 if (!base || size == -1 || max_size == -1)
4516 return false;
4518 if (TREE_CODE (base) == MEM_REF)
4520 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4521 base = TREE_OPERAND (base, 0);
4524 base = get_ssa_base_param (base);
4525 if (!base || TREE_CODE (base) != PARM_DECL)
4526 return false;
4528 for (i = 0; i < len; i++)
4530 adj = &adjustments[i];
4532 if (adj->base == base
4533 && (adj->offset == offset || adj->remove_param))
4535 cand = adj;
4536 break;
4539 if (!cand || cand->copy_param || cand->remove_param)
4540 return false;
4542 if (cand->by_ref)
4543 src = build_simple_mem_ref (cand->reduction);
4544 else
4545 src = cand->reduction;
4547 if (dump_file && (dump_flags & TDF_DETAILS))
4549 fprintf (dump_file, "About to replace expr ");
4550 print_generic_expr (dump_file, *expr, 0);
4551 fprintf (dump_file, " with ");
4552 print_generic_expr (dump_file, src, 0);
4553 fprintf (dump_file, "\n");
4556 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4558 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4559 *expr = vce;
4561 else
4562 *expr = src;
4563 return true;
4566 /* If the statement pointed to by STMT_PTR contains any expressions that need
4567 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4568 any potential type incompatibilities (GSI is used to accommodate conversion
4569 statements and must point to the statement). Return true iff the statement
4570 was modified. */
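/* Sketch of the repair performed below (hypothetical): if rewriting
   turns the statement into an assignment whose sides no longer have
   compatible types, a VIEW_CONVERT_EXPR is normally inserted; but
   V_C_Es of CONSTRUCTORs are invalid (PR 42714), so a CONSTRUCTOR on
   the right-hand side is instead rebuilt as a zero constant or an empty
   CONSTRUCTOR of the left-hand type.  */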
4572 static bool
4573 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4574 ipa_parm_adjustment_vec adjustments)
4576 gimple stmt = *stmt_ptr;
4577 tree *lhs_p, *rhs_p;
4578 bool any;
4580 if (!gimple_assign_single_p (stmt))
4581 return false;
4583 rhs_p = gimple_assign_rhs1_ptr (stmt);
4584 lhs_p = gimple_assign_lhs_ptr (stmt);
4586 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4587 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4588 if (any)
4590 tree new_rhs = NULL_TREE;
4592 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4594 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4596 /* V_C_Es of constructors can cause trouble (PR 42714). */
4597 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4598 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4599 else
4600 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4601 NULL);
4603 else
4604 new_rhs = fold_build1_loc (gimple_location (stmt),
4605 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4606 *rhs_p);
4608 else if (REFERENCE_CLASS_P (*rhs_p)
4609 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4610 && !is_gimple_reg (*lhs_p))
4611 /* This can happen when an assignment between two single-field
4612 structures is turned into an assignment between two pointers to
4613 scalars (PR 42237). */
4614 new_rhs = *rhs_p;
4616 if (new_rhs)
4618 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4619 true, GSI_SAME_STMT);
4621 gimple_assign_set_rhs_from_tree (gsi, tmp);
4624 return true;
4627 return false;
4630 /* Traverse the function body and perform all modifications as described in
4631 ADJUSTMENTS. Return true iff the CFG has been changed. */
4633 static bool
4634 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4636 bool cfg_changed = false;
4637 basic_block bb;
4639 FOR_EACH_BB (bb)
4641 gimple_stmt_iterator gsi;
4643 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4644 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4646 gsi = gsi_start_bb (bb);
4647 while (!gsi_end_p (gsi))
4649 gimple stmt = gsi_stmt (gsi);
4650 bool modified = false;
4651 tree *t;
4652 unsigned i;
4654 switch (gimple_code (stmt))
4656 case GIMPLE_RETURN:
4657 t = gimple_return_retval_ptr (stmt);
4658 if (*t != NULL_TREE)
4659 modified |= sra_ipa_modify_expr (t, true, adjustments);
4660 break;
4662 case GIMPLE_ASSIGN:
4663 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4664 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4665 break;
4667 case GIMPLE_CALL:
4668 /* Operands must be processed before the lhs. */
4669 for (i = 0; i < gimple_call_num_args (stmt); i++)
4671 t = gimple_call_arg_ptr (stmt, i);
4672 modified |= sra_ipa_modify_expr (t, true, adjustments);
4675 if (gimple_call_lhs (stmt))
4677 t = gimple_call_lhs_ptr (stmt);
4678 modified |= sra_ipa_modify_expr (t, false, adjustments);
4679 modified |= replace_removed_params_ssa_names (stmt,
4680 adjustments);
4682 break;
4684 case GIMPLE_ASM:
4685 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4687 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4688 modified |= sra_ipa_modify_expr (t, true, adjustments);
4690 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4692 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4693 modified |= sra_ipa_modify_expr (t, false, adjustments);
4695 break;
4697 default:
4698 break;
4701 if (modified)
4703 update_stmt (stmt);
4704 if (maybe_clean_eh_stmt (stmt)
4705 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4706 cfg_changed = true;
4708 gsi_next (&gsi);
4712 return cfg_changed;
4715 /* Call gimple_debug_bind_reset_value on all debug statements describing
4716 gimple register parameters that are being removed or replaced. */
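/* Sketch of the intended effect on hypothetical debug statements for a
   removed gimple-register parameter len: a source bind

     # DEBUG D#1 s=> len

   is inserted after the entry-block labels and remaining debug uses of
   len's default definition are redirected to D#1; when no such
   expression can be created, the binds are reset instead of referring
   to a released SSA name.  */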
4718 static void
4719 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4721 int i, len;
4722 gimple_stmt_iterator *gsip = NULL, gsi;
4724 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4726 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4727 gsip = &gsi;
4729 len = adjustments.length ();
4730 for (i = 0; i < len; i++)
4732 struct ipa_parm_adjustment *adj;
4733 imm_use_iterator ui;
4734 gimple stmt, def_temp;
4735 tree name, vexpr, copy = NULL_TREE;
4736 use_operand_p use_p;
4738 adj = &adjustments[i];
4739 if (adj->copy_param || !is_gimple_reg (adj->base))
4740 continue;
4741 name = ssa_default_def (cfun, adj->base);
4742 vexpr = NULL;
4743 if (name)
4744 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4746 if (gimple_clobber_p (stmt))
4748 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4749 unlink_stmt_vdef (stmt);
4750 gsi_remove (&cgsi, true);
4751 release_defs (stmt);
4752 continue;
4754 /* All other users must have been removed by
4755 ipa_sra_modify_function_body. */
4756 gcc_assert (is_gimple_debug (stmt));
4757 if (vexpr == NULL && gsip != NULL)
4759 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4760 vexpr = make_node (DEBUG_EXPR_DECL);
4761 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4762 NULL);
4763 DECL_ARTIFICIAL (vexpr) = 1;
4764 TREE_TYPE (vexpr) = TREE_TYPE (name);
4765 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4766 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4768 if (vexpr)
4770 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4771 SET_USE (use_p, vexpr);
4773 else
4774 gimple_debug_bind_reset_value (stmt);
4775 update_stmt (stmt);
4777 /* Create a VAR_DECL for debug info purposes. */
4778 if (!DECL_IGNORED_P (adj->base))
4780 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4781 VAR_DECL, DECL_NAME (adj->base),
4782 TREE_TYPE (adj->base));
4783 if (DECL_PT_UID_SET_P (adj->base))
4784 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4785 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4786 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4787 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4788 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4789 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4790 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4791 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4792 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4793 SET_DECL_RTL (copy, 0);
4794 TREE_USED (copy) = 1;
4795 DECL_CONTEXT (copy) = current_function_decl;
4796 add_local_decl (cfun, copy);
4797 DECL_CHAIN (copy) =
4798 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4799 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4801 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4803 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4804 if (vexpr)
4805 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4806 else
4807 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4808 NULL);
4809 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4814 /* Return true iff some caller does not pass at least as many actual arguments
4815 as there are formal parameters in the current function. */
4817 static bool
4818 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4819 void *data ATTRIBUTE_UNUSED)
4821 struct cgraph_edge *cs;
4822 for (cs = node->callers; cs; cs = cs->next_caller)
4823 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4824 return true;
4826 return false;
4829 /* Convert all callers of NODE. */
4831 static bool
4832 convert_callers_for_node (struct cgraph_node *node,
4833 void *data)
4835 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4836 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4837 struct cgraph_edge *cs;
4839 for (cs = node->callers; cs; cs = cs->next_caller)
4841 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4843 if (dump_file)
4844 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4845 xstrdup (cgraph_node_name (cs->caller)),
4846 cs->caller->symbol.order,
4847 xstrdup (cgraph_node_name (cs->callee)),
4848 cs->callee->symbol.order);
4850 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4852 pop_cfun ();
4855 for (cs = node->callers; cs; cs = cs->next_caller)
4856 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4857 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4858 compute_inline_parameters (cs->caller, true);
4859 BITMAP_FREE (recomputed_callers);
4861 return true;
4864 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
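/* Ordinary call sites are rewritten through the cgraph edge list, but
   recursive calls live in the body of the new clone itself
   (hypothetical sketch):

     foo.isra.0 (...) { ... foo (x); ... }

   so the loop below finds calls whose fndecl is still OLD_DECL,
   redirects them to NODE's decl and adjusts their arguments in
   place.  */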
4866 static void
4867 convert_callers (struct cgraph_node *node, tree old_decl,
4868 ipa_parm_adjustment_vec adjustments)
4870 basic_block this_block;
4872 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4873 &adjustments, false);
4875 if (!encountered_recursive_call)
4876 return;
4878 FOR_EACH_BB (this_block)
4880 gimple_stmt_iterator gsi;
4882 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4884 gimple stmt = gsi_stmt (gsi);
4885 tree call_fndecl;
4886 if (gimple_code (stmt) != GIMPLE_CALL)
4887 continue;
4888 call_fndecl = gimple_call_fndecl (stmt);
4889 if (call_fndecl == old_decl)
4891 if (dump_file)
4892 fprintf (dump_file, "Adjusting recursive call\n");
4893 gimple_call_set_fndecl (stmt, node->symbol.decl);
4894 ipa_modify_call_arguments (NULL, stmt, adjustments);
4899 return;
4902 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4903 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4905 static bool
4906 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4908 struct cgraph_node *new_node;
4909 bool cfg_changed;
4910 vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4912 rebuild_cgraph_edges ();
4913 free_dominance_info (CDI_DOMINATORS);
4914 pop_cfun ();
4916 new_node = cgraph_function_versioning (node, redirect_callers,
4917 NULL,
4918 NULL, false, NULL, NULL, "isra");
4919 redirect_callers.release ();
4921 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4922 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4923 cfg_changed = ipa_sra_modify_function_body (adjustments);
4924 sra_ipa_reset_debug_stmts (adjustments);
4925 convert_callers (new_node, node->symbol.decl, adjustments);
4926 cgraph_make_node_local (new_node);
4927 return cfg_changed;
4930 /* If NODE has a caller, return true. */
4932 static bool
4933 has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4935 if (node->callers)
4936 return true;
4937 return false;
4940 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4941 its attributes, return true otherwise. NODE is the cgraph node of the
4942 current function. */
4944 static bool
4945 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4947 if (!cgraph_node_can_be_local_p (node))
4949 if (dump_file)
4950 fprintf (dump_file, "Function not local to this compilation unit.\n");
4951 return false;
4954 if (!node->local.can_change_signature)
4956 if (dump_file)
4957 fprintf (dump_file, "Function can not change signature.\n");
4958 return false;
4961 if (!tree_versionable_function_p (node->symbol.decl))
4963 if (dump_file)
4964 fprintf (dump_file, "Function is not versionable.\n");
4965 return false;
4968 if (DECL_VIRTUAL_P (current_function_decl))
4970 if (dump_file)
4971 fprintf (dump_file, "Function is a virtual method.\n");
4972 return false;
4975 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4976 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4978 if (dump_file)
4979 fprintf (dump_file, "Function too big to be made truly local.\n");
4980 return false;
4983 if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
4985 if (dump_file)
4986 fprintf (dump_file,
4987 "Function has no callers in this compilation unit.\n");
4988 return false;
4991 if (cfun->stdarg)
4993 if (dump_file)
4994 fprintf (dump_file, "Function uses stdarg.\n");
4995 return false;
4998 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4999 return false;
5001 return true;
5004 /* Perform early interprocedural SRA. */
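/* An end-to-end sketch (hypothetical): for

     static int foo (struct S *p) { return p->a; }

   the pass identifies p as a candidate, scans the body, checks that
   every call site can be adjusted, and then versions foo into something
   like "foo.isra.0 (int ISRA.3)", whose callers load p->a themselves
   and pass the value directly.  */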
5006 static unsigned int
5007 ipa_early_sra (void)
5009 struct cgraph_node *node = cgraph_get_node (current_function_decl);
5010 ipa_parm_adjustment_vec adjustments;
5011 int ret = 0;
5013 if (!ipa_sra_preliminary_function_checks (node))
5014 return 0;
5016 sra_initialize ();
5017 sra_mode = SRA_MODE_EARLY_IPA;
5019 if (!find_param_candidates ())
5021 if (dump_file)
5022 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5023 goto simple_out;
5026 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
5027 NULL, true))
5029 if (dump_file)
5030 fprintf (dump_file, "There are callers with insufficient number of "
5031 "arguments.\n");
5032 goto simple_out;
5035 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5036 func_param_count
5037 * last_basic_block_for_function (cfun));
5038 final_bbs = BITMAP_ALLOC (NULL);
5040 scan_function ();
5041 if (encountered_apply_args)
5043 if (dump_file)
5044 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5045 goto out;
5048 if (encountered_unchangable_recursive_call)
5050 if (dump_file)
5051 fprintf (dump_file, "Function calls itself with insufficient "
5052 "number of arguments.\n");
5053 goto out;
5056 adjustments = analyze_all_param_acesses ();
5057 if (!adjustments.exists ())
5058 goto out;
5059 if (dump_file)
5060 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5062 if (modify_function (node, adjustments))
5063 ret = TODO_update_ssa | TODO_cleanup_cfg;
5064 else
5065 ret = TODO_update_ssa;
5066 adjustments.release ();
5068 statistics_counter_event (cfun, "Unused parameters deleted",
5069 sra_stats.deleted_unused_parameters);
5070 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5071 sra_stats.scalar_by_ref_to_by_val);
5072 statistics_counter_event (cfun, "Aggregate parameters broken up",
5073 sra_stats.aggregate_params_reduced);
5074 statistics_counter_event (cfun, "Aggregate parameter components created",
5075 sra_stats.param_reductions_created);
5077 out:
5078 BITMAP_FREE (final_bbs);
5079 free (bb_dereferences);
5080 simple_out:
5081 sra_deinitialize ();
5082 return ret;
5085 /* Return true iff early IPA-SRA shall be performed. */
5086 static bool
5087 ipa_early_sra_gate (void)
5089 return flag_ipa_sra && dbg_cnt (eipa_sra);
5092 namespace {
5094 const pass_data pass_data_early_ipa_sra =
5096 GIMPLE_PASS, /* type */
5097 "eipa_sra", /* name */
5098 OPTGROUP_NONE, /* optinfo_flags */
5099 true, /* has_gate */
5100 true, /* has_execute */
5101 TV_IPA_SRA, /* tv_id */
5102 0, /* properties_required */
5103 0, /* properties_provided */
5104 0, /* properties_destroyed */
5105 0, /* todo_flags_start */
5106 TODO_dump_symtab, /* todo_flags_finish */
5109 class pass_early_ipa_sra : public gimple_opt_pass
5111 public:
5112 pass_early_ipa_sra (gcc::context *ctxt)
5113 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5116 /* opt_pass methods: */
5117 bool gate () { return ipa_early_sra_gate (); }
5118 unsigned int execute () { return ipa_early_sra (); }
5120 }; // class pass_early_ipa_sra
5122 } // anon namespace
5124 gimple_opt_pass *
5125 make_pass_early_ipa_sra (gcc::context *ctxt)
5127 return new pass_early_ipa_sra (ctxt);