/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
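
/* As a quick illustration (a made-up example, not taken from any testsuite),
   intraprocedural SRA can rewrite

     struct S { int i; float f; } s;
     s.i = 123;
     s.f = 4.5f;
     return s.i;

   into uses of two independent scalars, roughly

     int SR_s_i;
     float SR_s_f;
     SR_s_i = 123;
     SR_s_f = 4.5f;
     return SR_s_i;

   provided no statement needs s as a whole.  The "SR" prefix matches the
   names created by create_access_replacement () below; the exact GIMPLE
   produced of course differs from this C-level sketch.  */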

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "target.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole vector or complex number.  It is the
   duty of the modifying functions to replace them appropriately.  */
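
/* For instance (a hypothetical layout, assuming 32-bit int), for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   the representatives of an "outer" variable could form the tree

     <0, 96>  outer          (offset 0, size 96 bits)
       <0, 32>   in.a
       <32, 32>  in.b
       <64, 32>  c

   where each child lies entirely within its parent and children are ordered
   by offset, as described above.  */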

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses", 16);

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links", 16);

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
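
/* For example (made-up declarations, not from the sources), SRA gives up on

     struct P { volatile int v; int w; };

   with the "volatile structure field" message above, whereas a plain

     struct Q { int x; struct { int y; } nested; };

   passes all of these checks and stays a candidate.  */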

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
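
/* To illustrate (hypothetical declarations): a record such as

     struct ok  { int a; struct { double d; } n; };

   satisfies this predicate, while

     struct bad { int a : 3; int b; };	/- contains a bit-field -/
     struct arr { int a[4]; };		/- array member is neither kind -/

   do not, so only the former is eligible for total scalarization below.  */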

/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_to_uhwi (DECL_SIZE (fld));
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}

/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE conforming to type_consists_of_records_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line:  */
  return f1->size > f2->size ? -1 : 1;
}
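
/* As an illustration of the resulting order (offsets and sizes in bits,
   made-up figures): accesses <0, 32>, <32, 32> and <0, 64> sort as

     <0, 64>  <0, 32>  <32, 32>

   i.e. by ascending offset, and for equal offsets the bigger access comes
   first, so that sort_and_splice_var_accesses () below sees an enclosing
   access before anything contained in it.  */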

/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
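
/* For example (a hypothetical access path), the expression

     p->records[2].count

   would yield a name along the lines of "p$records$2$count", which then
   shows up in dumps and debug info as the replacement variable's
   identifier.  */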

/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
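
/* Sketching the effect (hypothetical GIMPLE, types elided): for a DECL base
   "s" and OFFSET 64, the reference built here is essentially

     MEM[(exp_type *)&s + 8B]

   and when the base has a variable offset, a statement such as

     tmp_1 = &s.a[i_2];

   is first inserted at GSI so the MEM_REF can use tmp_1 as its base.  */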

/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}

/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
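
/* For instance (made-up types and offsets, assuming 32-bit int), asking for
   offset 96 within

     struct { int hdr; int data[8]; } v;

   would rewrite *RES step by step into

     v.data[2]

   descending first through the COMPONENT_REF case and then the ARRAY_REF
   case above.  */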

/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print a message to the dump file stating why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
1756 static bool
1757 maybe_add_sra_candidate (tree var)
1759 tree type = TREE_TYPE (var);
1760 const char *msg;
1761 tree_node **slot;
1763 if (!AGGREGATE_TYPE_P (type))
1765 reject (var, "not aggregate");
1766 return false;
1768 if (needs_to_live_in_memory (var))
1770 reject (var, "needs to live in memory");
1771 return false;
1773 if (TREE_THIS_VOLATILE (var))
1775 reject (var, "is volatile");
1776 return false;
1778 if (!COMPLETE_TYPE_P (type))
1780 reject (var, "has incomplete type");
1781 return false;
1783 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1785 reject (var, "type size not fixed");
1786 return false;
1788 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1790 reject (var, "type size is zero");
1791 return false;
1793 if (type_internals_preclude_sra_p (type, &msg))
1795 reject (var, msg);
1796 return false;
1798 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1799 we also want to schedule it rather late. Thus we ignore it in
1800 the early pass. */
1801 (sra_mode == SRA_MODE_EARLY_INTRA
1802 && is_va_list_type (type)))
1804 reject (var, "is va_list");
1805 return false;
1808 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1809 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1810 *slot = var;
1812 if (dump_file && (dump_flags & TDF_DETAILS))
1814 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1815 print_generic_expr (dump_file, var, 0);
1816 fprintf (dump_file, "\n");
1819 return true;

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
1848 /* Sort all accesses for the given variable, check for partial overlaps and
1849 return NULL if there are any. If there are none, pick a representative for
1850 each combination of offset and size and create a linked list out of them.
1851 Return the pointer to the first representative and make sure it is the first
1852 one in the vector of accesses. */
1854 static struct access *
1855 sort_and_splice_var_accesses (tree var)
1857 int i, j, access_count;
1858 struct access *res, **prev_acc_ptr = &res;
1859 vec<access_p> *access_vec;
1860 bool first = true;
1861 HOST_WIDE_INT low = -1, high = 0;
1863 access_vec = get_base_access_vector (var);
1864 if (!access_vec)
1865 return NULL;
1866 access_count = access_vec->length ();
1868 /* Sort by <OFFSET, SIZE>. */
1869 access_vec->qsort (compare_access_positions);
1871 i = 0;
1872 while (i < access_count)
1874 struct access *access = (*access_vec)[i];
1875 bool grp_write = access->write;
1876 bool grp_read = !access->write;
1877 bool grp_scalar_write = access->write
1878 && is_gimple_reg_type (access->type);
1879 bool grp_scalar_read = !access->write
1880 && is_gimple_reg_type (access->type);
1881 bool grp_assignment_read = access->grp_assignment_read;
1882 bool grp_assignment_write = access->grp_assignment_write;
1883 bool multiple_scalar_reads = false;
1884 bool total_scalarization = access->grp_total_scalarization;
1885 bool grp_partial_lhs = access->grp_partial_lhs;
1886 bool first_scalar = is_gimple_reg_type (access->type);
1887 bool unscalarizable_region = access->grp_unscalarizable_region;
1889 if (first || access->offset >= high)
1891 first = false;
1892 low = access->offset;
1893 high = access->offset + access->size;
1895 else if (access->offset > low && access->offset + access->size > high)
1896 return NULL;
1897 else
1898 gcc_assert (access->offset >= low
1899 && access->offset + access->size <= high);
1901 j = i + 1;
1902 while (j < access_count)
1904 struct access *ac2 = (*access_vec)[j];
1905 if (ac2->offset != access->offset || ac2->size != access->size)
1906 break;
1907 if (ac2->write)
1909 grp_write = true;
1910 grp_scalar_write = (grp_scalar_write
1911 || is_gimple_reg_type (ac2->type));
1913 else
1915 grp_read = true;
1916 if (is_gimple_reg_type (ac2->type))
1918 if (grp_scalar_read)
1919 multiple_scalar_reads = true;
1920 else
1921 grp_scalar_read = true;
1924 grp_assignment_read |= ac2->grp_assignment_read;
1925 grp_assignment_write |= ac2->grp_assignment_write;
1926 grp_partial_lhs |= ac2->grp_partial_lhs;
1927 unscalarizable_region |= ac2->grp_unscalarizable_region;
1928 total_scalarization |= ac2->grp_total_scalarization;
1929 relink_to_new_repr (access, ac2);
1931 /* If there are both aggregate-type and scalar-type accesses with
1932 this combination of size and offset, the comparison function
1933 should have put the scalars first. */
1934 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1935 ac2->group_representative = access;
1936 j++;
1939 i = j;
1941 access->group_representative = access;
1942 access->grp_write = grp_write;
1943 access->grp_read = grp_read;
1944 access->grp_scalar_read = grp_scalar_read;
1945 access->grp_scalar_write = grp_scalar_write;
1946 access->grp_assignment_read = grp_assignment_read;
1947 access->grp_assignment_write = grp_assignment_write;
1948 access->grp_hint = multiple_scalar_reads || total_scalarization;
1949 access->grp_total_scalarization = total_scalarization;
1950 access->grp_partial_lhs = grp_partial_lhs;
1951 access->grp_unscalarizable_region = unscalarizable_region;
1952 if (access->first_link)
1953 add_access_to_work_queue (access);
1955 *prev_acc_ptr = access;
1956 prev_acc_ptr = &access->next_grp;
1959 gcc_assert (res == (*access_vec)[0]);
1960 return res;
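
/* For illustration, assuming 32-bit int, a hypothetical

     struct S { int i; int j; } s;
     s.i = 1;        (write access <offset 0, size 32>)
     t1 = s.i;       (scalar read <0, 32>)
     t2 = s.j;       (scalar read <32, 32>)

   produces a sorted vector in which the two <0, 32> accesses are spliced
   into a single group whose representative has both grp_read and grp_write
   set.  Had there also been an access <16, 32>, it would overlap <0, 32>
   only partially and the function would return NULL.  */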
1963 /* Create a variable for the given ACCESS which determines the type, name and a
1964 few other properties. Return the variable declaration; callers store it in
1965 ACCESS->replacement_decl. */
1967 static tree
1968 create_access_replacement (struct access *access)
1970 tree repl;
1972 if (access->grp_to_be_debug_replaced)
1974 repl = create_tmp_var_raw (access->type);
1975 DECL_CONTEXT (repl) = current_function_decl;
1977 else
1978 /* Drop any special alignment on the type if it's not on the main
1979 variant. This avoids issues with weirdo ABIs like AAPCS. */
1980 repl = create_tmp_var (build_qualified_type
1981 (TYPE_MAIN_VARIANT (access->type),
1982 TYPE_QUALS (access->type)), "SR");
1983 if (TREE_CODE (access->type) == COMPLEX_TYPE
1984 || TREE_CODE (access->type) == VECTOR_TYPE)
1986 if (!access->grp_partial_lhs)
1987 DECL_GIMPLE_REG_P (repl) = 1;
1989 else if (access->grp_partial_lhs
1990 && is_gimple_reg_type (access->type))
1991 TREE_ADDRESSABLE (repl) = 1;
1993 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1994 DECL_ARTIFICIAL (repl) = 1;
1995 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1997 if (DECL_NAME (access->base)
1998 && !DECL_IGNORED_P (access->base)
1999 && !DECL_ARTIFICIAL (access->base))
2001 char *pretty_name = make_fancy_name (access->expr);
2002 tree debug_expr = unshare_expr_without_location (access->expr), d;
2003 bool fail = false;
2005 DECL_NAME (repl) = get_identifier (pretty_name);
2006 obstack_free (&name_obstack, pretty_name);
2008 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2009 as DECL_DEBUG_EXPR isn't considered when looking for still
2010 used SSA_NAMEs and thus they could be freed. All debug info
2011 generation cares about is whether something is constant or variable
2012 and that get_ref_base_and_extent works properly on the
2013 expression. It cannot handle accesses at a non-constant offset
2014 though, so just give up in those cases. */
2015 for (d = debug_expr;
2016 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2017 d = TREE_OPERAND (d, 0))
2018 switch (TREE_CODE (d))
2020 case ARRAY_REF:
2021 case ARRAY_RANGE_REF:
2022 if (TREE_OPERAND (d, 1)
2023 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2024 fail = true;
2025 if (TREE_OPERAND (d, 3)
2026 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2027 fail = true;
2028 /* FALLTHRU */
2029 case COMPONENT_REF:
2030 if (TREE_OPERAND (d, 2)
2031 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2032 fail = true;
2033 break;
2034 case MEM_REF:
2035 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2036 fail = true;
2037 else
2038 d = TREE_OPERAND (d, 0);
2039 break;
2040 default:
2041 break;
2043 if (!fail)
2045 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2046 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2048 if (access->grp_no_warning)
2049 TREE_NO_WARNING (repl) = 1;
2050 else
2051 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2053 else
2054 TREE_NO_WARNING (repl) = 1;
2056 if (dump_file)
2058 if (access->grp_to_be_debug_replaced)
2060 fprintf (dump_file, "Created a debug-only replacement for ");
2061 print_generic_expr (dump_file, access->base, 0);
2062 fprintf (dump_file, " offset: %u, size: %u\n",
2063 (unsigned) access->offset, (unsigned) access->size);
2065 else
2067 fprintf (dump_file, "Created a replacement for ");
2068 print_generic_expr (dump_file, access->base, 0);
2069 fprintf (dump_file, " offset: %u, size: %u: ",
2070 (unsigned) access->offset, (unsigned) access->size);
2071 print_generic_expr (dump_file, repl, 0);
2072 fprintf (dump_file, "\n");
2075 sra_stats.replacements++;
2077 return repl;
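
/* As a sketch of the result (the exact name is produced by
   make_fancy_name), for an access representing s.i of a user variable s
   this creates roughly

     int s$i;

   an artificial temporary whose DECL_DEBUG_EXPR is set to s.i, so debug
   info can still describe the value of the original field, unless the
   debug expression would involve a non-constant offset, in which case it
   is dropped as the loop above detects.  */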
2080 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
2082 static inline tree
2083 get_access_replacement (struct access *access)
2085 gcc_checking_assert (access->replacement_decl);
2086 return access->replacement_decl;
2090 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2091 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2092 to by it is not "within" the root. Return false iff some accesses partially
2093 overlap. */
2095 static bool
2096 build_access_subtree (struct access **access)
2098 struct access *root = *access, *last_child = NULL;
2099 HOST_WIDE_INT limit = root->offset + root->size;
2101 *access = (*access)->next_grp;
2102 while (*access && (*access)->offset + (*access)->size <= limit)
2104 if (!last_child)
2105 root->first_child = *access;
2106 else
2107 last_child->next_sibling = *access;
2108 last_child = *access;
2110 if (!build_access_subtree (access))
2111 return false;
2114 if (*access && (*access)->offset < limit)
2115 return false;
2117 return true;
2120 /* Build a tree of access representatives, ACCESS is the pointer to the first
2121 one, others are linked in a list by the next_grp field. Return false iff
2122 some accesses partially overlap. */
2124 static bool
2125 build_access_trees (struct access *access)
2127 while (access)
2129 struct access *root = access;
2131 if (!build_access_subtree (&access))
2132 return false;
2133 root->next_grp = access;
2135 return true;
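
/* As an illustration, assuming 32-bit int, representatives for a
   hypothetical

     struct In { int a; int b; };
     struct Out { struct In in; int c; } o;

   with accesses o <0, 96>, o.in <0, 64>, o.in.b <32, 32> and o.c <64, 32>
   are linked into the tree

     o <0, 96>
       o.in <0, 64>
         o.in.b <32, 32>
       o.c <64, 32>

   where children hang off first_child and are chained by next_sibling,
   while next_grp keeps linking the roots of separate trees.  */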
2138 /* Return true if EXPR contains some ARRAY_REFs into a variably-bounded
2139 array. */
2141 static bool
2142 expr_with_var_bounded_array_refs_p (tree expr)
2144 while (handled_component_p (expr))
2146 if (TREE_CODE (expr) == ARRAY_REF
2147 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2148 return true;
2149 expr = TREE_OPERAND (expr, 0);
2151 return false;
2154 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2155 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2156 sorts of access flags appropriately along the way, notably propagating
2157 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2158 grp_total_scalarization from PARENT down to ROOT.
2160 Creating a replacement for a scalar access is considered beneficial if its
2161 grp_hint is set (this means we are either attempting total scalarization or
2162 there is more than one direct read access) or according to the following
2163 table:
2165 Access written to through a scalar type (once or more times)
2167 | Written to in an assignment statement
2169 | | Access read as scalar _once_
2170 | | |
2171 | | | Read in an assignment statement
2172 | | | |
2173 | | | | Scalarize Comment
2174 -----------------------------------------------------------------------------
2175 0 0 0 0 No access for the scalar
2176 0 0 0 1 No access for the scalar
2177 0 0 1 0 No Single read - won't help
2178 0 0 1 1 No The same case
2179 0 1 0 0 No access for the scalar
2180 0 1 0 1 No access for the scalar
2181 0 1 1 0 Yes s = *g; return s.i;
2182 0 1 1 1 Yes The same case as above
2183 1 0 0 0 No Won't help
2184 1 0 0 1 Yes s.i = 1; *g = s;
2185 1 0 1 0 Yes s.i = 5; g = s.i;
2186 1 0 1 1 Yes The same case as above
2187 1 1 0 0 No Won't help.
2188 1 1 0 1 Yes s.i = 1; *g = s;
2189 1 1 1 0 Yes s = *g; return s.i;
2190 1 1 1 1 Yes Any of the above yeses */
2192 static bool
2193 analyze_access_subtree (struct access *root, struct access *parent,
2194 bool allow_replacements)
2196 struct access *child;
2197 HOST_WIDE_INT limit = root->offset + root->size;
2198 HOST_WIDE_INT covered_to = root->offset;
2199 bool scalar = is_gimple_reg_type (root->type);
2200 bool hole = false, sth_created = false;
2202 if (parent)
2204 if (parent->grp_read)
2205 root->grp_read = 1;
2206 if (parent->grp_assignment_read)
2207 root->grp_assignment_read = 1;
2208 if (parent->grp_write)
2209 root->grp_write = 1;
2210 if (parent->grp_assignment_write)
2211 root->grp_assignment_write = 1;
2212 if (parent->grp_total_scalarization)
2213 root->grp_total_scalarization = 1;
2216 if (root->grp_unscalarizable_region)
2217 allow_replacements = false;
2219 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2220 allow_replacements = false;
2222 for (child = root->first_child; child; child = child->next_sibling)
2224 hole |= covered_to < child->offset;
2225 sth_created |= analyze_access_subtree (child, root,
2226 allow_replacements && !scalar);
2228 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2229 root->grp_total_scalarization &= child->grp_total_scalarization;
2230 if (child->grp_covered)
2231 covered_to += child->size;
2232 else
2233 hole = true;
2236 if (allow_replacements && scalar && !root->first_child
2237 && (root->grp_hint
2238 || ((root->grp_scalar_read || root->grp_assignment_read)
2239 && (root->grp_scalar_write || root->grp_assignment_write))))
2241 /* Always create access replacements that cover the whole access.
2242 For integral types this means the precision has to match.
2243 Avoid assumptions based on the integral type kind, too. */
2244 if (INTEGRAL_TYPE_P (root->type)
2245 && (TREE_CODE (root->type) != INTEGER_TYPE
2246 || TYPE_PRECISION (root->type) != root->size)
2247 /* But leave bitfield accesses alone. */
2248 && (TREE_CODE (root->expr) != COMPONENT_REF
2249 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2251 tree rt = root->type;
2252 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2253 && (root->size % BITS_PER_UNIT) == 0);
2254 root->type = build_nonstandard_integer_type (root->size,
2255 TYPE_UNSIGNED (rt));
2256 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2257 root->base, root->offset,
2258 root->type, NULL, false);
2260 if (dump_file && (dump_flags & TDF_DETAILS))
2262 fprintf (dump_file, "Changing the type of a replacement for ");
2263 print_generic_expr (dump_file, root->base, 0);
2264 fprintf (dump_file, " offset: %u, size: %u ",
2265 (unsigned) root->offset, (unsigned) root->size);
2266 fprintf (dump_file, " to an integer.\n");
2270 root->grp_to_be_replaced = 1;
2271 root->replacement_decl = create_access_replacement (root);
2272 sth_created = true;
2273 hole = false;
2275 else
2277 if (allow_replacements
2278 && scalar && !root->first_child
2279 && (root->grp_scalar_write || root->grp_assignment_write)
2280 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2281 DECL_UID (root->base)))
2283 gcc_checking_assert (!root->grp_scalar_read
2284 && !root->grp_assignment_read);
2285 sth_created = true;
2286 if (MAY_HAVE_DEBUG_STMTS)
2288 root->grp_to_be_debug_replaced = 1;
2289 root->replacement_decl = create_access_replacement (root);
2293 if (covered_to < limit)
2294 hole = true;
2295 if (scalar)
2296 root->grp_total_scalarization = 0;
2299 if (!hole || root->grp_total_scalarization)
2300 root->grp_covered = 1;
2301 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2302 root->grp_unscalarized_data = 1; /* not covered and written to */
2303 return sth_created;
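
/* To illustrate the integral-type adjustment above: assuming an 8-bit
   _Bool, an access such as s.b in

     struct S { _Bool b; int i; } s;

   has a BOOLEAN_TYPE rather than an INTEGER_TYPE, so (not being a
   bit-field) its replacement type is rewritten via
   build_nonstandard_integer_type (8, ...) to a plain 8-bit integer that
   covers the whole access.  */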
2306 /* Analyze all access trees linked by next_grp by the means of
2307 analyze_access_subtree. */
2308 static bool
2309 analyze_access_trees (struct access *access)
2311 bool ret = false;
2313 while (access)
2315 if (analyze_access_subtree (access, NULL, true))
2316 ret = true;
2317 access = access->next_grp;
2320 return ret;
2323 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2324 SIZE would conflict with an already existing one. If exactly such a child
2325 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2327 static bool
2328 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2329 HOST_WIDE_INT size, struct access **exact_match)
2331 struct access *child;
2333 for (child = lacc->first_child; child; child = child->next_sibling)
2335 if (child->offset == norm_offset && child->size == size)
2337 *exact_match = child;
2338 return true;
2341 if (child->offset < norm_offset + size
2342 && child->offset + child->size > norm_offset)
2343 return true;
2346 return false;
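
/* A worked example: with existing children <0, 32> and <32, 32> of LACC,
   a candidate child <32, 32> is an exact match and is stored in
   *EXACT_MATCH, a candidate <16, 32> overlaps <0, 32> only partially and
   therefore conflicts, and a candidate <64, 32> conflicts with
   neither.  */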
2349 /* Create a new child access of PARENT, with all properties just like MODEL
2350 except for its offset and with its grp_write true and grp_read false.
2351 Return the new access or NULL if it cannot be created. Note that this access
2352 is created long after all splicing and sorting; it's not located in any
2353 access vector and is automatically a representative of its group. */
2355 static struct access *
2356 create_artificial_child_access (struct access *parent, struct access *model,
2357 HOST_WIDE_INT new_offset)
2359 struct access **child;
2360 tree expr = parent->base;
2362 gcc_assert (!model->grp_unscalarizable_region);
2364 struct access *access = access_pool.allocate ();
2365 memset (access, 0, sizeof (struct access));
2366 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2367 model->type))
2369 access->grp_no_warning = true;
2370 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2371 new_offset, model, NULL, false);
2374 access->base = parent->base;
2375 access->expr = expr;
2376 access->offset = new_offset;
2377 access->size = model->size;
2378 access->type = model->type;
2379 access->grp_write = true;
2380 access->grp_read = false;
2382 child = &parent->first_child;
2383 while (*child && (*child)->offset < new_offset)
2384 child = &(*child)->next_sibling;
2386 access->next_sibling = *child;
2387 *child = access;
2389 return access;
2393 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2394 true if any new subaccess was created. Additionally, if RACC is a scalar
2395 access but LACC is not, change the type of the latter, if possible. */
2397 static bool
2398 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2400 struct access *rchild;
2401 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2402 bool ret = false;
2404 if (is_gimple_reg_type (lacc->type)
2405 || lacc->grp_unscalarizable_region
2406 || racc->grp_unscalarizable_region)
2407 return false;
2409 if (is_gimple_reg_type (racc->type))
2411 if (!lacc->first_child && !racc->first_child)
2413 tree t = lacc->base;
2415 lacc->type = racc->type;
2416 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2417 lacc->offset, racc->type))
2418 lacc->expr = t;
2419 else
2421 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2422 lacc->base, lacc->offset,
2423 racc, NULL, false);
2424 lacc->grp_no_warning = true;
2427 return false;
2430 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2432 struct access *new_acc = NULL;
2433 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2435 if (rchild->grp_unscalarizable_region)
2436 continue;
2438 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2439 &new_acc))
2441 if (new_acc)
2443 rchild->grp_hint = 1;
2444 new_acc->grp_hint |= new_acc->grp_read;
2445 if (rchild->first_child)
2446 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2448 continue;
2451 rchild->grp_hint = 1;
2452 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2453 if (new_acc)
2455 ret = true;
2456 if (racc->first_child)
2457 propagate_subaccesses_across_link (new_acc, rchild);
2461 return ret;
2464 /* Propagate all subaccesses across assignment links. */
2466 static void
2467 propagate_all_subaccesses (void)
2469 while (work_queue_head)
2471 struct access *racc = pop_access_from_work_queue ();
2472 struct assign_link *link;
2474 gcc_assert (racc->first_link);
2476 for (link = racc->first_link; link; link = link->next)
2478 struct access *lacc = link->lacc;
2480 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2481 continue;
2482 lacc = lacc->group_representative;
2483 if (propagate_subaccesses_across_link (lacc, racc)
2484 && lacc->first_link)
2485 add_access_to_work_queue (lacc);
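
/* For illustration, assuming 32-bit int, in a hypothetical

     struct S a, b;
     b = a;          (assign link with lacc for b, racc for a)
     t = a.i;        (gives a a child access for the field i)

   the child of a's access tree is propagated across the link so that b
   obtains a matching artificial child access, which later allows the
   aggregate copy to be rewritten as a copy between the scalar
   replacements of a.i and b.i.  */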
2490 /* Go through all accesses collected throughout the (intraprocedural) analysis
2491 stage, exclude overlapping ones, identify representatives and build trees
2492 out of them, making decisions about scalarization on the way. Return true
2493 iff there are any to-be-scalarized variables after this stage. */
2495 static bool
2496 analyze_all_variable_accesses (void)
2498 int res = 0;
2499 bitmap tmp = BITMAP_ALLOC (NULL);
2500 bitmap_iterator bi;
2501 unsigned i;
2502 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2504 enum compiler_param param = optimize_speed_p
2505 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2506 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2508 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2509 fall back to a target default. */
2510 unsigned HOST_WIDE_INT max_scalarization_size
2511 = global_options_set.x_param_values[param]
2512 ? PARAM_VALUE (param)
2513 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2515 max_scalarization_size *= BITS_PER_UNIT;
2517 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2518 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2519 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2521 tree var = candidate (i);
2523 if (TREE_CODE (var) == VAR_DECL
2524 && type_consists_of_records_p (TREE_TYPE (var)))
2526 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2527 <= max_scalarization_size)
2529 create_total_scalarization_access (var);
2530 completely_scalarize_record (var, var, 0, var);
2531 if (dump_file && (dump_flags & TDF_DETAILS))
2533 fprintf (dump_file, "Will attempt to totally scalarize ");
2534 print_generic_expr (dump_file, var, 0);
2535 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2538 else if (dump_file && (dump_flags & TDF_DETAILS))
2540 fprintf (dump_file, "Too big to totally scalarize: ");
2541 print_generic_expr (dump_file, var, 0);
2542 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2547 bitmap_copy (tmp, candidate_bitmap);
2548 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2550 tree var = candidate (i);
2551 struct access *access;
2553 access = sort_and_splice_var_accesses (var);
2554 if (!access || !build_access_trees (access))
2555 disqualify_candidate (var,
2556 "No or inhibitingly overlapping accesses.");
2559 propagate_all_subaccesses ();
2561 bitmap_copy (tmp, candidate_bitmap);
2562 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2564 tree var = candidate (i);
2565 struct access *access = get_first_repr_for_decl (var);
2567 if (analyze_access_trees (access))
2569 res++;
2570 if (dump_file && (dump_flags & TDF_DETAILS))
2572 fprintf (dump_file, "\nAccess trees for ");
2573 print_generic_expr (dump_file, var, 0);
2574 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2575 dump_access_tree (dump_file, access);
2576 fprintf (dump_file, "\n");
2579 else
2580 disqualify_candidate (var, "No scalar replacements to be created.");
2583 BITMAP_FREE (tmp);
2585 if (res)
2587 statistics_counter_event (cfun, "Scalarized aggregates", res);
2588 return true;
2590 else
2591 return false;
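
/* A usage note on the size limit above: when neither --param is given,
   the limit defaults to the move ratio times the word size.  Assuming
   the parameter names from params.def, it can be tuned from the command
   line, e.g.

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 ...

   with the -Osize variant applying when optimizing for size.  */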
2594 /* Generate statements copying scalar replacements of accesses within a subtree
2595 into or out of AGG. ACCESS, all its children, siblings and their children
2596 are to be processed. AGG is an aggregate type expression (it can be a
2597 declaration but does not have to be; it can for example also be a MEM_REF or
2598 a series of handled components). TOP_OFFSET is the offset of the processed
2599 subtree which has to be subtracted from offsets of individual accesses to
2600 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2601 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2602 otherwise copy all. GSI is a statement iterator used to place the new
2603 statements. WRITE should be true when the statements should write from AGG
2604 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2605 statements will be added after the current statement in GSI, they will be
2606 added before the statement otherwise. */
2608 static void
2609 generate_subtree_copies (struct access *access, tree agg,
2610 HOST_WIDE_INT top_offset,
2611 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2612 gimple_stmt_iterator *gsi, bool write,
2613 bool insert_after, location_t loc)
2617 if (chunk_size && access->offset >= start_offset + chunk_size)
2618 return;
2620 if (access->grp_to_be_replaced
2621 && (chunk_size == 0
2622 || access->offset + access->size > start_offset))
2624 tree expr, repl = get_access_replacement (access);
2625 gassign *stmt;
2627 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2628 access, gsi, insert_after);
2630 if (write)
2632 if (access->grp_partial_lhs)
2633 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2634 !insert_after,
2635 insert_after ? GSI_NEW_STMT
2636 : GSI_SAME_STMT);
2637 stmt = gimple_build_assign (repl, expr);
2639 else
2641 TREE_NO_WARNING (repl) = 1;
2642 if (access->grp_partial_lhs)
2643 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2644 !insert_after,
2645 insert_after ? GSI_NEW_STMT
2646 : GSI_SAME_STMT);
2647 stmt = gimple_build_assign (expr, repl);
2649 gimple_set_location (stmt, loc);
2651 if (insert_after)
2652 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2653 else
2654 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2655 update_stmt (stmt);
2656 sra_stats.subtree_copies++;
2658 else if (write
2659 && access->grp_to_be_debug_replaced
2660 && (chunk_size == 0
2661 || access->offset + access->size > start_offset))
2663 gdebug *ds;
2664 tree drhs = build_debug_ref_for_model (loc, agg,
2665 access->offset - top_offset,
2666 access);
2667 ds = gimple_build_debug_bind (get_access_replacement (access),
2668 drhs, gsi_stmt (*gsi));
2669 if (insert_after)
2670 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2671 else
2672 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2675 if (access->first_child)
2676 generate_subtree_copies (access->first_child, agg, top_offset,
2677 start_offset, chunk_size, gsi,
2678 write, insert_after, loc);
2680 access = access->next_sibling;
2682 while (access);
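
/* For illustration (replacement name hypothetical): with WRITE false,
   flushing the replacement of s.i back into the aggregate inserts

     s.i = SR_s_i;

   while with WRITE true the opposite copy

     SR_s_i = s.i;

   refreshes the replacement from the aggregate; INSERT_AFTER merely
   selects whether the new statement goes after or before the statement
   at GSI.  */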
2685 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2686 the root of the subtree to be processed. GSI is the statement iterator used
2687 for inserting statements which are added after the current statement if
2688 INSERT_AFTER is true or before it otherwise. */
2690 static void
2691 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2692 bool insert_after, location_t loc)
2695 struct access *child;
2697 if (access->grp_to_be_replaced)
2699 gassign *stmt;
2701 stmt = gimple_build_assign (get_access_replacement (access),
2702 build_zero_cst (access->type));
2703 if (insert_after)
2704 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2705 else
2706 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2707 update_stmt (stmt);
2708 gimple_set_location (stmt, loc);
2710 else if (access->grp_to_be_debug_replaced)
2712 gdebug *ds
2713 = gimple_build_debug_bind (get_access_replacement (access),
2714 build_zero_cst (access->type),
2715 gsi_stmt (*gsi));
2716 if (insert_after)
2717 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2718 else
2719 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2722 for (child = access->first_child; child; child = child->next_sibling)
2723 init_subtree_with_zero (child, gsi, insert_after, loc);
2726 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2727 root of the subtree to be processed. GSI is the statement iterator used
2728 for inserting statements which are added after the current statement if
2729 INSERT_AFTER is true or before it otherwise. */
2731 static void
2732 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2733 bool insert_after, location_t loc)
2736 struct access *child;
2738 if (access->grp_to_be_replaced)
2740 tree rep = get_access_replacement (access);
2741 tree clobber = build_constructor (access->type, NULL);
2742 TREE_THIS_VOLATILE (clobber) = 1;
2743 gimple stmt = gimple_build_assign (rep, clobber);
2745 if (insert_after)
2746 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2747 else
2748 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2749 update_stmt (stmt);
2750 gimple_set_location (stmt, loc);
2753 for (child = access->first_child; child; child = child->next_sibling)
2754 clobber_subtree (child, gsi, insert_after, loc);
2757 /* Search for an access representative for the given expression EXPR and
2758 return it or NULL if it cannot be found. */
2760 static struct access *
2761 get_access_for_expr (tree expr)
2763 HOST_WIDE_INT offset, size, max_size;
2764 tree base;
2766 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2767 a different size than the size of its argument and we need the latter
2768 one. */
2769 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2770 expr = TREE_OPERAND (expr, 0);
2772 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2773 if (max_size == -1 || !DECL_P (base))
2774 return NULL;
2776 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2777 return NULL;
2779 return get_var_base_offset_size_access (base, offset, max_size);
2782 /* Replace the expression EXPR with a scalar replacement if there is one and
2783 generate other statements to do type conversion or subtree copying if
2784 necessary. GSI is used to place newly created statements, WRITE is true if
2785 the expression is being written to (it is on a LHS of a statement or output
2786 in an assembly statement). */
2788 static bool
2789 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2791 location_t loc;
2792 struct access *access;
2793 tree type, bfr, orig_expr;
2795 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2797 bfr = *expr;
2798 expr = &TREE_OPERAND (*expr, 0);
2800 else
2801 bfr = NULL_TREE;
2803 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2804 expr = &TREE_OPERAND (*expr, 0);
2805 access = get_access_for_expr (*expr);
2806 if (!access)
2807 return false;
2808 type = TREE_TYPE (*expr);
2809 orig_expr = *expr;
2811 loc = gimple_location (gsi_stmt (*gsi));
2812 gimple_stmt_iterator alt_gsi = gsi_none ();
2813 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2815 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2816 gsi = &alt_gsi;
2819 if (access->grp_to_be_replaced)
2821 tree repl = get_access_replacement (access);
2822 /* If we replace a non-register typed access simply use the original
2823 access expression to extract the scalar component afterwards.
2824 This happens if scalarizing a function return value or parameter
2825 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2826 gcc.c-torture/compile/20011217-1.c.
2828 We also want to use this when accessing a complex or vector which can
2829 be accessed as a different type too, potentially creating a need for
2830 type conversion (see PR42196) and when scalarized unions are involved
2831 in assembler statements (see PR42398). */
2832 if (!useless_type_conversion_p (type, access->type))
2834 tree ref;
2836 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2838 if (write)
2840 gassign *stmt;
2842 if (access->grp_partial_lhs)
2843 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2844 false, GSI_NEW_STMT);
2845 stmt = gimple_build_assign (repl, ref);
2846 gimple_set_location (stmt, loc);
2847 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2849 else
2851 gassign *stmt;
2853 if (access->grp_partial_lhs)
2854 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2855 true, GSI_SAME_STMT);
2856 stmt = gimple_build_assign (ref, repl);
2857 gimple_set_location (stmt, loc);
2858 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2861 else
2862 *expr = repl;
2863 sra_stats.exprs++;
2865 else if (write && access->grp_to_be_debug_replaced)
2867 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2868 NULL_TREE,
2869 gsi_stmt (*gsi));
2870 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2873 if (access->first_child)
2875 HOST_WIDE_INT start_offset, chunk_size;
2876 if (bfr
2877 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2878 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2880 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2881 start_offset = access->offset
2882 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2884 else
2885 start_offset = chunk_size = 0;
2887 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2888 start_offset, chunk_size, gsi, write, write,
2889 loc);
2891 return true;
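
/* A before/after sketch (replacement name hypothetical): a load such as

     t_1 = s.i;

   whose access is scheduled for replacement becomes

     t_1 = SR_s_i;

   and when the replacement's type does not usefully convert to the type
   of the expression, the original reference is rebuilt with
   build_ref_for_model and an explicit copy statement is inserted
   instead.  */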
2894 /* Where scalar replacements of the RHS have been written to when a replacement
2895 of the LHS of an assignment cannot be directly loaded from a replacement of
2896 the RHS. */
2897 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2898 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2899 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2901 struct subreplacement_assignment_data
2903 /* Offset of the access representing the lhs of the assignment. */
2904 HOST_WIDE_INT left_offset;
2906 /* LHS and RHS of the original assignment. */
2907 tree assignment_lhs, assignment_rhs;
2909 /* Access representing the rhs of the whole assignment. */
2910 struct access *top_racc;
2912 /* Stmt iterator used for statement insertions after the original assignment.
2913 It points to the main GSI used to traverse a BB during function body
2914 modification. */
2915 gimple_stmt_iterator *new_gsi;
2917 /* Stmt iterator used for statement insertions before the original
2918 assignment. Keeps on pointing to the original statement. */
2919 gimple_stmt_iterator old_gsi;
2921 /* Location of the assignment. */
2922 location_t loc;
2924 /* Keeps the information whether we have needed to refresh replacements of
2925 the LHS and from which side of the assignment this takes place. */
2926 enum unscalarized_data_handling refreshed;
2929 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2930 their base aggregate if there are unscalarized data or directly to the LHS
2931 of the original assignment otherwise. */
2933 static void
2934 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
2936 tree src;
2937 if (sad->top_racc->grp_unscalarized_data)
2939 src = sad->assignment_rhs;
2940 sad->refreshed = SRA_UDH_RIGHT;
2942 else
2944 src = sad->assignment_lhs;
2945 sad->refreshed = SRA_UDH_LEFT;
2947 generate_subtree_copies (sad->top_racc->first_child, src,
2948 sad->top_racc->offset, 0, 0,
2949 &sad->old_gsi, false, false, sad->loc);
2952 /* Try to generate statements to load all sub-replacements in an access subtree
2953 formed by children of LACC from scalar replacements in the SAD->top_racc
2954 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
2955 and load the accesses from it. */
2957 static void
2958 load_assign_lhs_subreplacements (struct access *lacc,
2959 struct subreplacement_assignment_data *sad)
2961 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2963 HOST_WIDE_INT offset;
2964 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
2966 if (lacc->grp_to_be_replaced)
2968 struct access *racc;
2969 gassign *stmt;
2970 tree rhs;
2972 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
2973 if (racc && racc->grp_to_be_replaced)
2975 rhs = get_access_replacement (racc);
2976 if (!useless_type_conversion_p (lacc->type, racc->type))
2977 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
2978 lacc->type, rhs);
2980 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2981 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
2982 NULL_TREE, true, GSI_SAME_STMT);
2984 else
2986 /* No suitable access on the right hand side, need to load from
2987 the aggregate. See if we have to update it first... */
2988 if (sad->refreshed == SRA_UDH_NONE)
2989 handle_unscalarized_data_in_subtree (sad);
2991 if (sad->refreshed == SRA_UDH_LEFT)
2992 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
2993 lacc->offset - sad->left_offset,
2994 lacc, sad->new_gsi, true);
2995 else
2996 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
2997 lacc->offset - sad->left_offset,
2998 lacc, sad->new_gsi, true);
2999 if (lacc->grp_partial_lhs)
3000 rhs = force_gimple_operand_gsi (sad->new_gsi,
3001 rhs, true, NULL_TREE,
3002 false, GSI_NEW_STMT);
3005 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3006 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3007 gimple_set_location (stmt, sad->loc);
3008 update_stmt (stmt);
3009 sra_stats.subreplacements++;
3011 else
3013 if (sad->refreshed == SRA_UDH_NONE
3014 && lacc->grp_read && !lacc->grp_covered)
3015 handle_unscalarized_data_in_subtree (sad);
3017 if (lacc && lacc->grp_to_be_debug_replaced)
3019 gdebug *ds;
3020 tree drhs;
3021 struct access *racc = find_access_in_subtree (sad->top_racc,
3022 offset,
3023 lacc->size);
3025 if (racc && racc->grp_to_be_replaced)
3027 if (racc->grp_write)
3028 drhs = get_access_replacement (racc);
3029 else
3030 drhs = NULL;
3032 else if (sad->refreshed == SRA_UDH_LEFT)
3033 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3034 lacc->offset, lacc);
3035 else if (sad->refreshed == SRA_UDH_RIGHT)
3036 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3037 offset, lacc);
3038 else
3039 drhs = NULL_TREE;
3040 if (drhs
3041 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3042 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3043 lacc->type, drhs);
3044 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3045 drhs, gsi_stmt (sad->old_gsi));
3046 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3050 if (lacc->first_child)
3051 load_assign_lhs_subreplacements (lacc, sad);
3055 /* Result code for SRA assignment modification. */
3056 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3057 SRA_AM_MODIFIED, /* stmt changed but not
3058 removed */
3059 SRA_AM_REMOVED }; /* stmt eliminated */
3061 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3062 to the assignment and GSI is the statement iterator pointing at it. Returns
3063 the same values as sra_modify_assign. */
3065 static enum assignment_mod_result
3066 sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
3068 tree lhs = gimple_assign_lhs (stmt);
3069 struct access *acc = get_access_for_expr (lhs);
3070 if (!acc)
3071 return SRA_AM_NONE;
3072 location_t loc = gimple_location (stmt);
3074 if (gimple_clobber_p (stmt))
3076 /* Clobber the replacement variable. */
3077 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3078 /* Remove clobbers of fully scalarized variables, they are dead. */
3079 if (acc->grp_covered)
3081 unlink_stmt_vdef (stmt);
3082 gsi_remove (gsi, true);
3083 release_defs (stmt);
3084 return SRA_AM_REMOVED;
3086 else
3087 return SRA_AM_MODIFIED;
3090 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3092 /* I have never seen this code path trigger but if it can happen, the
3093 following should handle it gracefully. */
3094 if (access_has_children_p (acc))
3095 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3096 true, true, loc);
3097 return SRA_AM_MODIFIED;
3100 if (acc->grp_covered)
3102 init_subtree_with_zero (acc, gsi, false, loc);
3103 unlink_stmt_vdef (stmt);
3104 gsi_remove (gsi, true);
3105 release_defs (stmt);
3106 return SRA_AM_REMOVED;
3108 else
3110 init_subtree_with_zero (acc, gsi, true, loc);
3111 return SRA_AM_MODIFIED;
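
/* For illustration: the gimplified form of

     struct S s = { 0 };

   reaches this function as an assignment from an empty CONSTRUCTOR and is
   turned into zero-assignments to the scalar replacements, roughly

     SR_s_i = 0;
     SR_s_f = 0.0;

   with the original statement removed entirely when the replacements
   cover all of s (SRA_AM_REMOVED above).  */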
3115 /* Create and return a new suitable default definition SSA_NAME for RACC which
3116 is an access describing an uninitialized part of an aggregate that is being
3117 loaded. */
3119 static tree
3120 get_repl_default_def_ssa_name (struct access *racc)
3122 gcc_checking_assert (!racc->grp_to_be_replaced
3123 && !racc->grp_to_be_debug_replaced);
3124 if (!racc->replacement_decl)
3125 racc->replacement_decl = create_access_replacement (racc);
3126 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3129 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3130 bit-field field declaration somewhere in it. */
3132 static inline bool
3133 contains_vce_or_bfcref_p (const_tree ref)
3135 while (handled_component_p (ref))
3137 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3138 || (TREE_CODE (ref) == COMPONENT_REF
3139 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3140 return true;
3141 ref = TREE_OPERAND (ref, 0);
3144 return false;
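
/* For example, given

     struct S { int b : 3; int i; } s;

   a reference like s.b contains a COMPONENT_REF of a bit-field
   FIELD_DECL, and a GIMPLE reference such as

     VIEW_CONVERT_EXPR<float>(x)

   contains a VIEW_CONVERT_EXPR; both make this predicate return
   true.  */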
3147 /* Examine both sides of the assignment statement pointed to by STMT, replace
3148 them with a scalar replacement if there is one and generate copying of
3149 replacements if scalarized aggregates have been used in the assignment. GSI
3150 is used to hold generated statements for type conversions and subtree
3151 copying. */
3153 static enum assignment_mod_result
3154 sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
3156 struct access *lacc, *racc;
3157 tree lhs, rhs;
3158 bool modify_this_stmt = false;
3159 bool force_gimple_rhs = false;
3160 location_t loc;
3161 gimple_stmt_iterator orig_gsi = *gsi;
3163 if (!gimple_assign_single_p (stmt))
3164 return SRA_AM_NONE;
3165 lhs = gimple_assign_lhs (stmt);
3166 rhs = gimple_assign_rhs1 (stmt);
3168 if (TREE_CODE (rhs) == CONSTRUCTOR)
3169 return sra_modify_constructor_assign (stmt, gsi);
3171 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3172 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3173 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3175 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3176 gsi, false);
3177 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3178 gsi, true);
3179 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3182 lacc = get_access_for_expr (lhs);
3183 racc = get_access_for_expr (rhs);
3184 if (!lacc && !racc)
3185 return SRA_AM_NONE;
3187 loc = gimple_location (stmt);
3188 if (lacc && lacc->grp_to_be_replaced)
3190 lhs = get_access_replacement (lacc);
3191 gimple_assign_set_lhs (stmt, lhs);
3192 modify_this_stmt = true;
3193 if (lacc->grp_partial_lhs)
3194 force_gimple_rhs = true;
3195 sra_stats.exprs++;
3198 if (racc && racc->grp_to_be_replaced)
3200 rhs = get_access_replacement (racc);
3201 modify_this_stmt = true;
3202 if (racc->grp_partial_lhs)
3203 force_gimple_rhs = true;
3204 sra_stats.exprs++;
3206 else if (racc
3207 && !racc->grp_unscalarized_data
3208 && TREE_CODE (lhs) == SSA_NAME
3209 && !access_has_replacements_p (racc))
3211 rhs = get_repl_default_def_ssa_name (racc);
3212 modify_this_stmt = true;
3213 sra_stats.exprs++;
3216 if (modify_this_stmt)
3218 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3220 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3221 ??? This should move to fold_stmt which we simply should
3222 call after building a VIEW_CONVERT_EXPR here. */
3223 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3224 && !contains_bitfld_component_ref_p (lhs))
3226 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3227 gimple_assign_set_lhs (stmt, lhs);
3229 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3230 && !contains_vce_or_bfcref_p (rhs))
3231 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3233 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3235 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3236 rhs);
3237 if (is_gimple_reg_type (TREE_TYPE (lhs))
3238 && TREE_CODE (lhs) != SSA_NAME)
3239 force_gimple_rhs = true;
3244 if (lacc && lacc->grp_to_be_debug_replaced)
3246 tree dlhs = get_access_replacement (lacc);
3247 tree drhs = unshare_expr (rhs);
3248 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3250 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3251 && !contains_vce_or_bfcref_p (drhs))
3252 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3253 if (drhs
3254 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3255 TREE_TYPE (drhs)))
3256 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3257 TREE_TYPE (dlhs), drhs);
3259 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3260 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3263 /* From this point on, the function deals with assignments in between
3264 aggregates when at least one has scalar reductions of some of its
3265 components. There are three possible scenarios: 1) both the LHS and the RHS
3266 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3268 In the first case, we would like to load the LHS components from RHS
3269 components whenever possible. If that is not possible, we would like to
3270 read it directly from the RHS (after updating it by storing in it its own
3271 components). If there are some necessary unscalarized data in the LHS,
3272 those will be loaded by the original assignment too. If neither of these
3273 cases happen, the original statement can be removed. Most of this is done
3274 by load_assign_lhs_subreplacements.
3276 In the second case, we would like to store all RHS scalarized components
3277 directly into LHS and if they cover the aggregate completely, remove the
3278 statement too. In the third case, we want the LHS components to be loaded
3279 directly from the RHS (DSE will remove the original statement if it
3280 becomes redundant).
3282 This is a bit complex but manageable when types match and when unions do
3283 not cause confusion in a way that we cannot really load a component of LHS
3284 from the RHS or vice versa (the access representing this level can have
3285 subaccesses that are accessible only through a different union field at a
3286 higher level - different from the one used in the examined expression).
3287 Unions are fun.
3289 Therefore, I specially handle a fourth case, happening when there is a
3290 specific type cast or it is impossible to locate a scalarized subaccess on
3291 the other side of the expression. If that happens, I simply "refresh" the
3292 RHS by storing its scalarized components in it, leave the original statement
3293 there to do the copying and then load the scalar replacements of the LHS.
3294 This is what the first branch does. */
3296 if (modify_this_stmt
3297 || gimple_has_volatile_ops (stmt)
3298 || contains_vce_or_bfcref_p (rhs)
3299 || contains_vce_or_bfcref_p (lhs)
3300 || stmt_ends_bb_p (stmt))
3302 if (access_has_children_p (racc))
3303 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3304 gsi, false, false, loc);
3305 if (access_has_children_p (lacc))
3307 gimple_stmt_iterator alt_gsi = gsi_none ();
3308 if (stmt_ends_bb_p (stmt))
3310 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3311 gsi = &alt_gsi;
3313 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3314 gsi, true, true, loc);
3316 sra_stats.separate_lhs_rhs_handling++;
3318 /* This gimplification must be done after generate_subtree_copies,
3319 lest we insert the subtree copies in the middle of the gimplified
3320 sequence. */
3321 if (force_gimple_rhs)
3322 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3323 true, GSI_SAME_STMT);
3324 if (gimple_assign_rhs1 (stmt) != rhs)
3326 modify_this_stmt = true;
3327 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3328 gcc_assert (stmt == gsi_stmt (orig_gsi));
3331 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3333 else
3335 if (access_has_children_p (lacc)
3336 && access_has_children_p (racc)
3337 /* When an access represents an unscalarizable region, it usually
3338 represents accesses with variable offset and thus must not be used
3339 to generate new memory accesses. */
3340 && !lacc->grp_unscalarizable_region
3341 && !racc->grp_unscalarizable_region)
3343 struct subreplacement_assignment_data sad;
3345 sad.left_offset = lacc->offset;
3346 sad.assignment_lhs = lhs;
3347 sad.assignment_rhs = rhs;
3348 sad.top_racc = racc;
3349 sad.old_gsi = *gsi;
3350 sad.new_gsi = gsi;
3351 sad.loc = gimple_location (stmt);
3352 sad.refreshed = SRA_UDH_NONE;
3354 if (lacc->grp_read && !lacc->grp_covered)
3355 handle_unscalarized_data_in_subtree (&sad);
3357 load_assign_lhs_subreplacements (lacc, &sad);
3358 if (sad.refreshed != SRA_UDH_RIGHT)
3360 gsi_next (gsi);
3361 unlink_stmt_vdef (stmt);
3362 gsi_remove (&sad.old_gsi, true);
3363 release_defs (stmt);
3364 sra_stats.deleted++;
3365 return SRA_AM_REMOVED;
3368 else
3370 if (access_has_children_p (racc)
3371 && !racc->grp_unscalarized_data)
3373 if (dump_file)
3375 fprintf (dump_file, "Removing load: ");
3376 print_gimple_stmt (dump_file, stmt, 0, 0);
3378 generate_subtree_copies (racc->first_child, lhs,
3379 racc->offset, 0, 0, gsi,
3380 false, false, loc);
3381 gcc_assert (stmt == gsi_stmt (*gsi));
3382 unlink_stmt_vdef (stmt);
3383 gsi_remove (gsi, true);
3384 release_defs (stmt);
3385 sra_stats.deleted++;
3386 return SRA_AM_REMOVED;
3388 /* Restore the aggregate RHS from its components so the
3389 prevailing aggregate copy does the right thing. */
3390 if (access_has_children_p (racc))
3391 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3392 gsi, false, false, loc);
3393 /* Re-load the components of the aggregate copy destination.
3394 But use the RHS aggregate to load from to expose more
3395 optimization opportunities. */
3396 if (access_has_children_p (lacc))
3397 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3398 0, 0, gsi, true, true, loc);
3401 return SRA_AM_NONE;
3405 /* Traverse the function body and perform all modifications as decided in
3406 analyze_all_variable_accesses. Return true iff the CFG has been
3407 changed. */
3409 static bool
3410 sra_modify_function_body (void)
3412 bool cfg_changed = false;
3413 basic_block bb;
3415 FOR_EACH_BB_FN (bb, cfun)
3417 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3418 while (!gsi_end_p (gsi))
3420 gimple stmt = gsi_stmt (gsi);
3421 enum assignment_mod_result assign_result;
3422 bool modified = false, deleted = false;
3423 tree *t;
3424 unsigned i;
3426 switch (gimple_code (stmt))
3428 case GIMPLE_RETURN:
3429 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3430 if (*t != NULL_TREE)
3431 modified |= sra_modify_expr (t, &gsi, false);
3432 break;
3434 case GIMPLE_ASSIGN:
3435 assign_result = sra_modify_assign (stmt, &gsi);
3436 modified |= assign_result == SRA_AM_MODIFIED;
3437 deleted = assign_result == SRA_AM_REMOVED;
3438 break;
3440 case GIMPLE_CALL:
3441 /* Operands must be processed before the lhs. */
3442 for (i = 0; i < gimple_call_num_args (stmt); i++)
3444 t = gimple_call_arg_ptr (stmt, i);
3445 modified |= sra_modify_expr (t, &gsi, false);
3448 if (gimple_call_lhs (stmt))
3450 t = gimple_call_lhs_ptr (stmt);
3451 modified |= sra_modify_expr (t, &gsi, true);
3453 break;
3455 case GIMPLE_ASM:
3457 gasm *asm_stmt = as_a <gasm *> (stmt);
3458 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3460 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3461 modified |= sra_modify_expr (t, &gsi, false);
3463 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3465 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3466 modified |= sra_modify_expr (t, &gsi, true);
3469 break;
3471 default:
3472 break;
3475 if (modified)
3477 update_stmt (stmt);
3478 if (maybe_clean_eh_stmt (stmt)
3479 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3480 cfg_changed = true;
3482 if (!deleted)
3483 gsi_next (&gsi);
3487 gsi_commit_edge_inserts ();
3488 return cfg_changed;
3491 /* Generate statements initializing scalar replacements of parts of function
3492 parameters. */
3494 static void
3495 initialize_parameter_reductions (void)
3497 gimple_stmt_iterator gsi;
3498 gimple_seq seq = NULL;
3499 tree parm;
3501 gsi = gsi_start (seq);
3502 for (parm = DECL_ARGUMENTS (current_function_decl);
3503 parm;
3504 parm = DECL_CHAIN (parm))
3506 vec<access_p> *access_vec;
3507 struct access *access;
3509 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3510 continue;
3511 access_vec = get_base_access_vector (parm);
3512 if (!access_vec)
3513 continue;
3515 for (access = (*access_vec)[0];
3516 access;
3517 access = access->next_grp)
3518 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3519 EXPR_LOCATION (parm));
3522 seq = gsi_seq (gsi);
3523 if (seq)
3524 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
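
/* For illustration (replacement names hypothetical): for

     int f (struct S s) { return s.i + s.j; }

   the sequence inserted on the single successor edge of the entry block
   loads the replacements from the incoming parameter, roughly

     SR_s_i = s.i;
     SR_s_j = s.j;

   after which the function body can use the scalars exclusively.  */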
3527 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3528 it reveals there are components of some aggregates to be scalarized, it runs
3529 the required transformations. */
3530 static unsigned int
3531 perform_intra_sra (void)
3533 int ret = 0;
3534 sra_initialize ();
3536 if (!find_var_candidates ())
3537 goto out;
3539 if (!scan_function ())
3540 goto out;
3542 if (!analyze_all_variable_accesses ())
3543 goto out;
3545 if (sra_modify_function_body ())
3546 ret = TODO_update_ssa | TODO_cleanup_cfg;
3547 else
3548 ret = TODO_update_ssa;
3549 initialize_parameter_reductions ();
3551 statistics_counter_event (cfun, "Scalar replacements created",
3552 sra_stats.replacements);
3553 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3554 statistics_counter_event (cfun, "Subtree copy stmts",
3555 sra_stats.subtree_copies);
3556 statistics_counter_event (cfun, "Subreplacement stmts",
3557 sra_stats.subreplacements);
3558 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3559 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3560 sra_stats.separate_lhs_rhs_handling);
3562 out:
3563 sra_deinitialize ();
3564 return ret;
3567 /* Perform early intraprocedural SRA. */
3568 static unsigned int
3569 early_intra_sra (void)
3571 sra_mode = SRA_MODE_EARLY_INTRA;
3572 return perform_intra_sra ();
3575 /* Perform "late" intraprocedural SRA. */
3576 static unsigned int
3577 late_intra_sra (void)
3579 sra_mode = SRA_MODE_INTRA;
3580 return perform_intra_sra ();
3584 static bool
3585 gate_intra_sra (void)
3587 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
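
/* A usage note: both intraprocedural SRA passes are controlled by
   -ftree-sra (on by default when optimizing) and by the tree_sra debug
   counter, so SRA-related problems can be bisected by disabling the pass
   with -fno-tree-sra or by limiting it with something like

     gcc -O2 -fdbg-cnt=tree_sra:7 ...

   which lets only the first seven gate checks succeed.  */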
3591 namespace {
3593 const pass_data pass_data_sra_early =
3595 GIMPLE_PASS, /* type */
3596 "esra", /* name */
3597 OPTGROUP_NONE, /* optinfo_flags */
3598 TV_TREE_SRA, /* tv_id */
3599 ( PROP_cfg | PROP_ssa ), /* properties_required */
3600 0, /* properties_provided */
3601 0, /* properties_destroyed */
3602 0, /* todo_flags_start */
3603 TODO_update_ssa, /* todo_flags_finish */
3606 class pass_sra_early : public gimple_opt_pass
3608 public:
3609 pass_sra_early (gcc::context *ctxt)
3610 : gimple_opt_pass (pass_data_sra_early, ctxt)
3613 /* opt_pass methods: */
3614 virtual bool gate (function *) { return gate_intra_sra (); }
3615 virtual unsigned int execute (function *) { return early_intra_sra (); }
3617 }; // class pass_sra_early
3619 } // anon namespace
3621 gimple_opt_pass *
3622 make_pass_sra_early (gcc::context *ctxt)
3624 return new pass_sra_early (ctxt);
3627 namespace {
3629 const pass_data pass_data_sra =
3631 GIMPLE_PASS, /* type */
3632 "sra", /* name */
3633 OPTGROUP_NONE, /* optinfo_flags */
3634 TV_TREE_SRA, /* tv_id */
3635 ( PROP_cfg | PROP_ssa ), /* properties_required */
3636 0, /* properties_provided */
3637 0, /* properties_destroyed */
3638 TODO_update_address_taken, /* todo_flags_start */
3639 TODO_update_ssa, /* todo_flags_finish */
3642 class pass_sra : public gimple_opt_pass
3644 public:
3645 pass_sra (gcc::context *ctxt)
3646 : gimple_opt_pass (pass_data_sra, ctxt)
3649 /* opt_pass methods: */
3650 virtual bool gate (function *) { return gate_intra_sra (); }
3651 virtual unsigned int execute (function *) { return late_intra_sra (); }
3653 }; // class pass_sra
3655 } // anon namespace
3657 gimple_opt_pass *
3658 make_pass_sra (gcc::context *ctxt)
3660 return new pass_sra (ctxt);
3664 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3665 parameter. */
3667 static bool
3668 is_unused_scalar_param (tree parm)
3670 tree name;
3671 return (is_gimple_reg (parm)
3672 && (!(name = ssa_default_def (cfun, parm))
3673 || has_zero_uses (name)));
3676 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3677 examine whether there are any direct or otherwise infeasible ones. If so,
3678 return true, otherwise return false. PARM must be a gimple register with a
3679 non-NULL default definition. */
3681 static bool
3682 ptr_parm_has_direct_uses (tree parm)
3684 imm_use_iterator ui;
3685 gimple stmt;
3686 tree name = ssa_default_def (cfun, parm);
3687 bool ret = false;
3689 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3691 int uses_ok = 0;
3692 use_operand_p use_p;
3694 if (is_gimple_debug (stmt))
3695 continue;
3697 /* Valid uses include dereferences on the lhs and the rhs. */
3698 if (gimple_has_lhs (stmt))
3700 tree lhs = gimple_get_lhs (stmt);
3701 while (handled_component_p (lhs))
3702 lhs = TREE_OPERAND (lhs, 0);
3703 if (TREE_CODE (lhs) == MEM_REF
3704 && TREE_OPERAND (lhs, 0) == name
3705 && integer_zerop (TREE_OPERAND (lhs, 1))
3706 && types_compatible_p (TREE_TYPE (lhs),
3707 TREE_TYPE (TREE_TYPE (name)))
3708 && !TREE_THIS_VOLATILE (lhs))
3709 uses_ok++;
3711 if (gimple_assign_single_p (stmt))
3713 tree rhs = gimple_assign_rhs1 (stmt);
3714 while (handled_component_p (rhs))
3715 rhs = TREE_OPERAND (rhs, 0);
3716 if (TREE_CODE (rhs) == MEM_REF
3717 && TREE_OPERAND (rhs, 0) == name
3718 && integer_zerop (TREE_OPERAND (rhs, 1))
3719 && types_compatible_p (TREE_TYPE (rhs),
3720 TREE_TYPE (TREE_TYPE (name)))
3721 && !TREE_THIS_VOLATILE (rhs))
3722 uses_ok++;
3724 else if (is_gimple_call (stmt))
3726 unsigned i;
3727 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3729 tree arg = gimple_call_arg (stmt, i);
3730 while (handled_component_p (arg))
3731 arg = TREE_OPERAND (arg, 0);
3732 if (TREE_CODE (arg) == MEM_REF
3733 && TREE_OPERAND (arg, 0) == name
3734 && integer_zerop (TREE_OPERAND (arg, 1))
3735 && types_compatible_p (TREE_TYPE (arg),
3736 TREE_TYPE (TREE_TYPE (name)))
3737 && !TREE_THIS_VOLATILE (arg))
3738 uses_ok++;
3742 /* If the number of valid uses does not match the number of
3743 uses in this stmt there is an unhandled use. */
3744 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3745 --uses_ok;
3747 if (uses_ok != 0)
3748 ret = true;
3750 if (ret)
3751 BREAK_FROM_IMM_USE_STMT (ui);
3754 return ret;
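
/* For example, a function that only dereferences its pointer argument,

     int f (struct S *p) { return p->i; }

   has all uses of the default definition accounted for as valid, whereas
   one that lets the pointer itself escape,

     void g (struct S *p) { global_ptr = p; }

   leaves a use unaccounted for, so this predicate returns true and p is
   disqualified from IPA-SRA.  */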
3757 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3758 them in candidate_bitmap. Note that these do not necessarily include
3759 parameters which are unused and thus can be removed. Return true iff any
3760 such candidate has been found. */
3762 static bool
3763 find_param_candidates (void)
3765 tree parm;
3766 int count = 0;
3767 bool ret = false;
3768 const char *msg;
3770 for (parm = DECL_ARGUMENTS (current_function_decl);
3771 parm;
3772 parm = DECL_CHAIN (parm))
3774 tree type = TREE_TYPE (parm);
3775 tree_node **slot;
3777 count++;
3779 if (TREE_THIS_VOLATILE (parm)
3780 || TREE_ADDRESSABLE (parm)
3781 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3782 continue;
3784 if (is_unused_scalar_param (parm))
3786 ret = true;
3787 continue;
3790 if (POINTER_TYPE_P (type))
3792 type = TREE_TYPE (type);
3794 if (TREE_CODE (type) == FUNCTION_TYPE
3795 || TYPE_VOLATILE (type)
3796 || (TREE_CODE (type) == ARRAY_TYPE
3797 && TYPE_NONALIASED_COMPONENT (type))
3798 || !is_gimple_reg (parm)
3799 || is_va_list_type (type)
3800 || ptr_parm_has_direct_uses (parm))
3801 continue;
3803 else if (!AGGREGATE_TYPE_P (type))
3804 continue;
3806 if (!COMPLETE_TYPE_P (type)
3807 || !tree_fits_uhwi_p (TYPE_SIZE (type))
3808 || tree_to_uhwi (TYPE_SIZE (type)) == 0
3809 || (AGGREGATE_TYPE_P (type)
3810 && type_internals_preclude_sra_p (type, &msg)))
3811 continue;
3813 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3814 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
3815 *slot = parm;
3817 ret = true;
3818 if (dump_file && (dump_flags & TDF_DETAILS))
3820 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3821 print_generic_expr (dump_file, parm, 0);
3822 fprintf (dump_file, "\n");
3826 func_param_count = count;
3827 return ret;
3830 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3831 maybe_modified. */
3833 static bool
3834 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3835 void *data)
3837 struct access *repr = (struct access *) data;
3839 repr->grp_maybe_modified = 1;
3840 return true;
3843 /* Analyze what representatives (in linked lists accessible from
3844 REPRESENTATIVES) can be modified by side effects of statements in the
3845 current function. */
3847 static void
3848 analyze_modified_params (vec<access_p> representatives)
3850 int i;
3852 for (i = 0; i < func_param_count; i++)
3854 struct access *repr;
3856 for (repr = representatives[i];
3857 repr;
3858 repr = repr->next_grp)
3860 struct access *access;
3861 bitmap visited;
3862 ao_ref ar;
3864 if (no_accesses_p (repr))
3865 continue;
3866 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3867 || repr->grp_maybe_modified)
3868 continue;
3870 ao_ref_init (&ar, repr->expr);
3871 visited = BITMAP_ALLOC (NULL);
3872 for (access = repr; access; access = access->next_sibling)
3874 /* All accesses are reads, otherwise grp_maybe_modified would
3875 be trivially set. */
3876 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3877 mark_maybe_modified, repr, &visited);
3878 if (repr->grp_maybe_modified)
3879 break;
3881 BITMAP_FREE (visited);
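/* Illustration (hypothetical code; USE is an invented function): in

     void h (int *p, int *q)
     {
       *q = 1;            // store that may alias *p
       use (*p);
     }

   walking the aliased virtual definitions upwards from the load of *P
   reaches the store through Q; unless the alias oracle can rule the
   overlap out, mark_maybe_modified sets grp_maybe_modified and the
   dereference cannot be moved into the callers.  */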
3886 /* Propagate distances in bb_dereferences in the direction opposite to the
3887 control flow edges, in each step storing the maximum of the current value
3888 and the minimum of all successors. These steps are repeated until the table
3889 stabilizes. Note that BBs which might terminate the function (according to
3890 the final_bbs bitmap) are never updated in this way. */
3892 static void
3893 propagate_dereference_distances (void)
3895 basic_block bb;
3897 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
3898 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3899 FOR_EACH_BB_FN (bb, cfun)
3901 queue.quick_push (bb);
3902 bb->aux = bb;
3905 while (!queue.is_empty ())
3907 edge_iterator ei;
3908 edge e;
3909 bool change = false;
3910 int i;
3912 bb = queue.pop ();
3913 bb->aux = NULL;
3915 if (bitmap_bit_p (final_bbs, bb->index))
3916 continue;
3918 for (i = 0; i < func_param_count; i++)
3920 int idx = bb->index * func_param_count + i;
3921 bool first = true;
3922 HOST_WIDE_INT inh = 0;
3924 FOR_EACH_EDGE (e, ei, bb->succs)
3926 int succ_idx = e->dest->index * func_param_count + i;
3928 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
3929 continue;
3931 if (first)
3933 first = false;
3934 inh = bb_dereferences [succ_idx];
3936 else if (bb_dereferences [succ_idx] < inh)
3937 inh = bb_dereferences [succ_idx];
3940 if (!first && bb_dereferences[idx] < inh)
3942 bb_dereferences[idx] = inh;
3943 change = true;
3947 if (change && !bitmap_bit_p (final_bbs, bb->index))
3948 FOR_EACH_EDGE (e, ei, bb->preds)
3950 if (e->src->aux)
3951 continue;
3953 e->src->aux = e->src;
3954 queue.quick_push (e->src);
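/* A worked example of the propagation (hypothetical source and BB
   numbers; sizes assume 32-bit ints, distances are in bits):

     if (c)                 // BB2: no dereference of P itself
       t = p->a;            // BB3: distance 32 (offset 0 + size 32)
     else
       t = p->b;            // BB4: distance 64 (offset 32 + size 32)
     return t;              // BB5: distance 0

   Walking against the control flow, BB2 receives the maximum of its own
   value (0) and the minimum over its successors, min (32, 64) == 32, so
   only the first 32 bits of *P are known to be dereferenced on every
   path from the entry.  */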
3959 /* Dump a dereferences TABLE with heading STR to file F. */
3961 static void
3962 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3964 basic_block bb;
3966 fprintf (f, "%s", str);
3967 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
3968 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
3970 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3971 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3973 int i;
3974 for (i = 0; i < func_param_count; i++)
3976 int idx = bb->index * func_param_count + i;
3977 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3980 fprintf (f, "\n");
3982 fprintf (f, "\n");
3985 /* Determine which (parts of) parameters passed by reference and not assigned
3986 to are not certainly dereferenced in this function, meaning that the
3987 dereference cannot be safely moved to the caller without potentially
3988 introducing a segfault. Mark such REPRESENTATIVES as
3989 grp_not_necessarilly_dereferenced.
3991 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3992 part, is calculated for each pointer parameter rather than a simple
3993 boolean, in order to handle cases when only a fraction of the whole
3994 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3995 an example).
3997 The maximum dereference distances for each pointer parameter and BB are
3998 already stored in bb_dereferences. This routine simply propagates these
3999 values upwards by propagate_dereference_distances and then compares the
4000 distances of individual parameters in the ENTRY BB to the equivalent
4001 distances of each representative of a (fraction of a) parameter. */
4003 static void
4004 analyze_caller_dereference_legality (vec<access_p> representatives)
4006 int i;
4008 if (dump_file && (dump_flags & TDF_DETAILS))
4009 dump_dereferences_table (dump_file,
4010 "Dereference table before propagation:\n",
4011 bb_dereferences);
4013 propagate_dereference_distances ();
4015 if (dump_file && (dump_flags & TDF_DETAILS))
4016 dump_dereferences_table (dump_file,
4017 "Dereference table after propagation:\n",
4018 bb_dereferences);
4020 for (i = 0; i < func_param_count; i++)
4022 struct access *repr = representatives[i];
4023 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4025 if (!repr || no_accesses_p (repr))
4026 continue;
4030 if ((repr->offset + repr->size) > bb_dereferences[idx])
4031 repr->grp_not_necessarilly_dereferenced = 1;
4032 repr = repr->next_grp;
4034 while (repr);
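/* Continuing the sketch above: a representative covering bits [32, 64)
   of *P has offset + size == 64, which exceeds the distance 32 computed
   for the ENTRY block, so it is marked grp_not_necessarilly_dereferenced
   and, if it may also be modified, will not be converted to a by-value
   component.  */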
4038 /* Return the representative access for the parameter declaration PARM if it is
4039 a scalar passed by reference which is not written to and the pointer value
4040 is not used directly. Thus, if it is legal to dereference it in the caller
4041 and we can rule out modifications through aliases, such a parameter should be
4042 turned into one passed by value. Return NULL otherwise. */
4044 static struct access *
4045 unmodified_by_ref_scalar_representative (tree parm)
4047 int i, access_count;
4048 struct access *repr;
4049 vec<access_p> *access_vec;
4051 access_vec = get_base_access_vector (parm);
4052 gcc_assert (access_vec);
4053 repr = (*access_vec)[0];
4054 if (repr->write)
4055 return NULL;
4056 repr->group_representative = repr;
4058 access_count = access_vec->length ();
4059 for (i = 1; i < access_count; i++)
4061 struct access *access = (*access_vec)[i];
4062 if (access->write)
4063 return NULL;
4064 access->group_representative = repr;
4065 access->next_sibling = repr->next_sibling;
4066 repr->next_sibling = access;
4069 repr->grp_read = 1;
4070 repr->grp_scalar_ptr = 1;
4071 return repr;
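/* The transformation this enables, sketched on hypothetical code (the
   clone name is illustrative; IPA-SRA appends an "isra" suffix to the
   real clone's symbol name):

     static int f (int *p) { return *p + 1; }             // before
     static int f_isra (int p_val) { return p_val + 1; }  // after

   Callers then perform the load of *P themselves.  This is only legal
   once analyze_caller_dereference_legality and analyze_modified_params
   also succeed, so grp_scalar_ptr is merely a marker at this point.  */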
4074 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4075 associated with. REQ_ALIGN is the minimum required alignment. */
4077 static bool
4078 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4080 unsigned int exp_align;
4081 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4082 is an incompatible assignment in a call statement (and possibly even in asm
4083 statements). This can be relaxed by using a new temporary but only for
4084 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4085 intraprocedural SRA we deal with this by keeping the old aggregate around,
4086 something we cannot do in IPA-SRA.) */
4087 if (access->write
4088 && (is_gimple_call (access->stmt)
4089 || gimple_code (access->stmt) == GIMPLE_ASM))
4090 return true;
4092 exp_align = get_object_alignment (access->expr);
4093 if (exp_align < req_align)
4094 return true;
4096 return false;
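/* E.g. (hypothetical): an access such as

     s.f = bar ();        // aggregate part written directly by a call

   precludes IPA-SRA of S, as would writing it as an output of a GIMPLE
   asm, or any access whose known alignment is below REQ_ALIGN, for
   instance a field of a struct declared with __attribute__ ((packed)).  */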
4100 /* Sort collected accesses for parameter PARM, identify representatives for
4101 each accessed region and link them together. Return NULL if there are
4102 different but overlapping accesses, return the special pointer value meaning
4103 there are no accesses for this parameter if that is the case, and return the
4104 first representative otherwise. Set *RO_GRP if there is a group of accesses
4105 with only read (i.e. no write) accesses. */
4107 static struct access *
4108 splice_param_accesses (tree parm, bool *ro_grp)
4110 int i, j, access_count, group_count;
4111 int agg_size, total_size = 0;
4112 struct access *access, *res, **prev_acc_ptr = &res;
4113 vec<access_p> *access_vec;
4115 access_vec = get_base_access_vector (parm);
4116 if (!access_vec)
4117 return &no_accesses_representant;
4118 access_count = access_vec->length ();
4120 access_vec->qsort (compare_access_positions);
4122 i = 0;
4123 total_size = 0;
4124 group_count = 0;
4125 while (i < access_count)
4127 bool modification;
4128 tree a1_alias_type;
4129 access = (*access_vec)[i];
4130 modification = access->write;
4131 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4132 return NULL;
4133 a1_alias_type = reference_alias_ptr_type (access->expr);
4135 /* Access is about to become group representative unless we find some
4136 nasty overlap which would preclude us from breaking this parameter
4137 apart. */
4139 j = i + 1;
4140 while (j < access_count)
4142 struct access *ac2 = (*access_vec)[j];
4143 if (ac2->offset != access->offset)
4145 /* All or nothing law for parameters. */
4146 if (access->offset + access->size > ac2->offset)
4147 return NULL;
4148 else
4149 break;
4151 else if (ac2->size != access->size)
4152 return NULL;
4154 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4155 || (ac2->type != access->type
4156 && (TREE_ADDRESSABLE (ac2->type)
4157 || TREE_ADDRESSABLE (access->type)))
4158 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4159 return NULL;
4161 modification |= ac2->write;
4162 ac2->group_representative = access;
4163 ac2->next_sibling = access->next_sibling;
4164 access->next_sibling = ac2;
4165 j++;
4168 group_count++;
4169 access->grp_maybe_modified = modification;
4170 if (!modification)
4171 *ro_grp = true;
4172 *prev_acc_ptr = access;
4173 prev_acc_ptr = &access->next_grp;
4174 total_size += access->size;
4175 i = j;
4178 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4179 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4180 else
4181 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4182 if (total_size >= agg_size)
4183 return NULL;
4185 gcc_assert (group_count > 0);
4186 return res;
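/* Overlap illustration (hypothetical type): for a parameter of type

     union u { long l; int i[2]; };      // assuming 64-bit long

   an access to U.L (offset 0, size 64) and one to U.I[1] (offset 32,
   size 32) start at different offsets yet overlap, so splicing returns
   NULL and the whole parameter is rejected ("all or nothing"); accesses
   with identical offset and size are instead chained into one group.  */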
4189 /* Decide whether parameters with representative accesses given by REPR should
4190 be reduced into components. */
4192 static int
4193 decide_one_param_reduction (struct access *repr)
4195 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4196 bool by_ref;
4197 tree parm;
4199 parm = repr->base;
4200 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4201 gcc_assert (cur_parm_size > 0);
4203 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4205 by_ref = true;
4206 agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4208 else
4210 by_ref = false;
4211 agg_size = cur_parm_size;
4214 if (dump_file)
4216 struct access *acc;
4217 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4218 print_generic_expr (dump_file, parm, 0);
4219 fprintf (dump_file, " (UID: %u):\n", DECL_UID (parm));
4220 for (acc = repr; acc; acc = acc->next_grp)
4221 dump_access (dump_file, acc, true);
4224 total_size = 0;
4225 new_param_count = 0;
4227 for (; repr; repr = repr->next_grp)
4229 gcc_assert (parm == repr->base);
4231 /* Taking the address of a non-addressable field is verboten. */
4232 if (by_ref && repr->non_addressable)
4233 return 0;
4235 /* Do not decompose a non-BLKmode param in a way that would
4236 create BLKmode params. Especially for by-reference passing
4237 (thus, pointer-type param) this is hardly worthwhile. */
4238 if (DECL_MODE (parm) != BLKmode
4239 && TYPE_MODE (repr->type) == BLKmode)
4240 return 0;
4242 if (!by_ref || (!repr->grp_maybe_modified
4243 && !repr->grp_not_necessarilly_dereferenced))
4244 total_size += repr->size;
4245 else
4246 total_size += cur_parm_size;
4248 new_param_count++;
4251 gcc_assert (new_param_count > 0);
4253 if (optimize_function_for_size_p (cfun))
4254 parm_size_limit = cur_parm_size;
4255 else
4256 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4257 * cur_parm_size);
4259 if (total_size < agg_size
4260 && total_size <= parm_size_limit)
4262 if (dump_file)
4263 fprintf (dump_file, " ....will be split into %i components\n",
4264 new_param_count);
4265 return new_param_count;
4267 else
4268 return 0;
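/* A worked instance of the size check (hypothetical numbers, assuming
   64-bit pointers and the default --param ipa-sra-ptr-growth-factor=2):
   for a pointer to a 256-bit struct from which two unmodified, certainly
   dereferenced 32-bit fields are read, total_size == 64, agg_size == 256
   and parm_size_limit == 2 * 64 == 128.  Since 64 < 256 and 64 <= 128,
   the parameter is split into two components.  */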
4271 /* The order of the following enums is important; we need to do extra work for
4272 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4273 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4274 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4276 /* Identify representatives of all accesses to all candidate parameters for
4277 IPA-SRA. Return result based on what representatives have been found. */
4279 static enum ipa_splicing_result
4280 splice_all_param_accesses (vec<access_p> &representatives)
4282 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4283 tree parm;
4284 struct access *repr;
4286 representatives.create (func_param_count);
4288 for (parm = DECL_ARGUMENTS (current_function_decl);
4289 parm;
4290 parm = DECL_CHAIN (parm))
4292 if (is_unused_scalar_param (parm))
4294 representatives.quick_push (&no_accesses_representant);
4295 if (result == NO_GOOD_ACCESS)
4296 result = UNUSED_PARAMS;
4298 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4299 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4300 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4302 repr = unmodified_by_ref_scalar_representative (parm);
4303 representatives.quick_push (repr);
4304 if (repr)
4305 result = UNMODIF_BY_REF_ACCESSES;
4307 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4309 bool ro_grp = false;
4310 repr = splice_param_accesses (parm, &ro_grp);
4311 representatives.quick_push (repr);
4313 if (repr && !no_accesses_p (repr))
4315 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4317 if (ro_grp)
4318 result = UNMODIF_BY_REF_ACCESSES;
4319 else if (result < MODIF_BY_REF_ACCESSES)
4320 result = MODIF_BY_REF_ACCESSES;
4322 else if (result < BY_VAL_ACCESSES)
4323 result = BY_VAL_ACCESSES;
4325 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4326 result = UNUSED_PARAMS;
4328 else
4329 representatives.quick_push (NULL);
4332 if (result == NO_GOOD_ACCESS)
4334 representatives.release ();
4335 return NO_GOOD_ACCESS;
4338 return result;
4341 /* Return the index of BASE in PARMS. Abort if it is not found. */
4343 static inline int
4344 get_param_index (tree base, vec<tree> parms)
4346 int i, len;
4348 len = parms.length ();
4349 for (i = 0; i < len; i++)
4350 if (parms[i] == base)
4351 return i;
4352 gcc_unreachable ();
4355 /* Convert the decisions made at the representative level into compact
4356 parameter adjustments. REPRESENTATIVES are pointers to the first
4357 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4358 expected final number of adjustments. */
4360 static ipa_parm_adjustment_vec
4361 turn_representatives_into_adjustments (vec<access_p> representatives,
4362 int adjustments_count)
4364 vec<tree> parms;
4365 ipa_parm_adjustment_vec adjustments;
4366 tree parm;
4367 int i;
4369 gcc_assert (adjustments_count > 0);
4370 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4371 adjustments.create (adjustments_count);
4372 parm = DECL_ARGUMENTS (current_function_decl);
4373 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4375 struct access *repr = representatives[i];
4377 if (!repr || no_accesses_p (repr))
4379 struct ipa_parm_adjustment adj;
4381 memset (&adj, 0, sizeof (adj));
4382 adj.base_index = get_param_index (parm, parms);
4383 adj.base = parm;
4384 if (!repr)
4385 adj.op = IPA_PARM_OP_COPY;
4386 else
4387 adj.op = IPA_PARM_OP_REMOVE;
4388 adj.arg_prefix = "ISRA";
4389 adjustments.quick_push (adj);
4391 else
4393 struct ipa_parm_adjustment adj;
4394 int index = get_param_index (parm, parms);
4396 for (; repr; repr = repr->next_grp)
4398 memset (&adj, 0, sizeof (adj));
4399 gcc_assert (repr->base == parm);
4400 adj.base_index = index;
4401 adj.base = repr->base;
4402 adj.type = repr->type;
4403 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4404 adj.offset = repr->offset;
4405 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4406 && (repr->grp_maybe_modified
4407 || repr->grp_not_necessarilly_dereferenced));
4408 adj.arg_prefix = "ISRA";
4409 adjustments.quick_push (adj);
4413 parms.release ();
4414 return adjustments;
4417 /* Analyze the collected accesses and produce a plan for what to do with the
4418 parameters in the form of adjustments, an empty vector meaning nothing. */
4420 static ipa_parm_adjustment_vec
4421 analyze_all_param_acesses (void)
4423 enum ipa_splicing_result repr_state;
4424 bool proceed = false;
4425 int i, adjustments_count = 0;
4426 vec<access_p> representatives;
4427 ipa_parm_adjustment_vec adjustments;
4429 repr_state = splice_all_param_accesses (representatives);
4430 if (repr_state == NO_GOOD_ACCESS)
4431 return ipa_parm_adjustment_vec ();
4433 /* If there are any parameters passed by reference which are not modified
4434 directly, we need to check whether they can be modified indirectly. */
4435 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4437 analyze_caller_dereference_legality (representatives);
4438 analyze_modified_params (representatives);
4441 for (i = 0; i < func_param_count; i++)
4443 struct access *repr = representatives[i];
4445 if (repr && !no_accesses_p (repr))
4447 if (repr->grp_scalar_ptr)
4449 adjustments_count++;
4450 if (repr->grp_not_necessarilly_dereferenced
4451 || repr->grp_maybe_modified)
4452 representatives[i] = NULL;
4453 else
4455 proceed = true;
4456 sra_stats.scalar_by_ref_to_by_val++;
4459 else
4461 int new_components = decide_one_param_reduction (repr);
4463 if (new_components == 0)
4465 representatives[i] = NULL;
4466 adjustments_count++;
4468 else
4470 adjustments_count += new_components;
4471 sra_stats.aggregate_params_reduced++;
4472 sra_stats.param_reductions_created += new_components;
4473 proceed = true;
4477 else
4479 if (no_accesses_p (repr))
4481 proceed = true;
4482 sra_stats.deleted_unused_parameters++;
4484 adjustments_count++;
4488 if (!proceed && dump_file)
4489 fprintf (dump_file, "NOT proceeding to change params.\n");
4491 if (proceed)
4492 adjustments = turn_representatives_into_adjustments (representatives,
4493 adjustments_count);
4494 else
4495 adjustments = ipa_parm_adjustment_vec ();
4497 representatives.release ();
4498 return adjustments;
4501 /* If a parameter replacement identified by ADJ does not yet exist in the form
4502 of a declaration, create it and record it; otherwise return the previously
4503 created one. */
4505 static tree
4506 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4508 tree repl;
4509 if (!adj->new_ssa_base)
4511 char *pretty_name = make_fancy_name (adj->base);
4513 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4514 DECL_NAME (repl) = get_identifier (pretty_name);
4515 obstack_free (&name_obstack, pretty_name);
4517 adj->new_ssa_base = repl;
4519 else
4520 repl = adj->new_ssa_base;
4521 return repl;
4524 /* Find the first adjustment for a particular parameter BASE in a vector of
4525 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4526 adjustment. */
4528 static struct ipa_parm_adjustment *
4529 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4531 int i, len;
4533 len = adjustments.length ();
4534 for (i = 0; i < len; i++)
4536 struct ipa_parm_adjustment *adj;
4538 adj = &adjustments[i];
4539 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4540 return adj;
4543 return NULL;
4546 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4547 removed because its value is not used, replace the SSA_NAME with one
4548 relating to a created VAR_DECL, together with all of its uses, and return
4549 true. ADJUSTMENTS is a pointer to an adjustments vector. */
4551 static bool
4552 replace_removed_params_ssa_names (gimple stmt,
4553 ipa_parm_adjustment_vec adjustments)
4555 struct ipa_parm_adjustment *adj;
4556 tree lhs, decl, repl, name;
4558 if (gimple_code (stmt) == GIMPLE_PHI)
4559 lhs = gimple_phi_result (stmt);
4560 else if (is_gimple_assign (stmt))
4561 lhs = gimple_assign_lhs (stmt);
4562 else if (is_gimple_call (stmt))
4563 lhs = gimple_call_lhs (stmt);
4564 else
4565 gcc_unreachable ();
4567 if (TREE_CODE (lhs) != SSA_NAME)
4568 return false;
4570 decl = SSA_NAME_VAR (lhs);
4571 if (decl == NULL_TREE
4572 || TREE_CODE (decl) != PARM_DECL)
4573 return false;
4575 adj = get_adjustment_for_base (adjustments, decl);
4576 if (!adj)
4577 return false;
4579 repl = get_replaced_param_substitute (adj);
4580 name = make_ssa_name (repl, stmt);
4582 if (dump_file)
4584 fprintf (dump_file, "replacing an SSA name of a removed param ");
4585 print_generic_expr (dump_file, lhs, 0);
4586 fprintf (dump_file, " with ");
4587 print_generic_expr (dump_file, name, 0);
4588 fprintf (dump_file, "\n");
4591 if (is_gimple_assign (stmt))
4592 gimple_assign_set_lhs (stmt, name);
4593 else if (is_gimple_call (stmt))
4594 gimple_call_set_lhs (stmt, name);
4595 else
4596 gimple_phi_set_result (as_a <gphi *> (stmt), name);
4598 replace_uses_by (lhs, name);
4599 release_ssa_name (lhs);
4600 return true;
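/* E.g. (hypothetical GIMPLE): if parameter N is being removed but its
   value still flows through

     n_1 = PHI <n_2(3), n_3(4)>

   the PHI result is rebased onto a fresh "ISR" temporary created by
   get_replaced_param_substitute, so the PARM_DECL itself can disappear
   from the signature.  */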
4603 /* If the statement STMT contains any expressions that need to be replaced
4604 with a different one as noted by ADJUSTMENTS, do so. Handle any potential
4605 type incompatibilities (GSI is used to accommodate conversion statements and
4606 must point to the statement). Return true iff the statement was modified. */
4608 static bool
4609 sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
4610 ipa_parm_adjustment_vec adjustments)
4612 tree *lhs_p, *rhs_p;
4613 bool any;
4615 if (!gimple_assign_single_p (stmt))
4616 return false;
4618 rhs_p = gimple_assign_rhs1_ptr (stmt);
4619 lhs_p = gimple_assign_lhs_ptr (stmt);
4621 any = ipa_modify_expr (rhs_p, false, adjustments);
4622 any |= ipa_modify_expr (lhs_p, false, adjustments);
4623 if (any)
4625 tree new_rhs = NULL_TREE;
4627 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4629 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4631 /* V_C_Es of constructors can cause trouble (PR 42714). */
4632 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4633 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4634 else
4635 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4636 NULL);
4638 else
4639 new_rhs = fold_build1_loc (gimple_location (stmt),
4640 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4641 *rhs_p);
4643 else if (REFERENCE_CLASS_P (*rhs_p)
4644 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4645 && !is_gimple_reg (*lhs_p))
4646 /* This can happen when an assignment in between two single field
4647 structures is turned into an assignment in between two pointers to
4648 scalars (PR 42237). */
4649 new_rhs = *rhs_p;
4651 if (new_rhs)
4653 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4654 true, GSI_SAME_STMT);
4656 gimple_assign_set_rhs_from_tree (gsi, tmp);
4659 return true;
4662 return false;
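/* Type-repair sketch (hypothetical): if replacing aggregates by their
   single fields turns

     d = s;                         // struct = struct

   into an assignment between incompatible scalar types, the RHS is
   rewrapped as VIEW_CONVERT_EXPR<type-of-lhs>(rhs) and forced into a
   separate statement; CONSTRUCTOR RHSs are instead rebuilt with the
   LHS type to avoid the V_C_E-of-constructor problem from PR 42714.  */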
4665 /* Traverse the function body and perform all modifications as described in
4666 ADJUSTMENTS. Return true iff the CFG has been changed. */
4668 bool
4669 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4671 bool cfg_changed = false;
4672 basic_block bb;
4674 FOR_EACH_BB_FN (bb, cfun)
4676 gimple_stmt_iterator gsi;
4678 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4679 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4681 gsi = gsi_start_bb (bb);
4682 while (!gsi_end_p (gsi))
4684 gimple stmt = gsi_stmt (gsi);
4685 bool modified = false;
4686 tree *t;
4687 unsigned i;
4689 switch (gimple_code (stmt))
4691 case GIMPLE_RETURN:
4692 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4693 if (*t != NULL_TREE)
4694 modified |= ipa_modify_expr (t, true, adjustments);
4695 break;
4697 case GIMPLE_ASSIGN:
4698 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4699 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4700 break;
4702 case GIMPLE_CALL:
4703 /* Operands must be processed before the lhs. */
4704 for (i = 0; i < gimple_call_num_args (stmt); i++)
4706 t = gimple_call_arg_ptr (stmt, i);
4707 modified |= ipa_modify_expr (t, true, adjustments);
4710 if (gimple_call_lhs (stmt))
4712 t = gimple_call_lhs_ptr (stmt);
4713 modified |= ipa_modify_expr (t, false, adjustments);
4714 modified |= replace_removed_params_ssa_names (stmt,
4715 adjustments);
4717 break;
4719 case GIMPLE_ASM:
4721 gasm *asm_stmt = as_a <gasm *> (stmt);
4722 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4724 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4725 modified |= ipa_modify_expr (t, true, adjustments);
4727 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4729 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4730 modified |= ipa_modify_expr (t, false, adjustments);
4733 break;
4735 default:
4736 break;
4739 if (modified)
4741 update_stmt (stmt);
4742 if (maybe_clean_eh_stmt (stmt)
4743 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4744 cfg_changed = true;
4746 gsi_next (&gsi);
4750 return cfg_changed;
4753 /* Call gimple_debug_bind_reset_value on all debug statements describing
4754 gimple register parameters that are being removed or replaced. */
4756 static void
4757 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4759 int i, len;
4760 gimple_stmt_iterator *gsip = NULL, gsi;
4762 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
4764 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4765 gsip = &gsi;
4767 len = adjustments.length ();
4768 for (i = 0; i < len; i++)
4770 struct ipa_parm_adjustment *adj;
4771 imm_use_iterator ui;
4772 gimple stmt;
4773 gdebug *def_temp;
4774 tree name, vexpr, copy = NULL_TREE;
4775 use_operand_p use_p;
4777 adj = &adjustments[i];
4778 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
4779 continue;
4780 name = ssa_default_def (cfun, adj->base);
4781 vexpr = NULL;
4782 if (name)
4783 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4785 if (gimple_clobber_p (stmt))
4787 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
4788 unlink_stmt_vdef (stmt);
4789 gsi_remove (&cgsi, true);
4790 release_defs (stmt);
4791 continue;
4793 /* All other users must have been removed by
4794 ipa_sra_modify_function_body. */
4795 gcc_assert (is_gimple_debug (stmt));
4796 if (vexpr == NULL && gsip != NULL)
4798 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4799 vexpr = make_node (DEBUG_EXPR_DECL);
4800 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4801 NULL);
4802 DECL_ARTIFICIAL (vexpr) = 1;
4803 TREE_TYPE (vexpr) = TREE_TYPE (name);
4804 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4805 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4807 if (vexpr)
4809 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4810 SET_USE (use_p, vexpr);
4812 else
4813 gimple_debug_bind_reset_value (stmt);
4814 update_stmt (stmt);
4816 /* Create a VAR_DECL for debug info purposes. */
4817 if (!DECL_IGNORED_P (adj->base))
4819 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4820 VAR_DECL, DECL_NAME (adj->base),
4821 TREE_TYPE (adj->base));
4822 if (DECL_PT_UID_SET_P (adj->base))
4823 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4824 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4825 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4826 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4827 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4828 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4829 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4830 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4831 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4832 SET_DECL_RTL (copy, 0);
4833 TREE_USED (copy) = 1;
4834 DECL_CONTEXT (copy) = current_function_decl;
4835 add_local_decl (cfun, copy);
4836 DECL_CHAIN (copy) =
4837 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4838 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4840 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4842 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4843 if (vexpr)
4844 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4845 else
4846 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4847 NULL);
4848 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
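/* Dump-format sketch (the exact textual form is approximate and depends
   on the dump flags): for a removed parameter N the remaining debug
   uses end up looking like

     # DEBUG D#1 s=> n
     # DEBUG n => D#1

   i.e. a source bind of a DEBUG_EXPR_DECL to the original PARM_DECL, so
   debuggers can still show N where its value is recoverable.  */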
4853 /* Return true if some caller of NODE does not pass at least as many actual
4854 arguments as there are formal parameters in the current function, or if the
4855 types of the arguments do not match; return false otherwise. */
4857 static bool
4858 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
4859 void *data ATTRIBUTE_UNUSED)
4861 struct cgraph_edge *cs;
4862 for (cs = node->callers; cs; cs = cs->next_caller)
4863 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
4864 return true;
4866 return false;
4869 /* Return true if some caller of NODE has no VUSE attached to its call statement; return false otherwise. */
4871 static bool
4872 some_callers_have_no_vuse_p (struct cgraph_node *node,
4873 void *data ATTRIBUTE_UNUSED)
4875 struct cgraph_edge *cs;
4876 for (cs = node->callers; cs; cs = cs->next_caller)
4877 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
4878 return true;
4880 return false;
4883 /* Convert all callers of NODE. */
4885 static bool
4886 convert_callers_for_node (struct cgraph_node *node,
4887 void *data)
4889 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4890 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4891 struct cgraph_edge *cs;
4893 for (cs = node->callers; cs; cs = cs->next_caller)
4895 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4897 if (dump_file)
4898 fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
4899 xstrdup (cs->caller->name ()),
4900 cs->caller->order,
4901 xstrdup (cs->callee->name ()),
4902 cs->callee->order);
4904 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4906 pop_cfun ();
4909 for (cs = node->callers; cs; cs = cs->next_caller)
4910 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4911 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4912 compute_inline_parameters (cs->caller, true);
4913 BITMAP_FREE (recomputed_callers);
4915 return true;
4918 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4920 static void
4921 convert_callers (struct cgraph_node *node, tree old_decl,
4922 ipa_parm_adjustment_vec adjustments)
4924 basic_block this_block;
4926 node->call_for_symbol_and_aliases (convert_callers_for_node,
4927 &adjustments, false);
4929 if (!encountered_recursive_call)
4930 return;
4932 FOR_EACH_BB_FN (this_block, cfun)
4934 gimple_stmt_iterator gsi;
4936 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4938 gcall *stmt;
4939 tree call_fndecl;
4940 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
4941 if (!stmt)
4942 continue;
4943 call_fndecl = gimple_call_fndecl (stmt);
4944 if (call_fndecl == old_decl)
4946 if (dump_file)
4947 fprintf (dump_file, "Adjusting recursive call");
4948 gimple_call_set_fndecl (stmt, node->decl);
4949 ipa_modify_call_arguments (NULL, stmt, adjustments);
4954 return;
4957 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4958 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4960 static bool
4961 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4963 struct cgraph_node *new_node;
4964 bool cfg_changed;
4966 cgraph_edge::rebuild_edges ();
4967 free_dominance_info (CDI_DOMINATORS);
4968 pop_cfun ();
4970 /* This must be done after rebuilding cgraph edges for node above.
4971 Otherwise any recursive calls to node that are recorded in
4972 redirect_callers will be corrupted. */
4973 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
4974 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
4975 NULL, false, NULL, NULL,
4976 "isra");
4977 redirect_callers.release ();
4979 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4980 ipa_modify_formal_parameters (current_function_decl, adjustments);
4981 cfg_changed = ipa_sra_modify_function_body (adjustments);
4982 sra_ipa_reset_debug_stmts (adjustments);
4983 convert_callers (new_node, node->decl, adjustments);
4984 new_node->make_local ();
4985 return cfg_changed;
4988 /* Means of communication between ipa_sra_check_caller and
4989 ipa_sra_preliminary_function_checks. */
4991 struct ipa_sra_check_caller_data
4993 bool has_callers;
4994 bool bad_arg_alignment;
4995 bool has_thunk;
4998 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
4999 ipa_sra_check_caller_data. Also check whether all aggregate arguments in all
5000 known calls are unit aligned and, if not, set the appropriate flag in DATA
5001 too. */
5003 static bool
5004 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5006 if (!node->callers)
5007 return false;
5009 struct ipa_sra_check_caller_data *iscc;
5010 iscc = (struct ipa_sra_check_caller_data *) data;
5011 iscc->has_callers = true;
5013 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5015 if (cs->caller->thunk.thunk_p)
5017 iscc->has_thunk = true;
5018 return true;
5020 gimple call_stmt = cs->call_stmt;
5021 unsigned count = gimple_call_num_args (call_stmt);
5022 for (unsigned i = 0; i < count; i++)
5024 tree arg = gimple_call_arg (call_stmt, i);
5025 if (is_gimple_reg (arg))
5026 continue;
5028 tree offset;
5029 HOST_WIDE_INT bitsize, bitpos;
5030 machine_mode mode;
5031 int unsignedp, volatilep = 0;
5032 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5033 &unsignedp, &volatilep, false);
5034 if (bitpos % BITS_PER_UNIT)
5036 iscc->bad_arg_alignment = true;
5037 return true;
5042 return false;
5045 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5046 its attributes; return true otherwise. NODE is the cgraph node of the current
5047 function. */
5049 static bool
5050 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5052 if (!node->can_be_local_p ())
5054 if (dump_file)
5055 fprintf (dump_file, "Function not local to this compilation unit.\n");
5056 return false;
5059 if (!node->local.can_change_signature)
5061 if (dump_file)
5062 fprintf (dump_file, "Function cannot change signature.\n");
5063 return false;
5066 if (!tree_versionable_function_p (node->decl))
5068 if (dump_file)
5069 fprintf (dump_file, "Function is not versionable.\n");
5070 return false;
5073 if (!opt_for_fn (node->decl, optimize)
5074 || !opt_for_fn (node->decl, flag_ipa_sra))
5076 if (dump_file)
5077 fprintf (dump_file, "Function not optimized.\n");
5078 return false;
5081 if (DECL_VIRTUAL_P (current_function_decl))
5083 if (dump_file)
5084 fprintf (dump_file, "Function is a virtual method.\n");
5085 return false;
5088 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5089 && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5091 if (dump_file)
5092 fprintf (dump_file, "Function too big to be made truly local.\n");
5093 return false;
5096 if (cfun->stdarg)
5098 if (dump_file)
5099 fprintf (dump_file, "Function uses stdarg.\n");
5100 return false;
5103 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5104 return false;
5106 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5108 if (dump_file)
5109 fprintf (dump_file, "Always inline function will be inlined "
5110 "anyway.\n");
5111 return false;
5114 struct ipa_sra_check_caller_data iscc;
5115 memset (&iscc, 0, sizeof(iscc));
5116 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5117 if (!iscc.has_callers)
5119 if (dump_file)
5120 fprintf (dump_file,
5121 "Function has no callers in this compilation unit.\n");
5122 return false;
5125 if (iscc.bad_arg_alignment)
5127 if (dump_file)
5128 fprintf (dump_file,
5129 "A function call has an argument with non-unit alignment.\n");
5130 return false;
5133 if (iscc.has_thunk)
5135 if (dump_file)
5136 fprintf (dump_file,
5137 "A has thunk.\n");
5138 return false;
5141 return true;
5144 /* Perform early interprocedural SRA. */
5146 static unsigned int
5147 ipa_early_sra (void)
5149 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5150 ipa_parm_adjustment_vec adjustments;
5151 int ret = 0;
5153 if (!ipa_sra_preliminary_function_checks (node))
5154 return 0;
5156 sra_initialize ();
5157 sra_mode = SRA_MODE_EARLY_IPA;
5159 if (!find_param_candidates ())
5161 if (dump_file)
5162 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5163 goto simple_out;
5166 if (node->call_for_symbol_and_aliases
5167 (some_callers_have_mismatched_arguments_p, NULL, true))
5169 if (dump_file)
5170 fprintf (dump_file, "There are callers with insufficient number of "
5171 "arguments or arguments with type mismatches.\n");
5172 goto simple_out;
5175 if (node->call_for_symbol_and_aliases
5176 (some_callers_have_no_vuse_p, NULL, true))
5178 if (dump_file)
5179 fprintf (dump_file, "There are callers with no VUSE attached "
5180 "to a call stmt.\n");
5181 goto simple_out;
5184 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5185 func_param_count
5186 * last_basic_block_for_fn (cfun));
5187 final_bbs = BITMAP_ALLOC (NULL);
5189 scan_function ();
5190 if (encountered_apply_args)
5192 if (dump_file)
5193 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5194 goto out;
5197 if (encountered_unchangable_recursive_call)
5199 if (dump_file)
5200 fprintf (dump_file, "Function calls itself with insufficient "
5201 "number of arguments.\n");
5202 goto out;
5205 adjustments = analyze_all_param_acesses ();
5206 if (!adjustments.exists ())
5207 goto out;
5208 if (dump_file)
5209 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5211 if (modify_function (node, adjustments))
5212 ret = TODO_update_ssa | TODO_cleanup_cfg;
5213 else
5214 ret = TODO_update_ssa;
5215 adjustments.release ();
5217 statistics_counter_event (cfun, "Unused parameters deleted",
5218 sra_stats.deleted_unused_parameters);
5219 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5220 sra_stats.scalar_by_ref_to_by_val);
5221 statistics_counter_event (cfun, "Aggregate parameters broken up",
5222 sra_stats.aggregate_params_reduced);
5223 statistics_counter_event (cfun, "Aggregate parameter components created",
5224 sra_stats.param_reductions_created);
5226 out:
5227 BITMAP_FREE (final_bbs);
5228 free (bb_dereferences);
5229 simple_out:
5230 sra_deinitialize ();
5231 return ret;
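/* End-to-end sketch (hypothetical translation unit): compiling

     struct s { int a, b, c; };
     static int get_a (struct s *p) { return p->a; }

   with this pass enabled typically produces a clone of GET_A taking a
   single int, with callers loading P->A themselves; the statistics
   above would then count one aggregate parameter broken up and one
   parameter reduction created.  */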
5234 namespace {
5236 const pass_data pass_data_early_ipa_sra =
5238 GIMPLE_PASS, /* type */
5239 "eipa_sra", /* name */
5240 OPTGROUP_NONE, /* optinfo_flags */
5241 TV_IPA_SRA, /* tv_id */
5242 0, /* properties_required */
5243 0, /* properties_provided */
5244 0, /* properties_destroyed */
5245 0, /* todo_flags_start */
5246 TODO_dump_symtab, /* todo_flags_finish */
5249 class pass_early_ipa_sra : public gimple_opt_pass
5251 public:
5252 pass_early_ipa_sra (gcc::context *ctxt)
5253 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5256 /* opt_pass methods: */
5257 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5258 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5260 }; // class pass_early_ipa_sra
5262 } // anon namespace
5264 gimple_opt_pass *
5265 make_pass_early_ipa_sra (gcc::context *ctxt)
5267 return new pass_early_ipa_sra (ctxt);