/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */

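/* As an illustrative sketch only (a hypothetical example, not taken from the
   original sources): given

     struct S { int a; float b; };
     struct S s;
     s.a = x;
     y = s.a;

   the pass would create independent scalar replacements, conceptually

     int s$a;
     float s$b;
     s$a = x;
     y = s$a;

   after which s itself may become dead and its remaining accesses can be
   removed.  */
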
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "params.h"
#include "target.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

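/* An illustrative sketch of the access tree invariant (hypothetical layout,
   not taken from the original sources): for

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   the group representatives might form the tree

     (offset 0, size 64)   in      <- root
       (offset 0, size 32)   in.a  <- first_child of in
       (offset 32, size 32)  in.b  <- next_sibling of in.a
     (offset 64, size 32)  c       <- next root via next_grp

   where every child lies within its parent's [offset, offset + size) range
   and children are sorted by offset.  */
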
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

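/* For illustration (hypothetical candidates): an aggregate assignment

     dst = src;

   where both dst and src are candidates gives rise to one assign_link whose
   lacc points to the access for dst and racc to the access for src.  The link
   is queued on the rhs access so that subaccesses discovered for src can
   later be propagated to dst.  */
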
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}

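/* For illustration (hypothetical types): a type such as

     struct ok  { int i; struct { float f; } nested; };

   passes the checks above, whereas

     struct bad { volatile int i; };

   is rejected with the "volatile structure field" message.  */
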
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}

/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  */

static bool
scalarizable_type_p (tree type)
{
  gcc_assert (!is_gimple_reg_type (type));

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!is_gimple_reg_type (ft)
		&& !scalarizable_type_p (ft))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  return false;

	tree elem = TREE_TYPE (type);
	if (!is_gimple_reg_type (elem)
	    && !scalarizable_type_p (elem))
	  return false;
	return true;
      }
    default:
      return false;
    }
}

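/* As a hypothetical illustration: struct { int a; short b[4]; } is
   scalarizable, while struct { int a : 3; } (a bit-field) or an array whose
   size is not a compile-time constant is not.  */
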
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression for
   the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
	    tree ft = TREE_TYPE (fld);
	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)), nref,
			    ft);
	  }
      break;
    case ARRAY_TYPE:
      {
	tree elemtype = TREE_TYPE (decl_type);
	tree elem_size = TYPE_SIZE (elemtype);
	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
	gcc_assert (el_size > 0);

	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
	if (maxidx)
	  {
	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
	    /* MINIDX and MAXIDX are inclusive.  Try to avoid overflow.  */
	    unsigned HOST_WIDE_INT lenp1 = tree_to_shwi (maxidx)
					   - tree_to_shwi (minidx);
	    unsigned HOST_WIDE_INT idx = 0;
	    do
	      {
		tree nref = build4 (ARRAY_REF, elemtype, ref, size_int (idx),
				    NULL_TREE, NULL_TREE);
		int el_off = offset + idx * el_size;
		scalarize_elem (base, el_off, el_size, nref, elemtype);
	      }
	    while (++idx <= lenp1);
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }
}

/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member and REF must be the reference expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size,
		tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}

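/* For illustration (hypothetical variable): completely scalarizing

     struct { int a; short b[2]; } v;

   creates total_scalarization accesses roughly at (offset 0, size 32) for
   v.a, (32, 16) for v.b[0] and (48, 16) for v.b[1]; completely_scalarize
   recurses through the record and the array and scalarize_elem bottoms out
   at each gimple register type leaf.  */
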
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}

/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}

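/* For illustration (hypothetical offsets and sizes in bits): accesses
   (offset 0, size 64), (0, 32) and (32, 32) sort to exactly that order:
   smaller offsets come first, and among equal offsets the bigger access
   comes first, so an enclosing access always precedes the accesses it
   contains.  */
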
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for a replacement variable based on EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}

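/* For illustration (hypothetical expression): for foo.bar[3].baz this
   produces the name "foo$bar$3$baz", which is where the '$'-separated
   replacement variable names seen in SRA dumps come from.  */
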
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}

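/* As a hypothetical illustration: for a declaration s of some struct type
   and OFFSET 32 (bits) with EXP_TYPE int, the function builds something like
   MEM[(int *)&s + 4B], i.e. the bit offset is converted to bytes and folded
   into the MEM_REF together with the alignment the spot is known to have.  */
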
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
				 gsi, insert_after);
}

/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree tr_pos, expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      tr_pos = bit_position (fld);
	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
		continue;
	      pos = tree_to_uhwi (tr_pos);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
		continue;
	      size = tree_to_uhwi (tr_size);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
	    return false;
	  el_size = tree_to_uhwi (tr_size);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}

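/* For illustration (hypothetical type): with *RES naming a variable of type
   struct { int a[4]; } and OFFSET 96 (bits), the loop descends through the
   record and the array and rewrites *RES to the user-readable a[3] rather
   than a raw MEM_REF at byte 12.  */
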
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* Print message to dump file why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}

/* Return true if VAR is a candidate for SRA.  */

static bool
maybe_add_sra_candidate (tree var)
{
  tree type = TREE_TYPE (var);
  const char *msg;
  tree_node **slot;

  if (!AGGREGATE_TYPE_P (type))
    {
      reject (var, "not aggregate");
      return false;
    }
  if (needs_to_live_in_memory (var))
    {
      reject (var, "needs to live in memory");
      return false;
    }
  if (TREE_THIS_VOLATILE (var))
    {
      reject (var, "is volatile");
      return false;
    }
  if (!COMPLETE_TYPE_P (type))
    {
      reject (var, "has incomplete type");
      return false;
    }
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    {
      reject (var, "type size not fixed");
      return false;
    }
  if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
    {
      reject (var, "type size is zero");
      return false;
    }
  if (type_internals_preclude_sra_p (type, &msg))
    {
      reject (var, msg);
      return false;
    }
  if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	 we also want to schedule it rather late.  Thus we ignore it in
	 the early pass.  */
      (sra_mode == SRA_MODE_EARLY_INTRA
       && is_va_list_type (type)))
    {
      reject (var, "is va_list");
      return false;
    }

  bitmap_set_bit (candidate_bitmap, DECL_UID (var));
  slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
  *slot = var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }

  return true;
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those declarations whose type is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}

1917 /* Sort all accesses for the given variable, check for partial overlaps and
1918 return NULL if there are any. If there are none, pick a representative for
1919 each combination of offset and size and create a linked list out of them.
1920 Return the pointer to the first representative and make sure it is the first
1921 one in the vector of accesses. */
1923 static struct access *
1924 sort_and_splice_var_accesses (tree var)
1926 int i, j, access_count;
1927 struct access *res, **prev_acc_ptr = &res;
1928 vec<access_p> *access_vec;
1929 bool first = true;
1930 HOST_WIDE_INT low = -1, high = 0;
1932 access_vec = get_base_access_vector (var);
1933 if (!access_vec)
1934 return NULL;
1935 access_count = access_vec->length ();
1937 /* Sort by <OFFSET, SIZE>. */
1938 access_vec->qsort (compare_access_positions);
1940 i = 0;
1941 while (i < access_count)
1943 struct access *access = (*access_vec)[i];
1944 bool grp_write = access->write;
1945 bool grp_read = !access->write;
1946 bool grp_scalar_write = access->write
1947 && is_gimple_reg_type (access->type);
1948 bool grp_scalar_read = !access->write
1949 && is_gimple_reg_type (access->type);
1950 bool grp_assignment_read = access->grp_assignment_read;
1951 bool grp_assignment_write = access->grp_assignment_write;
1952 bool multiple_scalar_reads = false;
1953 bool total_scalarization = access->grp_total_scalarization;
1954 bool grp_partial_lhs = access->grp_partial_lhs;
1955 bool first_scalar = is_gimple_reg_type (access->type);
1956 bool unscalarizable_region = access->grp_unscalarizable_region;
1958 if (first || access->offset >= high)
1960 first = false;
1961 low = access->offset;
1962 high = access->offset + access->size;
1964 else if (access->offset > low && access->offset + access->size > high)
1965 return NULL;
1966 else
1967 gcc_assert (access->offset >= low
1968 && access->offset + access->size <= high);
1970 j = i + 1;
1971 while (j < access_count)
1973 struct access *ac2 = (*access_vec)[j];
1974 if (ac2->offset != access->offset || ac2->size != access->size)
1975 break;
1976 if (ac2->write)
1978 grp_write = true;
1979 grp_scalar_write = (grp_scalar_write
1980 || is_gimple_reg_type (ac2->type));
1982 else
1984 grp_read = true;
1985 if (is_gimple_reg_type (ac2->type))
1987 if (grp_scalar_read)
1988 multiple_scalar_reads = true;
1989 else
1990 grp_scalar_read = true;
1993 grp_assignment_read |= ac2->grp_assignment_read;
1994 grp_assignment_write |= ac2->grp_assignment_write;
1995 grp_partial_lhs |= ac2->grp_partial_lhs;
1996 unscalarizable_region |= ac2->grp_unscalarizable_region;
1997 total_scalarization |= ac2->grp_total_scalarization;
1998 relink_to_new_repr (access, ac2);
2000 /* If there are both aggregate-type and scalar-type accesses with
2001 this combination of size and offset, the comparison function
2002 should have put the scalars first. */
2003 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2004 ac2->group_representative = access;
2005 j++;
2008 i = j;
2010 access->group_representative = access;
2011 access->grp_write = grp_write;
2012 access->grp_read = grp_read;
2013 access->grp_scalar_read = grp_scalar_read;
2014 access->grp_scalar_write = grp_scalar_write;
2015 access->grp_assignment_read = grp_assignment_read;
2016 access->grp_assignment_write = grp_assignment_write;
2017 access->grp_hint = multiple_scalar_reads || total_scalarization;
2018 access->grp_total_scalarization = total_scalarization;
2019 access->grp_partial_lhs = grp_partial_lhs;
2020 access->grp_unscalarizable_region = unscalarizable_region;
2021 if (access->first_link)
2022 add_access_to_work_queue (access);
2024 *prev_acc_ptr = access;
2025 prev_acc_ptr = &access->next_grp;
2028 gcc_assert (res == (*access_vec)[0]);
2029 return res;
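/* For illustration only: if a variable has accesses with <offset, size>
   pairs <0, 32>, <0, 32> and <32, 32>, the two accesses at <0, 32> are
   spliced into one group with a single representative, yielding the
   representative list <0, 32> -> <32, 32>.  Accesses <0, 32> and <16, 32>,
   on the other hand, overlap only partially and would make this function
   return NULL. */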
2032 /* Create a variable for the given ACCESS which determines the type, name and a
2033 few other properties. Return the variable declaration; the caller is
2034 expected to store it in ACCESS->replacement_decl. */
2036 static tree
2037 create_access_replacement (struct access *access)
2039 tree repl;
2041 if (access->grp_to_be_debug_replaced)
2043 repl = create_tmp_var_raw (access->type);
2044 DECL_CONTEXT (repl) = current_function_decl;
2046 else
2047 /* Drop any special alignment on the type if it's not on the main
2048 variant. This avoids issues with weirdo ABIs like AAPCS. */
2049 repl = create_tmp_var (build_qualified_type
2050 (TYPE_MAIN_VARIANT (access->type),
2051 TYPE_QUALS (access->type)), "SR");
2052 if (TREE_CODE (access->type) == COMPLEX_TYPE
2053 || TREE_CODE (access->type) == VECTOR_TYPE)
2055 if (!access->grp_partial_lhs)
2056 DECL_GIMPLE_REG_P (repl) = 1;
2058 else if (access->grp_partial_lhs
2059 && is_gimple_reg_type (access->type))
2060 TREE_ADDRESSABLE (repl) = 1;
2062 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2063 DECL_ARTIFICIAL (repl) = 1;
2064 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2066 if (DECL_NAME (access->base)
2067 && !DECL_IGNORED_P (access->base)
2068 && !DECL_ARTIFICIAL (access->base))
2070 char *pretty_name = make_fancy_name (access->expr);
2071 tree debug_expr = unshare_expr_without_location (access->expr), d;
2072 bool fail = false;
2074 DECL_NAME (repl) = get_identifier (pretty_name);
2075 obstack_free (&name_obstack, pretty_name);
2077 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2078 as DECL_DEBUG_EXPR isn't considered when looking for still
2079 used SSA_NAMEs and thus they could be freed. All debug info
2080 generation cares about is whether something is constant or variable
2081 and that get_ref_base_and_extent works properly on the
2082 expression. It cannot handle accesses at a non-constant offset
2083 though, so just give up in those cases. */
2084 for (d = debug_expr;
2085 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2086 d = TREE_OPERAND (d, 0))
2087 switch (TREE_CODE (d))
2089 case ARRAY_REF:
2090 case ARRAY_RANGE_REF:
2091 if (TREE_OPERAND (d, 1)
2092 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2093 fail = true;
2094 if (TREE_OPERAND (d, 3)
2095 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2096 fail = true;
2097 /* FALLTHRU */
2098 case COMPONENT_REF:
2099 if (TREE_OPERAND (d, 2)
2100 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2101 fail = true;
2102 break;
2103 case MEM_REF:
2104 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2105 fail = true;
2106 else
2107 d = TREE_OPERAND (d, 0);
2108 break;
2109 default:
2110 break;
2112 if (!fail)
2114 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2115 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2117 if (access->grp_no_warning)
2118 TREE_NO_WARNING (repl) = 1;
2119 else
2120 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2122 else
2123 TREE_NO_WARNING (repl) = 1;
2125 if (dump_file)
2127 if (access->grp_to_be_debug_replaced)
2129 fprintf (dump_file, "Created a debug-only replacement for ");
2130 print_generic_expr (dump_file, access->base, 0);
2131 fprintf (dump_file, " offset: %u, size: %u\n",
2132 (unsigned) access->offset, (unsigned) access->size);
2134 else
2136 fprintf (dump_file, "Created a replacement for ");
2137 print_generic_expr (dump_file, access->base, 0);
2138 fprintf (dump_file, " offset: %u, size: %u: ",
2139 (unsigned) access->offset, (unsigned) access->size);
2140 print_generic_expr (dump_file, repl, 0);
2141 fprintf (dump_file, "\n");
2144 sra_stats.replacements++;
2146 return repl;
2149 /* Return ACCESS' scalar replacement, which must already exist. */
2151 static inline tree
2152 get_access_replacement (struct access *access)
2154 gcc_checking_assert (access->replacement_decl);
2155 return access->replacement_decl;
2159 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2160 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2161 to by it is not "within" the root. Return false iff some accesses partially
2162 overlap. */
2164 static bool
2165 build_access_subtree (struct access **access)
2167 struct access *root = *access, *last_child = NULL;
2168 HOST_WIDE_INT limit = root->offset + root->size;
2170 *access = (*access)->next_grp;
2171 while (*access && (*access)->offset + (*access)->size <= limit)
2173 if (!last_child)
2174 root->first_child = *access;
2175 else
2176 last_child->next_sibling = *access;
2177 last_child = *access;
2179 if (!build_access_subtree (access))
2180 return false;
2183 if (*access && (*access)->offset < limit)
2184 return false;
2186 return true;
2189 /* Build a tree of access representatives, ACCESS is the pointer to the first
2190 one, others are linked in a list by the next_grp field. Return false iff
2191 some accesses partially overlap. */
2193 static bool
2194 build_access_trees (struct access *access)
2196 while (access)
2198 struct access *root = access;
2200 if (!build_access_subtree (&access))
2201 return false;
2202 root->next_grp = access;
2204 return true;
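/* A purely illustrative sketch: from a representative list such as
   <0, 64> -> <0, 32> -> <32, 32> (larger accesses sort first at equal
   offsets), build_access_subtree produces

     <0, 64>
       +-- <0, 32>
       +-- <32, 32>

   because both smaller accesses lie entirely within the first one. */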
2207 /* Return true if EXPR contains some ARRAY_REFs into an array with a
2208 variable low bound. */
2210 static bool
2211 expr_with_var_bounded_array_refs_p (tree expr)
2213 while (handled_component_p (expr))
2215 if (TREE_CODE (expr) == ARRAY_REF
2216 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2217 return true;
2218 expr = TREE_OPERAND (expr, 0);
2220 return false;
2223 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2224 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2225 sorts of access flags appropriately along the way, notably propagate
2226 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2227 grp_total_scalarization from PARENT down to ROOT.
2229 Creating a replacement for a scalar access is considered beneficial if its
2230 grp_hint is set (this means we are either attempting total scalarization or
2231 there is more than one direct read access) or according to the following
2232 table:
2234 Access written to through a scalar type (once or more times)
2236 | Written to in an assignment statement
2238 | | Access read as scalar _once_
2239 | | |
2240 | | | Read in an assignment statement
2241 | | | |
2242 | | | | Scalarize Comment
2243 -----------------------------------------------------------------------------
2244 0 0 0 0 No access for the scalar
2245 0 0 0 1 No access for the scalar
2246 0 0 1 0 No Single read - won't help
2247 0 0 1 1 No The same case
2248 0 1 0 0 No access for the scalar
2249 0 1 0 1 No access for the scalar
2250 0 1 1 0 Yes s = *g; return s.i;
2251 0 1 1 1 Yes The same case as above
2252 1 0 0 0 No Won't help
2253 1 0 0 1 Yes s.i = 1; *g = s;
2254 1 0 1 0 Yes s.i = 5; g = s.i;
2255 1 0 1 1 Yes The same case as above
2256 1 1 0 0 No Won't help.
2257 1 1 0 1 Yes s.i = 1; *g = s;
2258 1 1 1 0 Yes s = *g; return s.i;
2259 1 1 1 1 Yes Any of the above yeses */
2261 static bool
2262 analyze_access_subtree (struct access *root, struct access *parent,
2263 bool allow_replacements)
2265 struct access *child;
2266 HOST_WIDE_INT limit = root->offset + root->size;
2267 HOST_WIDE_INT covered_to = root->offset;
2268 bool scalar = is_gimple_reg_type (root->type);
2269 bool hole = false, sth_created = false;
2271 if (parent)
2273 if (parent->grp_read)
2274 root->grp_read = 1;
2275 if (parent->grp_assignment_read)
2276 root->grp_assignment_read = 1;
2277 if (parent->grp_write)
2278 root->grp_write = 1;
2279 if (parent->grp_assignment_write)
2280 root->grp_assignment_write = 1;
2281 if (parent->grp_total_scalarization)
2282 root->grp_total_scalarization = 1;
2285 if (root->grp_unscalarizable_region)
2286 allow_replacements = false;
2288 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2289 allow_replacements = false;
2291 for (child = root->first_child; child; child = child->next_sibling)
2293 hole |= covered_to < child->offset;
2294 sth_created |= analyze_access_subtree (child, root,
2295 allow_replacements && !scalar);
2297 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2298 root->grp_total_scalarization &= child->grp_total_scalarization;
2299 if (child->grp_covered)
2300 covered_to += child->size;
2301 else
2302 hole = true;
2305 if (allow_replacements && scalar && !root->first_child
2306 && (root->grp_hint
2307 || ((root->grp_scalar_read || root->grp_assignment_read)
2308 && (root->grp_scalar_write || root->grp_assignment_write))))
2310 /* Always create access replacements that cover the whole access.
2311 For integral types this means the precision has to match.
2312 Avoid assumptions based on the integral type kind, too. */
2313 if (INTEGRAL_TYPE_P (root->type)
2314 && (TREE_CODE (root->type) != INTEGER_TYPE
2315 || TYPE_PRECISION (root->type) != root->size)
2316 /* But leave bitfield accesses alone. */
2317 && (TREE_CODE (root->expr) != COMPONENT_REF
2318 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2320 tree rt = root->type;
2321 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2322 && (root->size % BITS_PER_UNIT) == 0);
2323 root->type = build_nonstandard_integer_type (root->size,
2324 TYPE_UNSIGNED (rt));
2325 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2326 root->base, root->offset,
2327 root->type, NULL, false);
2329 if (dump_file && (dump_flags & TDF_DETAILS))
2331 fprintf (dump_file, "Changing the type of a replacement for ");
2332 print_generic_expr (dump_file, root->base, 0);
2333 fprintf (dump_file, " offset: %u, size: %u ",
2334 (unsigned) root->offset, (unsigned) root->size);
2335 fprintf (dump_file, " to an integer.\n");
2339 root->grp_to_be_replaced = 1;
2340 root->replacement_decl = create_access_replacement (root);
2341 sth_created = true;
2342 hole = false;
2344 else
2346 if (allow_replacements
2347 && scalar && !root->first_child
2348 && (root->grp_scalar_write || root->grp_assignment_write)
2349 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2350 DECL_UID (root->base)))
2352 gcc_checking_assert (!root->grp_scalar_read
2353 && !root->grp_assignment_read);
2354 sth_created = true;
2355 if (MAY_HAVE_DEBUG_STMTS)
2357 root->grp_to_be_debug_replaced = 1;
2358 root->replacement_decl = create_access_replacement (root);
2362 if (covered_to < limit)
2363 hole = true;
2364 if (scalar)
2365 root->grp_total_scalarization = 0;
2368 if (!hole || root->grp_total_scalarization)
2369 root->grp_covered = 1;
2370 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2371 root->grp_unscalarized_data = 1; /* not covered and written to */
2372 return sth_created;
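/* For example (illustrative only): in

     struct S { int i; int j; } s;
     s.i = 1;
     g = s.i;

   the subtree node for s.i is a scalar that is both written and read (in the
   spirit of row "1 0 1 0" of the table above), so grp_to_be_replaced is set
   and a replacement variable is created for it, while the untouched s.j
   leaves a hole and, if s is also written as a whole, marks the root access
   as having unscalarized data. */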
2375 /* Analyze all access trees linked by next_grp by the means of
2376 analyze_access_subtree. */
2377 static bool
2378 analyze_access_trees (struct access *access)
2380 bool ret = false;
2382 while (access)
2384 if (analyze_access_subtree (access, NULL, true))
2385 ret = true;
2386 access = access->next_grp;
2389 return ret;
2392 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2393 SIZE would conflict with an already existing one. If exactly such a child
2394 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2396 static bool
2397 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2398 HOST_WIDE_INT size, struct access **exact_match)
2400 struct access *child;
2402 for (child = lacc->first_child; child; child = child->next_sibling)
2404 if (child->offset == norm_offset && child->size == size)
2406 *exact_match = child;
2407 return true;
2410 if (child->offset < norm_offset + size
2411 && child->offset + child->size > norm_offset)
2412 return true;
2415 return false;
2418 /* Create a new child access of PARENT, with all properties just like MODEL
2419 except for its offset and with its grp_write true and grp_read false.
2420 Return the new access or NULL if it cannot be created. Note that this access
2421 is created long after all splicing and sorting, it's not located in any
2422 access vector and is automatically a representative of its group. */
2424 static struct access *
2425 create_artificial_child_access (struct access *parent, struct access *model,
2426 HOST_WIDE_INT new_offset)
2428 struct access **child;
2429 tree expr = parent->base;
2431 gcc_assert (!model->grp_unscalarizable_region);
2433 struct access *access = access_pool.allocate ();
2434 memset (access, 0, sizeof (struct access));
2435 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2436 model->type))
2438 access->grp_no_warning = true;
2439 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2440 new_offset, model, NULL, false);
2443 access->base = parent->base;
2444 access->expr = expr;
2445 access->offset = new_offset;
2446 access->size = model->size;
2447 access->type = model->type;
2448 access->grp_write = true;
2449 access->grp_read = false;
2451 child = &parent->first_child;
2452 while (*child && (*child)->offset < new_offset)
2453 child = &(*child)->next_sibling;
2455 access->next_sibling = *child;
2456 *child = access;
2458 return access;
2462 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2463 true if any new subaccess was created. Additionally, if RACC is a scalar
2464 access but LACC is not, change the type of the latter, if possible. */
2466 static bool
2467 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2469 struct access *rchild;
2470 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2471 bool ret = false;
2473 if (is_gimple_reg_type (lacc->type)
2474 || lacc->grp_unscalarizable_region
2475 || racc->grp_unscalarizable_region)
2476 return false;
2478 if (is_gimple_reg_type (racc->type))
2480 if (!lacc->first_child && !racc->first_child)
2482 tree t = lacc->base;
2484 lacc->type = racc->type;
2485 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2486 lacc->offset, racc->type))
2487 lacc->expr = t;
2488 else
2490 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2491 lacc->base, lacc->offset,
2492 racc, NULL, false);
2493 lacc->grp_no_warning = true;
2496 return false;
2499 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2501 struct access *new_acc = NULL;
2502 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2504 if (rchild->grp_unscalarizable_region)
2505 continue;
2507 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2508 &new_acc))
2510 if (new_acc)
2512 rchild->grp_hint = 1;
2513 new_acc->grp_hint |= new_acc->grp_read;
2514 if (rchild->first_child)
2515 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2517 continue;
2520 rchild->grp_hint = 1;
2521 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2522 if (new_acc)
2524 ret = true;
2525 if (racc->first_child)
2526 propagate_subaccesses_across_link (new_acc, rchild);
2530 return ret;
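/* Illustrative example: given two candidate aggregates and the statements

     b.i = 5;
     a = b;

   scanning creates a child access of B for b.i and records an assign_link
   between the accesses of the assignment.  Propagation then creates an
   artificial child access of A at the corresponding normalized offset, so
   that a scalar replacement of a.i exists to receive the value of the
   replacement of b.i. */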
2533 /* Propagate all subaccesses across assignment links. */
2535 static void
2536 propagate_all_subaccesses (void)
2538 while (work_queue_head)
2540 struct access *racc = pop_access_from_work_queue ();
2541 struct assign_link *link;
2543 gcc_assert (racc->first_link);
2545 for (link = racc->first_link; link; link = link->next)
2547 struct access *lacc = link->lacc;
2549 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2550 continue;
2551 lacc = lacc->group_representative;
2552 if (propagate_subaccesses_across_link (lacc, racc)
2553 && lacc->first_link)
2554 add_access_to_work_queue (lacc);
2559 /* Go through all accesses collected throughout the (intraprocedural) analysis
2560 stage, exclude overlapping ones, identify representatives and build trees
2561 out of them, making decisions about scalarization on the way. Return true
2562 iff there are any to-be-scalarized variables after this stage. */
2564 static bool
2565 analyze_all_variable_accesses (void)
2567 int res = 0;
2568 bitmap tmp = BITMAP_ALLOC (NULL);
2569 bitmap_iterator bi;
2570 unsigned i;
2571 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2573 enum compiler_param param = optimize_speed_p
2574 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2575 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2577 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2578 fall back to a target default. */
2579 unsigned HOST_WIDE_INT max_scalarization_size
2580 = global_options_set.x_param_values[param]
2581 ? PARAM_VALUE (param)
2582 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2584 max_scalarization_size *= BITS_PER_UNIT;
2586 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2587 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2588 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2590 tree var = candidate (i);
2592 if (TREE_CODE (var) == VAR_DECL
2593 && scalarizable_type_p (TREE_TYPE (var)))
2595 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2596 <= max_scalarization_size)
2598 create_total_scalarization_access (var);
2599 completely_scalarize (var, TREE_TYPE (var), 0, var);
2600 if (dump_file && (dump_flags & TDF_DETAILS))
2602 fprintf (dump_file, "Will attempt to totally scalarize ");
2603 print_generic_expr (dump_file, var, 0);
2604 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2607 else if (dump_file && (dump_flags & TDF_DETAILS))
2609 fprintf (dump_file, "Too big to totally scalarize: ");
2610 print_generic_expr (dump_file, var, 0);
2611 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2616 bitmap_copy (tmp, candidate_bitmap);
2617 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2619 tree var = candidate (i);
2620 struct access *access;
2622 access = sort_and_splice_var_accesses (var);
2623 if (!access || !build_access_trees (access))
2624 disqualify_candidate (var,
2625 "No or inhibitingly overlapping accesses.");
2628 propagate_all_subaccesses ();
2630 bitmap_copy (tmp, candidate_bitmap);
2631 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2633 tree var = candidate (i);
2634 struct access *access = get_first_repr_for_decl (var);
2636 if (analyze_access_trees (access))
2638 res++;
2639 if (dump_file && (dump_flags & TDF_DETAILS))
2641 fprintf (dump_file, "\nAccess trees for ");
2642 print_generic_expr (dump_file, var, 0);
2643 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2644 dump_access_tree (dump_file, access);
2645 fprintf (dump_file, "\n");
2648 else
2649 disqualify_candidate (var, "No scalar replacements to be created.");
2652 BITMAP_FREE (tmp);
2654 if (res)
2656 statistics_counter_event (cfun, "Scalarized aggregates", res);
2657 return true;
2659 else
2660 return false;
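/* Note on tuning (illustrative): the total-scalarization limit computed
   above corresponds to user-visible knobs along the lines of

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 ...

   (assuming the usual spelling of these params); when they are not given,
   the target's move ratio times the word size is used instead. */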
2663 /* Generate statements copying scalar replacements of accesses within a subtree
2664 into or out of AGG. ACCESS, all its children, siblings and their children
2665 are to be processed. AGG is an aggregate type expression (can be a
2666 declaration but does not have to be, it can for example also be a mem_ref or
2667 a series of handled components). TOP_OFFSET is the offset of the processed
2668 subtree which has to be subtracted from offsets of individual accesses to
2669 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2670 replacements in the interval <start_offset, start_offset + chunk_size>,
2671 otherwise copy all. GSI is a statement iterator used to place the new
2672 statements. WRITE should be true when the statements should write from AGG
2673 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2674 statements will be added after the current statement in GSI, they will be
2675 added before the statement otherwise. */
2677 static void
2678 generate_subtree_copies (struct access *access, tree agg,
2679 HOST_WIDE_INT top_offset,
2680 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2681 gimple_stmt_iterator *gsi, bool write,
2682 bool insert_after, location_t loc)
2686 if (chunk_size && access->offset >= start_offset + chunk_size)
2687 return;
2689 if (access->grp_to_be_replaced
2690 && (chunk_size == 0
2691 || access->offset + access->size > start_offset))
2693 tree expr, repl = get_access_replacement (access);
2694 gassign *stmt;
2696 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2697 access, gsi, insert_after);
2699 if (write)
2701 if (access->grp_partial_lhs)
2702 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2703 !insert_after,
2704 insert_after ? GSI_NEW_STMT
2705 : GSI_SAME_STMT);
2706 stmt = gimple_build_assign (repl, expr);
2708 else
2710 TREE_NO_WARNING (repl) = 1;
2711 if (access->grp_partial_lhs)
2712 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2713 !insert_after,
2714 insert_after ? GSI_NEW_STMT
2715 : GSI_SAME_STMT);
2716 stmt = gimple_build_assign (expr, repl);
2718 gimple_set_location (stmt, loc);
2720 if (insert_after)
2721 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2722 else
2723 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2724 update_stmt (stmt);
2725 sra_stats.subtree_copies++;
2727 else if (write
2728 && access->grp_to_be_debug_replaced
2729 && (chunk_size == 0
2730 || access->offset + access->size > start_offset))
2732 gdebug *ds;
2733 tree drhs = build_debug_ref_for_model (loc, agg,
2734 access->offset - top_offset,
2735 access);
2736 ds = gimple_build_debug_bind (get_access_replacement (access),
2737 drhs, gsi_stmt (*gsi));
2738 if (insert_after)
2739 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2740 else
2741 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2744 if (access->first_child)
2745 generate_subtree_copies (access->first_child, agg, top_offset,
2746 start_offset, chunk_size, gsi,
2747 write, insert_after, loc);
2749 access = access->next_sibling;
2751 while (access);
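/* A hypothetical invocation for illustration: with AGG being the aggregate
   expression "s", a subtree containing a replaced access for s.i with
   replacement SR.1 (made-up name), and WRITE being false, the function emits

     s.i = SR.1;

   before (or after, per INSERT_AFTER) the statement at GSI; with WRITE true
   it emits the copy in the opposite direction, SR.1 = s.i. */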
2754 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2755 root of the subtree to be processed. GSI is the statement iterator used
2756 for inserting statements which are added after the current statement if
2757 INSERT_AFTER is true or before it otherwise. */
2759 static void
2760 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2761 bool insert_after, location_t loc)
2764 struct access *child;
2766 if (access->grp_to_be_replaced)
2768 gassign *stmt;
2770 stmt = gimple_build_assign (get_access_replacement (access),
2771 build_zero_cst (access->type));
2772 if (insert_after)
2773 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2774 else
2775 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2776 update_stmt (stmt);
2777 gimple_set_location (stmt, loc);
2779 else if (access->grp_to_be_debug_replaced)
2781 gdebug *ds
2782 = gimple_build_debug_bind (get_access_replacement (access),
2783 build_zero_cst (access->type),
2784 gsi_stmt (*gsi));
2785 if (insert_after)
2786 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2787 else
2788 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2791 for (child = access->first_child; child; child = child->next_sibling)
2792 init_subtree_with_zero (child, gsi, insert_after, loc);
2795 /* Clobber all scalar replacements in an access subtree. ACCESS is the
2796 root of the subtree to be processed. GSI is the statement iterator used
2797 for inserting statements which are added after the current statement if
2798 INSERT_AFTER is true or before it otherwise. */
2800 static void
2801 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2802 bool insert_after, location_t loc)
2805 struct access *child;
2807 if (access->grp_to_be_replaced)
2809 tree rep = get_access_replacement (access);
2810 tree clobber = build_constructor (access->type, NULL);
2811 TREE_THIS_VOLATILE (clobber) = 1;
2812 gimple *stmt = gimple_build_assign (rep, clobber);
2814 if (insert_after)
2815 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2816 else
2817 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2818 update_stmt (stmt);
2819 gimple_set_location (stmt, loc);
2822 for (child = access->first_child; child; child = child->next_sibling)
2823 clobber_subtree (child, gsi, insert_after, loc);
2826 /* Search for an access representative for the given expression EXPR and
2827 return it or NULL if it cannot be found. */
2829 static struct access *
2830 get_access_for_expr (tree expr)
2832 HOST_WIDE_INT offset, size, max_size;
2833 tree base;
2835 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2836 a different size than the size of its argument and we need the latter
2837 one. */
2838 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2839 expr = TREE_OPERAND (expr, 0);
2841 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2842 if (max_size == -1 || !DECL_P (base))
2843 return NULL;
2845 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2846 return NULL;
2848 return get_var_base_offset_size_access (base, offset, max_size);
2851 /* Replace the expression EXPR with a scalar replacement if there is one and
2852 generate other statements to do type conversion or subtree copying if
2853 necessary. GSI is used to place newly created statements, WRITE is true if
2854 the expression is being written to (it is on the LHS of a statement or an output
2855 in an assembly statement). */
2857 static bool
2858 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2860 location_t loc;
2861 struct access *access;
2862 tree type, bfr, orig_expr;
2864 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2866 bfr = *expr;
2867 expr = &TREE_OPERAND (*expr, 0);
2869 else
2870 bfr = NULL_TREE;
2872 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2873 expr = &TREE_OPERAND (*expr, 0);
2874 access = get_access_for_expr (*expr);
2875 if (!access)
2876 return false;
2877 type = TREE_TYPE (*expr);
2878 orig_expr = *expr;
2880 loc = gimple_location (gsi_stmt (*gsi));
2881 gimple_stmt_iterator alt_gsi = gsi_none ();
2882 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2884 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2885 gsi = &alt_gsi;
2888 if (access->grp_to_be_replaced)
2890 tree repl = get_access_replacement (access);
2891 /* If we replace a non-register typed access simply use the original
2892 access expression to extract the scalar component afterwards.
2893 This happens if scalarizing a function return value or parameter
2894 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2895 gcc.c-torture/compile/20011217-1.c.
2897 We also want to use this when accessing a complex or vector which can
2898 be accessed as a different type too, potentially creating a need for
2899 type conversion (see PR42196) and when scalarized unions are involved
2900 in assembler statements (see PR42398). */
2901 if (!useless_type_conversion_p (type, access->type))
2903 tree ref;
2905 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
2907 if (write)
2909 gassign *stmt;
2911 if (access->grp_partial_lhs)
2912 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2913 false, GSI_NEW_STMT);
2914 stmt = gimple_build_assign (repl, ref);
2915 gimple_set_location (stmt, loc);
2916 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2918 else
2920 gassign *stmt;
2922 if (access->grp_partial_lhs)
2923 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2924 true, GSI_SAME_STMT);
2925 stmt = gimple_build_assign (ref, repl);
2926 gimple_set_location (stmt, loc);
2927 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2930 else
2931 *expr = repl;
2932 sra_stats.exprs++;
2934 else if (write && access->grp_to_be_debug_replaced)
2936 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
2937 NULL_TREE,
2938 gsi_stmt (*gsi));
2939 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2942 if (access->first_child)
2944 HOST_WIDE_INT start_offset, chunk_size;
2945 if (bfr
2946 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
2947 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
2949 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
2950 start_offset = access->offset
2951 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
2953 else
2954 start_offset = chunk_size = 0;
2956 generate_subtree_copies (access->first_child, orig_expr, access->offset,
2957 start_offset, chunk_size, gsi, write, write,
2958 loc);
2960 return true;
2963 /* Where scalar replacements of the RHS have been written to when a replacement
2964 of the LHS of an assignment cannot be directly loaded from a replacement of
2965 the RHS. */
2966 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2967 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2968 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2970 struct subreplacement_assignment_data
2972 /* Offset of the access representing the lhs of the assignment. */
2973 HOST_WIDE_INT left_offset;
2975 /* LHS and RHS of the original assignment. */
2976 tree assignment_lhs, assignment_rhs;
2978 /* Access representing the rhs of the whole assignment. */
2979 struct access *top_racc;
2981 /* Stmt iterator used for statement insertions after the original assignment.
2982 It points to the main GSI used to traverse a BB during function body
2983 modification. */
2984 gimple_stmt_iterator *new_gsi;
2986 /* Stmt iterator used for statement insertions before the original
2987 assignment. Keeps on pointing to the original statement. */
2988 gimple_stmt_iterator old_gsi;
2990 /* Location of the assignment. */
2991 location_t loc;
2993 /* Keeps the information whether we have needed to refresh replacements of
2994 the LHS and from which side of the assignments this takes place. */
2995 enum unscalarized_data_handling refreshed;
2998 /* Store all replacements in the access tree rooted in SAD->top_racc either to
2999 their base aggregate if there are unscalarized data or directly to the LHS
3000 of the statement SAD describes otherwise. */
3002 static void
3003 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3005 tree src;
3006 if (sad->top_racc->grp_unscalarized_data)
3008 src = sad->assignment_rhs;
3009 sad->refreshed = SRA_UDH_RIGHT;
3011 else
3013 src = sad->assignment_lhs;
3014 sad->refreshed = SRA_UDH_LEFT;
3016 generate_subtree_copies (sad->top_racc->first_child, src,
3017 sad->top_racc->offset, 0, 0,
3018 &sad->old_gsi, false, false, sad->loc);
3021 /* Try to generate statements to load all sub-replacements in an access subtree
3022 formed by children of LACC from scalar replacements in the SAD->top_racc
3023 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3024 and load the accesses from it. */
3026 static void
3027 load_assign_lhs_subreplacements (struct access *lacc,
3028 struct subreplacement_assignment_data *sad)
3030 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3032 HOST_WIDE_INT offset;
3033 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3035 if (lacc->grp_to_be_replaced)
3037 struct access *racc;
3038 gassign *stmt;
3039 tree rhs;
3041 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3042 if (racc && racc->grp_to_be_replaced)
3044 rhs = get_access_replacement (racc);
3045 if (!useless_type_conversion_p (lacc->type, racc->type))
3046 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3047 lacc->type, rhs);
3049 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3050 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3051 NULL_TREE, true, GSI_SAME_STMT);
3053 else
3055 /* No suitable access on the right hand side, need to load from
3056 the aggregate. See if we have to update it first... */
3057 if (sad->refreshed == SRA_UDH_NONE)
3058 handle_unscalarized_data_in_subtree (sad);
3060 if (sad->refreshed == SRA_UDH_LEFT)
3061 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3062 lacc->offset - sad->left_offset,
3063 lacc, sad->new_gsi, true);
3064 else
3065 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3066 lacc->offset - sad->left_offset,
3067 lacc, sad->new_gsi, true);
3068 if (lacc->grp_partial_lhs)
3069 rhs = force_gimple_operand_gsi (sad->new_gsi,
3070 rhs, true, NULL_TREE,
3071 false, GSI_NEW_STMT);
3074 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3075 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3076 gimple_set_location (stmt, sad->loc);
3077 update_stmt (stmt);
3078 sra_stats.subreplacements++;
3080 else
3082 if (sad->refreshed == SRA_UDH_NONE
3083 && lacc->grp_read && !lacc->grp_covered)
3084 handle_unscalarized_data_in_subtree (sad);
3086 if (lacc && lacc->grp_to_be_debug_replaced)
3088 gdebug *ds;
3089 tree drhs;
3090 struct access *racc = find_access_in_subtree (sad->top_racc,
3091 offset,
3092 lacc->size);
3094 if (racc && racc->grp_to_be_replaced)
3096 if (racc->grp_write)
3097 drhs = get_access_replacement (racc);
3098 else
3099 drhs = NULL;
3101 else if (sad->refreshed == SRA_UDH_LEFT)
3102 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3103 lacc->offset, lacc);
3104 else if (sad->refreshed == SRA_UDH_RIGHT)
3105 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3106 offset, lacc);
3107 else
3108 drhs = NULL_TREE;
3109 if (drhs
3110 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3111 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3112 lacc->type, drhs);
3113 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3114 drhs, gsi_stmt (sad->old_gsi));
3115 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3119 if (lacc->first_child)
3120 load_assign_lhs_subreplacements (lacc, sad);
3124 /* Result code for SRA assignment modification. */
3125 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3126 SRA_AM_MODIFIED, /* stmt changed but not
3127 removed */
3128 SRA_AM_REMOVED }; /* stmt eliminated */
3130 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT is the assignment
3131 statement and GSI is the statement iterator pointing at it. Returns
3132 the same values as sra_modify_assign. */
3134 static enum assignment_mod_result
3135 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3137 tree lhs = gimple_assign_lhs (stmt);
3138 struct access *acc = get_access_for_expr (lhs);
3139 if (!acc)
3140 return SRA_AM_NONE;
3141 location_t loc = gimple_location (stmt);
3143 if (gimple_clobber_p (stmt))
3145 /* Clobber the replacement variable. */
3146 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3147 /* Remove clobbers of fully scalarized variables, they are dead. */
3148 if (acc->grp_covered)
3150 unlink_stmt_vdef (stmt);
3151 gsi_remove (gsi, true);
3152 release_defs (stmt);
3153 return SRA_AM_REMOVED;
3155 else
3156 return SRA_AM_MODIFIED;
3159 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
3161 /* I have never seen this code path trigger but if it can happen the
3162 following should handle it gracefully. */
3163 if (access_has_children_p (acc))
3164 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3165 true, true, loc);
3166 return SRA_AM_MODIFIED;
3169 if (acc->grp_covered)
3171 init_subtree_with_zero (acc, gsi, false, loc);
3172 unlink_stmt_vdef (stmt);
3173 gsi_remove (gsi, true);
3174 release_defs (stmt);
3175 return SRA_AM_REMOVED;
3177 else
3179 init_subtree_with_zero (acc, gsi, true, loc);
3180 return SRA_AM_MODIFIED;
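/* For illustration (hypothetical replacement names): a zeroing assignment

     s = (struct S) {};

   where s is fully scalarized becomes

     SR.1 = 0;
     SR.2 = 0.0;

   and the original statement is removed (SRA_AM_REMOVED), while a clobber of
   a fully scalarized s is simply deleted after its replacements have been
   clobbered. */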
3184 /* Create and return a new suitable default definition SSA_NAME for RACC which
3185 is an access describing an uninitialized part of an aggregate that is being
3186 loaded. */
3188 static tree
3189 get_repl_default_def_ssa_name (struct access *racc)
3191 gcc_checking_assert (!racc->grp_to_be_replaced
3192 && !racc->grp_to_be_debug_replaced);
3193 if (!racc->replacement_decl)
3194 racc->replacement_decl = create_access_replacement (racc);
3195 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3198 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3199 bit-field field declaration somewhere in it. */
3201 static inline bool
3202 contains_vce_or_bfcref_p (const_tree ref)
3204 while (handled_component_p (ref))
3206 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3207 || (TREE_CODE (ref) == COMPONENT_REF
3208 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3209 return true;
3210 ref = TREE_OPERAND (ref, 0);
3213 return false;
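/* Illustrative examples of references for which the above returns true:
   VIEW_CONVERT_EXPR<int>(s.f), or s.b where b is declared as a bit-field
   such as "int b : 3;".  A plain s.i without either construct yields
   false. */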
3216 /* Examine both sides of the assignment statement pointed to by STMT, replace
3217 them with a scalar replacement if there is one and generate copying of
3218 replacements if scalarized aggregates have been used in the assignment. GSI
3219 is used to hold generated statements for type conversions and subtree
3220 copying. */
3222 static enum assignment_mod_result
3223 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3225 struct access *lacc, *racc;
3226 tree lhs, rhs;
3227 bool modify_this_stmt = false;
3228 bool force_gimple_rhs = false;
3229 location_t loc;
3230 gimple_stmt_iterator orig_gsi = *gsi;
3232 if (!gimple_assign_single_p (stmt))
3233 return SRA_AM_NONE;
3234 lhs = gimple_assign_lhs (stmt);
3235 rhs = gimple_assign_rhs1 (stmt);
3237 if (TREE_CODE (rhs) == CONSTRUCTOR)
3238 return sra_modify_constructor_assign (stmt, gsi);
3240 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3241 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3242 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3244 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3245 gsi, false);
3246 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3247 gsi, true);
3248 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3251 lacc = get_access_for_expr (lhs);
3252 racc = get_access_for_expr (rhs);
3253 if (!lacc && !racc)
3254 return SRA_AM_NONE;
3256 loc = gimple_location (stmt);
3257 if (lacc && lacc->grp_to_be_replaced)
3259 lhs = get_access_replacement (lacc);
3260 gimple_assign_set_lhs (stmt, lhs);
3261 modify_this_stmt = true;
3262 if (lacc->grp_partial_lhs)
3263 force_gimple_rhs = true;
3264 sra_stats.exprs++;
3267 if (racc && racc->grp_to_be_replaced)
3269 rhs = get_access_replacement (racc);
3270 modify_this_stmt = true;
3271 if (racc->grp_partial_lhs)
3272 force_gimple_rhs = true;
3273 sra_stats.exprs++;
3275 else if (racc
3276 && !racc->grp_unscalarized_data
3277 && TREE_CODE (lhs) == SSA_NAME
3278 && !access_has_replacements_p (racc))
3280 rhs = get_repl_default_def_ssa_name (racc);
3281 modify_this_stmt = true;
3282 sra_stats.exprs++;
3285 if (modify_this_stmt)
3287 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3289 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3290 ??? This should move to fold_stmt which we simply should
3291 call after building a VIEW_CONVERT_EXPR here. */
3292 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3293 && !contains_bitfld_component_ref_p (lhs))
3295 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3296 gimple_assign_set_lhs (stmt, lhs);
3298 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3299 && !contains_vce_or_bfcref_p (rhs))
3300 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3302 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3304 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3305 rhs);
3306 if (is_gimple_reg_type (TREE_TYPE (lhs))
3307 && TREE_CODE (lhs) != SSA_NAME)
3308 force_gimple_rhs = true;
3313 if (lacc && lacc->grp_to_be_debug_replaced)
3315 tree dlhs = get_access_replacement (lacc);
3316 tree drhs = unshare_expr (rhs);
3317 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3319 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3320 && !contains_vce_or_bfcref_p (drhs))
3321 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3322 if (drhs
3323 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3324 TREE_TYPE (drhs)))
3325 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3326 TREE_TYPE (dlhs), drhs);
3328 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3329 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3332 /* From this point on, the function deals with assignments in between
3333 aggregates when at least one has scalar reductions of some of its
3334 components. There are three possible scenarios: 1) both the LHS and RHS have
3335 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3337 In the first case, we would like to load the LHS components from RHS
3338 components whenever possible. If that is not possible, we would like to
3339 read it directly from the RHS (after updating it by storing in it its own
3340 components). If there are some necessary unscalarized data in the LHS,
3341 those will be loaded by the original assignment too. If neither of these
3342 cases happen, the original statement can be removed. Most of this is done
3343 by load_assign_lhs_subreplacements.
3345 In the second case, we would like to store all RHS scalarized components
3346 directly into LHS and if they cover the aggregate completely, remove the
3347 statement too. In the third case, we want the LHS components to be loaded
3348 directly from the RHS (DSE will remove the original statement if it
3349 becomes redundant).
3351 This is a bit complex but manageable when types match and when unions do
3352 not cause confusion in a way that we cannot really load a component of LHS
3353 from the RHS or vice versa (the access representing this level can have
3354 subaccesses that are accessible only through a different union field at a
3355 higher level - different from the one used in the examined expression).
3356 Unions are fun.
3358 Therefore, I specially handle a fourth case, happening when there is a
3359 specific type cast or it is impossible to locate a scalarized subaccess on
3360 the other side of the expression. If that happens, I simply "refresh" the
3361 RHS by storing in it its scalarized components, leave the original statement
3362 there to do the copying and then load the scalar replacements of the LHS.
3363 This is what the first branch does. */
3365 if (modify_this_stmt
3366 || gimple_has_volatile_ops (stmt)
3367 || contains_vce_or_bfcref_p (rhs)
3368 || contains_vce_or_bfcref_p (lhs)
3369 || stmt_ends_bb_p (stmt))
3371 if (access_has_children_p (racc))
3372 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3373 gsi, false, false, loc);
3374 if (access_has_children_p (lacc))
3376 gimple_stmt_iterator alt_gsi = gsi_none ();
3377 if (stmt_ends_bb_p (stmt))
3379 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3380 gsi = &alt_gsi;
3382 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3383 gsi, true, true, loc);
3385 sra_stats.separate_lhs_rhs_handling++;
3387 /* This gimplification must be done after generate_subtree_copies,
3388 lest we insert the subtree copies in the middle of the gimplified
3389 sequence. */
3390 if (force_gimple_rhs)
3391 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3392 true, GSI_SAME_STMT);
3393 if (gimple_assign_rhs1 (stmt) != rhs)
3395 modify_this_stmt = true;
3396 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3397 gcc_assert (stmt == gsi_stmt (orig_gsi));
3400 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3402 else
3404 if (access_has_children_p (lacc)
3405 && access_has_children_p (racc)
3406 /* When an access represents an unscalarizable region, it usually
3407 represents accesses with variable offset and thus must not be used
3408 to generate new memory accesses. */
3409 && !lacc->grp_unscalarizable_region
3410 && !racc->grp_unscalarizable_region)
3412 struct subreplacement_assignment_data sad;
3414 sad.left_offset = lacc->offset;
3415 sad.assignment_lhs = lhs;
3416 sad.assignment_rhs = rhs;
3417 sad.top_racc = racc;
3418 sad.old_gsi = *gsi;
3419 sad.new_gsi = gsi;
3420 sad.loc = gimple_location (stmt);
3421 sad.refreshed = SRA_UDH_NONE;
3423 if (lacc->grp_read && !lacc->grp_covered)
3424 handle_unscalarized_data_in_subtree (&sad);
3426 load_assign_lhs_subreplacements (lacc, &sad);
3427 if (sad.refreshed != SRA_UDH_RIGHT)
3429 gsi_next (gsi);
3430 unlink_stmt_vdef (stmt);
3431 gsi_remove (&sad.old_gsi, true);
3432 release_defs (stmt);
3433 sra_stats.deleted++;
3434 return SRA_AM_REMOVED;
3437 else
3439 if (access_has_children_p (racc)
3440 && !racc->grp_unscalarized_data)
3442 if (dump_file)
3444 fprintf (dump_file, "Removing load: ");
3445 print_gimple_stmt (dump_file, stmt, 0, 0);
3447 generate_subtree_copies (racc->first_child, lhs,
3448 racc->offset, 0, 0, gsi,
3449 false, false, loc);
3450 gcc_assert (stmt == gsi_stmt (*gsi));
3451 unlink_stmt_vdef (stmt);
3452 gsi_remove (gsi, true);
3453 release_defs (stmt);
3454 sra_stats.deleted++;
3455 return SRA_AM_REMOVED;
3457 /* Restore the aggregate RHS from its components so the
3458 prevailing aggregate copy does the right thing. */
3459 if (access_has_children_p (racc))
3460 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3461 gsi, false, false, loc);
3462 /* Re-load the components of the aggregate copy destination.
3463 But use the RHS aggregate to load from to expose more
3464 optimization opportunities. */
3465 if (access_has_children_p (lacc))
3466 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3467 0, 0, gsi, true, true, loc);
3470 return SRA_AM_NONE;
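/* A sketch of scenario 2) above (illustrative, with hypothetical replacement
   names): for

     s = t;

   where only T has scalarized components, loads such as

     s.i = SR.t_i;
     s.f = SR.t_f;

   are emitted, and when T's replacements cover the whole aggregate, the
   original copy is deleted (SRA_AM_REMOVED); otherwise it is kept to move
   the unscalarized data. */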
3474 /* Traverse the function body and make all modifications as decided in
3475 analyze_all_variable_accesses. Return true iff the CFG has been
3476 changed. */
3478 static bool
3479 sra_modify_function_body (void)
3481 bool cfg_changed = false;
3482 basic_block bb;
3484 FOR_EACH_BB_FN (bb, cfun)
3486 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3487 while (!gsi_end_p (gsi))
3489 gimple *stmt = gsi_stmt (gsi);
3490 enum assignment_mod_result assign_result;
3491 bool modified = false, deleted = false;
3492 tree *t;
3493 unsigned i;
3495 switch (gimple_code (stmt))
3497 case GIMPLE_RETURN:
3498 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3499 if (*t != NULL_TREE)
3500 modified |= sra_modify_expr (t, &gsi, false);
3501 break;
3503 case GIMPLE_ASSIGN:
3504 assign_result = sra_modify_assign (stmt, &gsi);
3505 modified |= assign_result == SRA_AM_MODIFIED;
3506 deleted = assign_result == SRA_AM_REMOVED;
3507 break;
3509 case GIMPLE_CALL:
3510 /* Operands must be processed before the lhs. */
3511 for (i = 0; i < gimple_call_num_args (stmt); i++)
3513 t = gimple_call_arg_ptr (stmt, i);
3514 modified |= sra_modify_expr (t, &gsi, false);
3517 if (gimple_call_lhs (stmt))
3519 t = gimple_call_lhs_ptr (stmt);
3520 modified |= sra_modify_expr (t, &gsi, true);
3522 break;
3524 case GIMPLE_ASM:
3526 gasm *asm_stmt = as_a <gasm *> (stmt);
3527 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3529 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3530 modified |= sra_modify_expr (t, &gsi, false);
3532 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3534 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3535 modified |= sra_modify_expr (t, &gsi, true);
3538 break;
3540 default:
3541 break;
3544 if (modified)
3546 update_stmt (stmt);
3547 if (maybe_clean_eh_stmt (stmt)
3548 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3549 cfg_changed = true;
3551 if (!deleted)
3552 gsi_next (&gsi);
3556 gsi_commit_edge_inserts ();
3557 return cfg_changed;
3560 /* Generate statements initializing scalar replacements of parts of function
3561 parameters. */
3563 static void
3564 initialize_parameter_reductions (void)
3566 gimple_stmt_iterator gsi;
3567 gimple_seq seq = NULL;
3568 tree parm;
3570 gsi = gsi_start (seq);
3571 for (parm = DECL_ARGUMENTS (current_function_decl);
3572 parm;
3573 parm = DECL_CHAIN (parm))
3575 vec<access_p> *access_vec;
3576 struct access *access;
3578 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3579 continue;
3580 access_vec = get_base_access_vector (parm);
3581 if (!access_vec)
3582 continue;
3584 for (access = (*access_vec)[0];
3585 access;
3586 access = access->next_grp)
3587 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3588 EXPR_LOCATION (parm));
3591 seq = gsi_seq (gsi);
3592 if (seq)
3593 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
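/* For illustration: if a parameter P of an aggregate type has a replaced
   access for p.i, the sequence built above contains something like

     SR.p_i = p.i;

   (hypothetical replacement name) and is placed on the single edge leaving
   the entry block, so every use of the replacement sees the incoming
   value. */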
3596 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3597 it reveals there are components of some aggregates to be scalarized, it runs
3598 the required transformations. */
3599 static unsigned int
3600 perform_intra_sra (void)
3602 int ret = 0;
3603 sra_initialize ();
3605 if (!find_var_candidates ())
3606 goto out;
3608 if (!scan_function ())
3609 goto out;
3611 if (!analyze_all_variable_accesses ())
3612 goto out;
3614 if (sra_modify_function_body ())
3615 ret = TODO_update_ssa | TODO_cleanup_cfg;
3616 else
3617 ret = TODO_update_ssa;
3618 initialize_parameter_reductions ();
3620 statistics_counter_event (cfun, "Scalar replacements created",
3621 sra_stats.replacements);
3622 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3623 statistics_counter_event (cfun, "Subtree copy stmts",
3624 sra_stats.subtree_copies);
3625 statistics_counter_event (cfun, "Subreplacement stmts",
3626 sra_stats.subreplacements);
3627 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3628 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3629 sra_stats.separate_lhs_rhs_handling);
3631 out:
3632 sra_deinitialize ();
3633 return ret;
3636 /* Perform early intraprocedural SRA. */
3637 static unsigned int
3638 early_intra_sra (void)
3640 sra_mode = SRA_MODE_EARLY_INTRA;
3641 return perform_intra_sra ();
3644 /* Perform "late" intraprocedural SRA. */
3645 static unsigned int
3646 late_intra_sra (void)
3648 sra_mode = SRA_MODE_INTRA;
3649 return perform_intra_sra ();
3653 static bool
3654 gate_intra_sra (void)
3656 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3660 namespace {
3662 const pass_data pass_data_sra_early =
3664 GIMPLE_PASS, /* type */
3665 "esra", /* name */
3666 OPTGROUP_NONE, /* optinfo_flags */
3667 TV_TREE_SRA, /* tv_id */
3668 ( PROP_cfg | PROP_ssa ), /* properties_required */
3669 0, /* properties_provided */
3670 0, /* properties_destroyed */
3671 0, /* todo_flags_start */
3672 TODO_update_ssa, /* todo_flags_finish */
3675 class pass_sra_early : public gimple_opt_pass
3677 public:
3678 pass_sra_early (gcc::context *ctxt)
3679 : gimple_opt_pass (pass_data_sra_early, ctxt)
3682 /* opt_pass methods: */
3683 virtual bool gate (function *) { return gate_intra_sra (); }
3684 virtual unsigned int execute (function *) { return early_intra_sra (); }
3686 }; // class pass_sra_early
3688 } // anon namespace
3690 gimple_opt_pass *
3691 make_pass_sra_early (gcc::context *ctxt)
3693 return new pass_sra_early (ctxt);
3696 namespace {
3698 const pass_data pass_data_sra =
3700 GIMPLE_PASS, /* type */
3701 "sra", /* name */
3702 OPTGROUP_NONE, /* optinfo_flags */
3703 TV_TREE_SRA, /* tv_id */
3704 ( PROP_cfg | PROP_ssa ), /* properties_required */
3705 0, /* properties_provided */
3706 0, /* properties_destroyed */
3707 TODO_update_address_taken, /* todo_flags_start */
3708 TODO_update_ssa, /* todo_flags_finish */
3711 class pass_sra : public gimple_opt_pass
3713 public:
3714 pass_sra (gcc::context *ctxt)
3715 : gimple_opt_pass (pass_data_sra, ctxt)
3718 /* opt_pass methods: */
3719 virtual bool gate (function *) { return gate_intra_sra (); }
3720 virtual unsigned int execute (function *) { return late_intra_sra (); }
3722 }; // class pass_sra
3724 } // anon namespace
3726 gimple_opt_pass *
3727 make_pass_sra (gcc::context *ctxt)
3729 return new pass_sra (ctxt);
3733 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3734 parameter. */
3736 static bool
3737 is_unused_scalar_param (tree parm)
3739 tree name;
3740 return (is_gimple_reg (parm)
3741 && (!(name = ssa_default_def (cfun, parm))
3742 || has_zero_uses (name)));
3745 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3746 examine whether there are any direct or otherwise infeasible ones. If so,
3747 return true, otherwise return false. PARM must be a gimple register with a
3748 non-NULL default definition. */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple *stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
        continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          while (handled_component_p (lhs))
            lhs = TREE_OPERAND (lhs, 0);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_OPERAND (lhs, 0) == name
              && integer_zerop (TREE_OPERAND (lhs, 1))
              && types_compatible_p (TREE_TYPE (lhs),
                                     TREE_TYPE (TREE_TYPE (name)))
              && !TREE_THIS_VOLATILE (lhs))
            uses_ok++;
        }
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          while (handled_component_p (rhs))
            rhs = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (rhs) == MEM_REF
              && TREE_OPERAND (rhs, 0) == name
              && integer_zerop (TREE_OPERAND (rhs, 1))
              && types_compatible_p (TREE_TYPE (rhs),
                                     TREE_TYPE (TREE_TYPE (name)))
              && !TREE_THIS_VOLATILE (rhs))
            uses_ok++;
        }
      else if (is_gimple_call (stmt))
        {
          unsigned i;
          for (i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              while (handled_component_p (arg))
                arg = TREE_OPERAND (arg, 0);
              if (TREE_CODE (arg) == MEM_REF
                  && TREE_OPERAND (arg, 0) == name
                  && integer_zerop (TREE_OPERAND (arg, 1))
                  && types_compatible_p (TREE_TYPE (arg),
                                         TREE_TYPE (TREE_TYPE (name)))
                  && !TREE_THIS_VOLATILE (arg))
                uses_ok++;
            }
        }

      /* If the number of valid uses does not match the number of
         uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
        --uses_ok;

      if (uses_ok != 0)
        ret = true;

      if (ret)
        BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
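
/* To illustrate the distinction this function makes, consider a hypothetical
   example (REGISTER_PTR is an arbitrary external function, not from the GCC
   sources):

     int
     bar (int *p)
     {
       return *p;          // plain dereference, a handled use
     }

     int
     baz (int *p)
     {
       register_ptr (p);   // the pointer value itself escapes, a direct use
       return *p;
     }

   In BAR the only use of the default definition of P is a MEM_REF with zero
   offset, so the function returns false.  In BAZ the call uses the pointer
   value itself, the counts of handled and total uses differ, and the function
   returns true, which keeps P from being passed by value instead.  */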

/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree_node **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
          || TREE_ADDRESSABLE (parm)
          || (!is_gimple_reg_type (type) && is_va_list_type (type)))
        continue;

      if (is_unused_scalar_param (parm))
        {
          ret = true;
          continue;
        }

      if (POINTER_TYPE_P (type))
        {
          type = TREE_TYPE (type);

          if (TREE_CODE (type) == FUNCTION_TYPE
              || TYPE_VOLATILE (type)
              || (TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
              || !is_gimple_reg (parm)
              || is_va_list_type (type)
              || ptr_parm_has_direct_uses (parm))
            continue;
        }
      else if (!AGGREGATE_TYPE_P (type))
        continue;

      if (!COMPLETE_TYPE_P (type)
          || !tree_fits_uhwi_p (TYPE_SIZE (type))
          || tree_to_uhwi (TYPE_SIZE (type)) == 0
          || (AGGREGATE_TYPE_P (type)
              && type_internals_preclude_sra_p (type, &msg)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
      *slot = parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
          print_generic_expr (dump_file, parm, 0);
          fprintf (dump_file, "\n");
        }
    }

  func_param_count = count;
  return ret;
}
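
/* A hypothetical example of the filtering above (illustration only): for

     struct S { int a, b; };
     int use_params (struct S s, volatile int v, struct S *p);

   S is a complete aggregate of known non-zero size and becomes a candidate,
   V is rejected because it is volatile, and P is a candidate only as long as
   the pointed-to type passes the same checks and P has no direct uses.  */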

/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
                     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}

/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
           repr;
           repr = repr->next_grp)
        {
          struct access *access;
          bitmap visited;
          ao_ref ar;

          if (no_accesses_p (repr))
            continue;
          if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
              || repr->grp_maybe_modified)
            continue;

          ao_ref_init (&ar, repr->expr);
          visited = BITMAP_ALLOC (NULL);
          for (access = repr; access; access = access->next_sibling)
            {
              /* All accesses are read ones, otherwise grp_maybe_modified would
                 be trivially set.  */
              walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
                                  mark_maybe_modified, repr, &visited);
              if (repr->grp_maybe_modified)
                break;
            }
          BITMAP_FREE (visited);
        }
    }
}
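
/* A hypothetical example of an indirect modification this analysis catches,
   assuming a global 'int *g' that may alias the parameter:

     int
     f (int *p)
     {
       *g = 5;
       return *p;
     }

   Walking the virtual definitions reaching the load of *P finds the store
   through *G, mark_maybe_modified sets grp_maybe_modified on the
   representative, and P is not turned into a by-value parameter.  */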

/* Propagate distances in bb_dereferences in the direction opposite to that of
   the control flow edges, in each step storing the maximum of the current
   value and the minimum of all successors.  These steps are repeated until
   the table stabilizes.  Note that BBs which might terminate the function
   (according to the final_bbs bitmap) are never updated in this way.  */

static void
propagate_dereference_distances (void)
{
  basic_block bb;

  auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
        continue;

      for (i = 0; i < func_param_count; i++)
        {
          int idx = bb->index * func_param_count + i;
          bool first = true;
          HOST_WIDE_INT inh = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              int succ_idx = e->dest->index * func_param_count + i;

              /* The exit block carries no dereference information.  */
              if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                continue;

              if (first)
                {
                  first = false;
                  inh = bb_dereferences[succ_idx];
                }
              else if (bb_dereferences[succ_idx] < inh)
                inh = bb_dereferences[succ_idx];
            }

          if (!first && bb_dereferences[idx] < inh)
            {
              bb_dereferences[idx] = inh;
              change = true;
            }
        }

      if (change && !bitmap_bit_p (final_bbs, bb->index))
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            if (e->src->aux)
              continue;

            e->src->aux = e->src;
            queue.quick_push (e->src);
          }
    }
}
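
/* A small worked example of the propagation, assuming a diamond-shaped CFG
   (illustration only):

            BB2 (deref distance 0)
           /   \
        BB3     BB4
        (8)     (32)
           \   /
            BB5

   Processing BB2, the minimum over its successors is min (8, 32) = 8, which
   is larger than the current value 0, so the bb_dereferences entry of BB2
   becomes 8: on every path through BB2 at least the first 8 bits of the
   parameter are certainly dereferenced.  The distance 32 of BB4 alone proves
   nothing for BB2 because the path through BB3 might be taken.  */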

/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (f, "%s", str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          int i;
          for (i = 0; i < func_param_count; i++)
            {
              int idx = bb->index * func_param_count + i;
              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
            }
        }
      fprintf (f, "\n");
    }
  fprintf (f, "\n");
}

/* Determine which (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   For each pointer parameter, the maximum dereferenced "distance," i.e. the
   offset + size of the accessed part, is computed rather than a simple
   boolean, in order to handle cases when only a fraction of the whole
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c
   for an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table before propagation:\n",
                             bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
                             "Dereference table after propagation:\n",
                             bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
        continue;

      do
        {
          if ((repr->offset + repr->size) > bb_dereferences[idx])
            repr->grp_not_necessarilly_dereferenced = 1;
          repr = repr->next_grp;
        }
      while (repr);
    }
}

/* Return the representative access for the parameter declaration PARM if it
   is a scalar passed by reference which is not written to and the pointer
   value is not used directly.  Thus, if it is legal to dereference it in the
   caller and we can rule out modifications through aliases, such a parameter
   should be turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
        return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
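
/* A hypothetical case this function is after (illustration only):

     int
     get (int *p)
     {
       return *p + *p;
     }

   Both accesses are reads of the same scalar, so the first one becomes the
   group representative and grp_scalar_ptr is set.  Provided the later
   modification and dereference-legality checks pass, the parameter can then
   be rewritten to be passed by value, roughly 'int get (int p)', in the
   clone.  */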

/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is an incompatible assignment in a call statement (and possibly even in
     asm statements).  This can be relaxed by using a new temporary but only
     for non-TREE_ADDRESSABLE types and is probably not worth the complexity.
     (In intraprocedural SRA we deal with this by keeping the old aggregate
     around, something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
          || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}

/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   overlapping accesses with different offsets or sizes; return the special
   pointer value meaning there are no accesses for this parameter if that is
   the case; otherwise return the first representative.  Set *RO_GRP if there
   is a group of accesses with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
        return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
         nasty overlap which would preclude us from breaking this parameter
         apart.  */

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset)
            {
              /* All or nothing law for parameters.  */
              if (access->offset + access->size > ac2->offset)
                return NULL;
              else
                break;
            }
          else if (ac2->size != access->size)
            return NULL;

          if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
              || (ac2->type != access->type
                  && (TREE_ADDRESSABLE (ac2->type)
                      || TREE_ADDRESSABLE (access->type)))
              || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
            return NULL;

          modification |= ac2->write;
          ac2->group_representative = access;
          ac2->next_sibling = access->next_sibling;
          access->next_sibling = ac2;
          j++;
        }

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
        *ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
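
/* A hypothetical illustration of the all-or-nothing rule above (example
   only): for a parameter of type

     union U { long long l; int i; };

   an access to the L field (offset 0, size 64) and an access to the I field
   (offset 0, size 32) have equal offsets but different sizes, so the function
   returns NULL and the parameter stays as it is.  Two accesses to the I
   field, by contrast, are merged into one group with a single
   representative.  */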

/* Decide whether parameters with representative accesses given by REPR should
   be reduced into components.  */

static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
        dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
        return 0;

      /* Do not decompose a non-BLKmode param in a way that would
         create BLKmode params.  Especially for by-reference passing
         (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
          && TYPE_MODE (repr->type) == BLKmode)
        return 0;

      if (!by_ref || (!repr->grp_maybe_modified
                      && !repr->grp_not_necessarilly_dereferenced))
        total_size += repr->size;
      else
        total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
                       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
        fprintf (dump_file, "    ....will be split into %i components\n",
                 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
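
/* A worked example of the size heuristics, assuming 32-bit int and the
   default growth factor of two (hypothetical numbers): a by-value

     struct S { int a, b, c, d; };   // cur_parm_size == agg_size == 128 bits

   of which only the A and C fields are read has total_size == 64.  Since
   64 < 128 and 64 is below the size limit of 256, the parameter is split
   into two 'int' replacements.  Had all four fields been accessed,
   total_size == agg_size would have made the split pointless and the
   function would return 0.  */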

/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
                           MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };

/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
        {
          representatives.quick_push (&no_accesses_representant);
          if (result == NO_GOOD_ACCESS)
            result = UNUSED_PARAMS;
        }
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
               && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
               && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          repr = unmodified_by_ref_scalar_representative (parm);
          representatives.quick_push (repr);
          if (repr)
            result = UNMODIF_BY_REF_ACCESSES;
        }
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
        {
          bool ro_grp = false;
          repr = splice_param_accesses (parm, &ro_grp);
          representatives.quick_push (repr);

          if (repr && !no_accesses_p (repr))
            {
              if (POINTER_TYPE_P (TREE_TYPE (parm)))
                {
                  if (ro_grp)
                    result = UNMODIF_BY_REF_ACCESSES;
                  else if (result < MODIF_BY_REF_ACCESSES)
                    result = MODIF_BY_REF_ACCESSES;
                }
              else if (result < BY_VAL_ACCESSES)
                result = BY_VAL_ACCESSES;
            }
          else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
            result = UNUSED_PARAMS;
        }
      else
        representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}

/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}

/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to first
   representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
   final number of adjustments.  */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
                                       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment adj;

          memset (&adj, 0, sizeof (adj));
          adj.base_index = get_param_index (parm, parms);
          adj.base = parm;
          if (!repr)
            adj.op = IPA_PARM_OP_COPY;
          else
            adj.op = IPA_PARM_OP_REMOVE;
          adj.arg_prefix = "ISRA";
          adjustments.quick_push (adj);
        }
      else
        {
          struct ipa_parm_adjustment adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              memset (&adj, 0, sizeof (adj));
              gcc_assert (repr->base == parm);
              adj.base_index = index;
              adj.base = repr->base;
              adj.type = repr->type;
              adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
              adj.offset = repr->offset;
              adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                            && (repr->grp_maybe_modified
                                || repr->grp_not_necessarilly_dereferenced));
              adj.arg_prefix = "ISRA";
              adjustments.quick_push (adj);
            }
        }
    }
  parms.release ();
  return adjustments;
}
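
/* Continuing the hypothetical struct example above (exact names differ in
   practice): for 'void f (struct S s, int unused)' where only the A and C
   fields of S are used, the adjustment vector would contain three entries:
   two with base S (offsets 0 and 64, type 'int', arg_prefix "ISRA") and one
   IPA_PARM_OP_REMOVE entry for UNUSED, yielding a clone signature along the
   lines of 'void f.isra (int ISRA.0, int ISRA.1)'.  */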

/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, an empty vector meaning nothing.  */

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                representatives[i] = NULL;
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  representatives[i] = NULL;
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not an IPA_PARM_OP_COPY.  Return NULL if there is no
   such adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
        return adj;
    }

  return NULL;
}

/* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
   parameter which is to be removed because its value is not used, create a new
   SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
   original with it and return it.  If there is no need to re-map, return NULL.
   ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments.  */

static tree
replace_removed_params_ssa_names (tree old_name, gimple *stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree decl, repl, new_name;

  if (TREE_CODE (old_name) != SSA_NAME)
    return NULL;

  decl = SSA_NAME_VAR (old_name);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return NULL;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return NULL;

  repl = get_replaced_param_substitute (adj);
  new_name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, old_name, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, new_name, 0);
      fprintf (dump_file, "\n");
    }

  replace_uses_by (old_name, new_name);
  return new_name;
}
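
/* For instance (hypothetical GIMPLE): if parameter N is removed but some
   statement still defines one of its SSA names, e.g. a PHI node

     n_1 = PHI <n_4 (3), n_5 (4)>

   then a replacement VAR_DECL is obtained from get_replaced_param_substitute,
   a fresh SSA name of it becomes the new PHI result, and replace_uses_by
   redirects all uses of N_1 to it.  */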

/* If the statement STMT contains any expressions that need to be replaced
   with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
   type incompatibilities (GSI is used to accommodate conversion statements
   and must point to the statement).  Return true iff the statement was
   modified.  */

static bool
sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
                                            NULL);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}
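
/* A hypothetical illustration of the fix-ups above: when a single-field
   'struct W { float f; }' parameter is replaced by a plain 'float', an
   assignment can be left with a 'struct W' side and a 'float' side.  The RHS
   is then wrapped in a VIEW_CONVERT_EXPR with the type of the LHS, while
   CONSTRUCTOR right-hand sides are instead rebuilt directly with the type of
   the LHS, because V_C_Es of constructors can cause trouble (PR 42714).  */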

/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
          tree new_lhs, old_lhs = gimple_phi_result (phi);
          new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
          if (new_lhs)
            {
              gimple_phi_set_result (phi, new_lhs);
              release_ssa_name (old_lhs);
            }
        }

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
              if (*t != NULL_TREE)
                modified |= ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= ipa_modify_expr (t, false, adjustments);
                }
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, true, adjustments);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    modified |= ipa_modify_expr (t, false, adjustments);
                  }
              }
              break;

            default:
              break;
            }

          def_operand_p defp;
          ssa_op_iter iter;
          FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
            {
              tree old_def = DEF_FROM_PTR (defp);
              if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
                                                                   adjustments))
                {
                  SET_DEF (defp, new_def);
                  release_ssa_name (old_def);
                  modified = true;
                }
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
        continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            if (gimple_clobber_p (stmt))
              {
                gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
                unlink_stmt_vdef (stmt);
                gsi_remove (&cgsi, true);
                release_defs (stmt);
                continue;
              }
            /* All other users must have been removed by
               ipa_sra_modify_function_body.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
                vexpr = make_node (DEBUG_EXPR_DECL);
                def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
                                                           NULL);
                DECL_ARTIFICIAL (vexpr) = 1;
                TREE_TYPE (vexpr) = TREE_TYPE (name);
                DECL_MODE (vexpr) = DECL_MODE (adj->base);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (adj->base),
                             TREE_TYPE (adj->base));
          if (DECL_PT_UID_SET_P (adj->base))
            SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
          TREE_READONLY (copy) = TREE_READONLY (adj->base);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
          DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy) =
            BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
        {
          gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, adj->base,
                                                       NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}

/* Return true iff some caller does not pass as many actual arguments as there
   are formal parameters in the current function, or passes arguments whose
   types do not match.  */

static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
                                          void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}

/* Return true iff some caller has no VUSE attached to its call statement.  */

static bool
some_callers_have_no_vuse_p (struct cgraph_node *node,
                             void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
      return true;

  return false;
}

/* Convert all callers of NODE.  */

static bool
convert_callers_for_node (struct cgraph_node *node,
                          void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
                 xstrdup (cs->caller->name ()),
                 cs->caller->order,
                 xstrdup (cs->callee->name ()),
                 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
        && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
                                     &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          tree call_fndecl;
          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (!stmt)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}

/* Perform all the modification required in IPA-SRA for NODE to have parameters
   as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
                                                   NULL, false, NULL, NULL,
                                                   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}
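
/* Putting the pieces together, a hypothetical end-to-end example
   (illustration only; clone and replacement names are approximate):

     int foo (int *p, int unused) { return *p + 1; }

   becomes the local clone

     int foo.isra.0 (int ISRA.2) { return ISRA.2 + 1; }

   and each call 'foo (&x, j)' in the callers is rewritten by
   ipa_modify_call_arguments to 'foo.isra.0 (x)'.  */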

/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  bool has_callers;
  bool bad_arg_alignment;
  bool has_thunk;
};

/* If NODE has a caller, mark that fact in DATA which is a pointer to
   ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
   all known calls are unit aligned and if not, set the appropriate flag in
   DATA too.  */

static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
        {
          iscc->has_thunk = true;
          return true;
        }
      gimple *call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
        {
          tree arg = gimple_call_arg (call_stmt, i);
          if (is_gimple_reg (arg))
            continue;

          tree offset;
          HOST_WIDE_INT bitsize, bitpos;
          machine_mode mode;
          int unsignedp, volatilep = 0;
          get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
                               &unsignedp, &volatilep, false);
          if (bitpos % BITS_PER_UNIT)
            {
              iscc->bad_arg_alignment = true;
              return true;
            }
        }
    }

  return false;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!node->can_be_local_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
        fprintf (dump_file, "Function cannot change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
        fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Always inline function will be inlined "
                 "anyway.\n");
      return false;
    }

  struct ipa_sra_check_caller_data iscc;
  memset (&iscc, 0, sizeof (iscc));
  node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
  if (!iscc.has_callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (iscc.bad_arg_alignment)
    {
      if (dump_file)
        fprintf (dump_file,
                 "A function call has an argument with non-unit alignment.\n");
      return false;
    }

  if (iscc.has_thunk)
    {
      if (dump_file)
        fprintf (dump_file,
                 "A caller is a thunk.\n");
      return false;
    }

  return true;
}

/* Perform early interprocedural SRA.  */

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with no VUSE attached "
                 "to a call stmt.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}