gcc/tree-sra.c
/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2019 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
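
/* For illustration only (a hypothetical input, not produced by this file):
   given

     struct pair { int x; int y; };

     int
     f (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   intraprocedural SRA would create scalar replacements for p.x and p.y and
   rewrite the body roughly into

     int p$x = 1;
     int p$y = 2;
     return p$x + p$y;

   after which the aggregate P is dead and the scalar optimizers can fold
   the return value to a constant.  */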
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-param-manipulation.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
141 HOST_WIDE_INT offset;
142 HOST_WIDE_INT size;
143 tree base;
145 /* Expression. It is context dependent so do not use it to create new
146 expressions to access the original aggregate. See PR 42154 for a
147 testcase. */
148 tree expr;
149 /* Type. */
150 tree type;
152 /* The statement this access belongs to. */
153 gimple *stmt;
155 /* Next group representative for this aggregate. */
156 struct access *next_grp;
158 /* Pointer to the group representative. Pointer to itself if the struct is
159 the representative. */
160 struct access *group_representative;
162 /* After access tree has been constructed, this points to the parent of the
163 current access, if there is one. NULL for roots. */
164 struct access *parent;
166 /* If this access has any children (in terms of the definition above), this
167 points to the first one. */
168 struct access *first_child;
170 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
171 described above. In IPA-SRA this is a pointer to the next access
172 belonging to the same group (having the same representative). */
173 struct access *next_sibling;
175 /* Pointers to the first and last element in the linked list of assign
176 links. */
177 struct assign_link *first_link, *last_link;
179 /* Pointer to the next access in the work queue. */
180 struct access *next_queued;
182 /* Replacement variable for this access "region." Never to be accessed
183 directly, always only by the means of get_access_replacement() and only
184 when grp_to_be_replaced flag is set. */
185 tree replacement_decl;
187 /* Is this access an access to a non-addressable field? */
188 unsigned non_addressable : 1;
190 /* Is this access made in reverse storage order? */
191 unsigned reverse : 1;
193 /* Is this particular access write access? */
194 unsigned write : 1;
196 /* Is this access currently in the work queue? */
197 unsigned grp_queued : 1;
199 /* Does this group contain a write access? This flag is propagated down the
200 access tree. */
201 unsigned grp_write : 1;
203 /* Does this group contain a read access? This flag is propagated down the
204 access tree. */
205 unsigned grp_read : 1;
207 /* Does this group contain a read access that comes from an assignment
208 statement? This flag is propagated down the access tree. */
209 unsigned grp_assignment_read : 1;
211 /* Does this group contain a write access that comes from an assignment
212 statement? This flag is propagated down the access tree. */
213 unsigned grp_assignment_write : 1;
215 /* Does this group contain a read access through a scalar type? This flag is
216 not propagated in the access tree in any direction. */
217 unsigned grp_scalar_read : 1;
219 /* Does this group contain a write access through a scalar type? This flag
220 is not propagated in the access tree in any direction. */
221 unsigned grp_scalar_write : 1;
223 /* Is this access an artificial one created to scalarize some record
224 entirely? */
225 unsigned grp_total_scalarization : 1;
227 /* Other passes of the analysis use this bit to make function
228 analyze_access_subtree create scalar replacements for this group if
229 possible. */
230 unsigned grp_hint : 1;
232 /* Is the subtree rooted in this access fully covered by scalar
233 replacements? */
234 unsigned grp_covered : 1;
236 /* If set to true, this access and all below it in an access tree must not be
237 scalarized. */
238 unsigned grp_unscalarizable_region : 1;
240 /* Whether data have been written to parts of the aggregate covered by this
241 access which is not to be scalarized. This flag is propagated up in the
242 access tree. */
243 unsigned grp_unscalarized_data : 1;
245 /* Does this access and/or group contain a write access through a
246 BIT_FIELD_REF? */
247 unsigned grp_partial_lhs : 1;
249 /* Set when a scalar replacement should be created for this variable. */
250 unsigned grp_to_be_replaced : 1;
252 /* Set when we want a replacement for the sole purpose of having it in
253 generated debug statements. */
254 unsigned grp_to_be_debug_replaced : 1;
256 /* Should TREE_NO_WARNING of a replacement be set? */
257 unsigned grp_no_warning : 1;
259 /* Is it possible that the group refers to data which might be (directly or
260 otherwise) modified? */
261 unsigned grp_maybe_modified : 1;
263 /* Set when this is a representative of a pointer to scalar (i.e. by
264 reference) parameter which we consider for turning into a plain scalar
265 (i.e. a by value parameter). */
266 unsigned grp_scalar_ptr : 1;
268 /* Set when we discover that this pointer is not safe to dereference in the
269 caller. */
270 unsigned grp_not_necessarilly_dereferenced : 1;
273 typedef struct access *access_p;
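
/* As an illustrative sketch (assuming 32-bit int and no padding): accesses
   to the variable

     struct { struct { int a; int b; } in; int c; } v;

   at v.in, v.in.a, v.in.b and v.c would yield representatives forming the
   tree

     v.in     offset 0,  size 64
       v.in.a   offset 0,  size 32
       v.in.b   offset 32, size 32
     v.c      offset 64, size 32

   where both children lie within v.in and siblings are ordered by
   offset.  */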
/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
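
/* E.g. (hypothetical usage): code holding only a UID taken from
   candidate_bitmap can recover the declaration with

     tree var = candidate (uid);

   which probes the table with a stack-allocated tree_node whose only
   meaningful field is decl_minimal.uid.  */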
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Various statistics gathered during the run of the pass.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", non_addressable = %d, reverse = %d",
	   access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
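
/* An illustrative walk (hypothetical tree, not extra functionality): looking
   up offset 32, size 32 under a root with offset 0, size 64 first skips
   children whose extent ends at or before bit 32, then descends into the
   child covering bit 32, and stops as soon as an access matches both offset
   and size exactly.  */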
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (access->first_link && !access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
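
/* A typical (hypothetical) driver loop over the queue, which behaves as a
   LIFO stack:

     add_access_to_work_queue (racc);
     while (work_queue_head)
       {
	 struct access *acc = pop_access_from_work_queue ();
	 ...propagate subaccesses across ACC's assign links...
       }
 */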
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (!tree_fits_shwi_p (bit_position (fld)))
	      {
		*msg = "structure field size too big";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
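
/* For example (purely illustrative numbers): with func_param_count == 3,
   the largest dereferenced distance of parameter 1 within basic block 4 is
   kept in bb_dereferences[4 * 3 + 1].  */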
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
static bool maybe_add_sra_candidate (tree);

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  Also scan for uses of the constant pool as we go along and
   add to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  poly_int64 poffset, psize, pmax_size;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
				  &reverse);
  if (!poffset.is_constant (&offset)
      || !psize.is_constant (&size)
      || !pmax_size.is_constant (&max_size))
    {
      disqualify_candidate (base, "Encountered a polynomial-sized access.");
      return NULL;
    }

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
	  && !is_gimple_reg_type (TREE_TYPE (expr))
	  && dump_file && (dump_flags & TDF_DETAILS))
	{
	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
	     and elements of multidimensional arrays (which are
	     multi-element arrays in their own right).  */
	  fprintf (dump_file, "Allowing non-reg-type load of part"
		   " of constant-pool entry: ");
	  print_generic_expr (dump_file, expr);
	}
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
   we are considering a decl from the constant pool.  If it is false, char
   arrays will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (DECL_BIT_FIELD (fld))
	      return false;

	    if (!is_gimple_reg_type (ft)
		&& !scalarizable_type_p (ft, const_decl))
	      return false;
	  }

      return true;

    case ARRAY_TYPE:
      {
	HOST_WIDE_INT min_elem_size;
	if (const_decl)
	  min_elem_size = 0;
	else
	  min_elem_size = BITS_PER_UNIT;

	if (TYPE_DOMAIN (type) == NULL_TREE
	    || !tree_fits_shwi_p (TYPE_SIZE (type))
	    || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
	    || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
	    || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
	  return false;
	if (tree_to_shwi (TYPE_SIZE (type)) == 0
	    && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
	  /* Zero-element array, should not prevent scalarization.  */
	  ;
	else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
		 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	  /* Variable-length array, do not allow scalarization.  */
	  return false;

	tree elem = TREE_TYPE (type);
	if (!is_gimple_reg_type (elem)
	    && !scalarizable_type_p (elem, const_decl))
	  return false;
	return true;
      }
    default:
      return false;
    }
}
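
/* Illustrative examples (assuming 32-bit int): struct { int a; float b; }
   is scalarizable; struct { int a : 3; } is not (bit-field member); and
   char buf[4] is refused unless CONST_DECL is true, because the element
   size of a char array does not exceed the BITS_PER_UNIT minimum.  */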
static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE
   must be the top-most VAR_DECL representing the variable; within that,
   OFFSET locates the member and REF must be the memory reference expression for
   the member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
	    tree ft = TREE_TYPE (fld);
	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
			    nref, ft);
	  }
      break;
    case ARRAY_TYPE:
      {
	tree elemtype = TREE_TYPE (decl_type);
	tree elem_size = TYPE_SIZE (elemtype);
	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
	gcc_assert (el_size > 0);

	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
	if (maxidx)
	  {
	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
	    tree domain = TYPE_DOMAIN (decl_type);
	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
	    offset_int idx = wi::to_offset (minidx);
	    offset_int max = wi::to_offset (maxidx);
	    if (!TYPE_UNSIGNED (domain))
	      {
		idx = wi::sext (idx, TYPE_PRECISION (domain));
		max = wi::sext (max, TYPE_PRECISION (domain));
	      }
	    for (int el_off = offset; idx <= max; ++idx)
	      {
		tree nref = build4 (ARRAY_REF, elemtype,
				    ref,
				    wide_int_to_tree (domain, idx),
				    NULL_TREE, NULL_TREE);
		scalarize_elem (base, el_off, el_size,
				TYPE_REVERSE_STORAGE_ORDER (decl_type),
				nref, elemtype);
		el_off += el_size;
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }
}

/* Create total_scalarization accesses for a member of type TYPE, which must
   satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member, REVERSE gives its storage order, and REF must be the reference
   expression for it.  */

static void
scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
		tree ref, tree type)
{
  if (is_gimple_reg_type (type))
    {
      struct access *access = create_access_1 (base, pos, size);
      access->expr = ref;
      access->type = type;
      access->grp_total_scalarization = 1;
      access->reverse = reverse;
      /* Accesses for intraprocedural SRA can have their stmt NULL.  */
    }
  else
    completely_scalarize (base, type, pos, ref);
}
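
/* A sketch of the recursion (hypothetical layout, 32-bit int): for

     struct { int a; int b[2]; } s;

   completely_scalarize creates scalar accesses for s.a at offset 0,
   s.b[0] at offset 32 and s.b[1] at offset 64, each flagged as
   grp_total_scalarization.  */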
/* Create a total_scalarization access for VAR as a whole.  VAR must be of a
   RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */

static void
create_total_scalarization_access (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration.  If TYPE_CHANGING_P is non-NULL, the bool it
   points to will be set if REF contains any of the above or a MEM_REF
   expression that effectively performs type conversion.  */

static bool
contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	{
	  if (type_changing_p)
	    *type_changing_p = true;
	  return true;
	}
      ref = TREE_OPERAND (ref, 0);
    }

  if (!type_changing_p
      || TREE_CODE (ref) != MEM_REF
      || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
    return false;

  tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
      != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
    *type_changing_p = true;

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  if (storage_order_barrier_p (expr))
    {
      disqualify_base_of_expr (expr, "storage order barrier.");
      return NULL;
    }

  /* We need to dive through V_C_Es in order to get the size of their operand
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple *stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
	if (res)
	  return NULL;
	res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot in which to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
	return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Return true if the nature of BASE is such that it contains data even if
   there is no write to it in the function.  */

static bool
comes_initialized_p (tree base)
{
  return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple *stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (storage_order_barrier_p (rhs))
	lacc->grp_unscalarizable_region = 1;

      if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
	{
	  bool type_changing_p = false;
	  contains_vce_or_bfcref_p (lhs, &type_changing_p);
	  if (type_changing_p)
	    bitmap_set_bit (cannot_scalarize_away_bitmap,
			    DECL_UID (lacc->base));
	}
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
	{
	  bool type_changing_p = false;
	  contains_vce_or_bfcref_p (rhs, &type_changing_p);

	  if (type_changing_p || gimple_has_volatile_ops (stmt))
	    bitmap_set_bit (cannot_scalarize_away_bitmap,
			    DECL_UID (racc->base));
	  else
	    bitmap_set_bit (should_scalarize_away_bitmap,
			    DECL_UID (racc->base));
	}
      if (storage_order_barrier_p (lhs))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = assign_link_pool.allocate ();
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;
      add_link_to_rhs (racc, link);
      add_access_to_work_queue (racc);

      /* Let's delay marking the areas as written until propagation of accesses
	 across the link, unless the nature of rhs tells us that its data comes
	 from elsewhere.  */
      if (!comes_initialized_p (racc->base))
	lacc->write = false;
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple *, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static inline bool
callsite_arguments_match_p (gimple *call)
{
  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  tree parm;
  int i;
  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
	return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (cfun, stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (as_a <greturn *> (stmt));
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (fndecl_built_in_p (dest, BUILT_IN_APPLY_ARGS))
			encountered_apply_args = true;
		      if (recursive_call_p (current_function_decl, dest))
			{
			  encountered_recursive_call = true;
			  if (!callsite_arguments_match_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
					       asm_visit_addr);
		if (final_bbs)
		  bitmap_set_bit (final_bbs, bb->index);

		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    ret |= build_access_from_expr (t, asm_stmt, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put any integral type before any non-integral type.  When splicing, we
	 make sure that those with insufficient precision and occupying the
	 same space are not scalarized.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && !INTEGRAL_TYPE_P (f2->type))
	return -1;
      else if (!INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return 1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type)
	       && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
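
/* So, illustratively, accesses with (offset, size) of (0, 64), (0, 32) and
   (32, 32) sort as: (0, 64) first (the bigger size wins at equal offsets),
   then (0, 32), then the access at the higher offset (32, 32).  */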
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable of the access
   described by EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
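
/* E.g. (illustrative): for the expression a.b[3].c this yields the string
   "a$b$3$c", which is later used to name the replacement variable.  */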
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
   something for which get_addr_base_and_unit_offset returns NULL, GSI must
   be non-NULL and is used to insert new statements either before or below
   the current one as specified by INSERT_AFTER.  This function is not capable
   of handling bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  poly_int64 base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  /* Preserve address-space information.  */
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  if (as != TYPE_ADDR_SPACE (exp_type))
    exp_type = build_qualified_type (exp_type,
				     TYPE_QUALS (exp_type)
				     | ENCODE_QUAL_ADDR_SPACE (as));

  poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + byte_offset);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   base_offset + byte_offset);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  unsigned int align_bound = known_alignment (misalign + offset);
  if (align_bound != 0)
    align = MIN (align, align_bound);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
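
/* For instance (a sketch of the common cases, using gimple dump notation):
   referencing an int at bit offset 32 of a plain decl S conceptually yields
   MEM[(int *)&s + 4B]; if BASE is already MEM[p_1 + 8B], the two byte
   offsets are simply combined into MEM[(int *)p_1 + 12B].  */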
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
				gsi, insert_after);
      /* The flag will be set on the record type.  */
      REF_REVERSE_STORAGE_ORDER (t) = 0;
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
			      NULL_TREE);
    }
  else
    return
      build_ref_for_offset (loc, base, offset, model->reverse, model->type,
			    gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
			   struct access *model)
{
  poly_int64 base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
1867 /* Construct a memory reference consisting of component_refs and array_refs to
1868 a part of an aggregate *RES (which is of type TYPE). The requested part
1869 should have type EXP_TYPE and be at the given OFFSET. This function might not
1870 succeed, it returns true when it does and only then *RES points to something
1871 meaningful. This function should be used only to build expressions that we
1872 might need to present to the user (e.g. in warnings). In all other situations,
1873 build_ref_for_model or build_ref_for_offset should be used instead. */
1875 static bool
1876 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1877 tree exp_type)
1879 while (1)
1881 tree fld;
1882 tree tr_size, index, minidx;
1883 HOST_WIDE_INT el_size;
1885 if (offset == 0 && exp_type
1886 && types_compatible_p (exp_type, type))
1887 return true;
1889 switch (TREE_CODE (type))
1891 case UNION_TYPE:
1892 case QUAL_UNION_TYPE:
1893 case RECORD_TYPE:
1894 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1896 HOST_WIDE_INT pos, size;
1897 tree tr_pos, expr, *expr_ptr;
1899 if (TREE_CODE (fld) != FIELD_DECL)
1900 continue;
1902 tr_pos = bit_position (fld);
1903 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1904 continue;
1905 pos = tree_to_uhwi (tr_pos);
1906 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1907 tr_size = DECL_SIZE (fld);
1908 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1909 continue;
1910 size = tree_to_uhwi (tr_size);
1911 if (size == 0)
1913 if (pos != offset)
1914 continue;
1916 else if (pos > offset || (pos + size) <= offset)
1917 continue;
1919 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1920 NULL_TREE);
1921 expr_ptr = &expr;
1922 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1923 offset - pos, exp_type))
1925 *res = expr;
1926 return true;
1929 return false;
1931 case ARRAY_TYPE:
1932 tr_size = TYPE_SIZE (TREE_TYPE (type));
1933 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1934 return false;
1935 el_size = tree_to_uhwi (tr_size);
1937 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1938 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1939 return false;
1940 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1941 if (!integer_zerop (minidx))
1942 index = int_const_binop (PLUS_EXPR, index, minidx);
1943 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1944 NULL_TREE, NULL_TREE);
1945 offset = offset % el_size;
1946 type = TREE_TYPE (type);
1947 break;
1949 default:
1950 if (offset != 0)
1951 return false;
1953 if (exp_type)
1954 return false;
1955 else
1956 return true;
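/* Illustrative sketch, not part of the pass, with hypothetical types: on a
   typical target with 32-bit int and 16-bit short, a request for a short at
   bit offset 48 into the outer struct below descends through field 'in'
   (bits 32..63) and then field 'c' (bits 16..31 of the inner type),
   producing "v.in.c" rather than a raw MEM_REF, which is what warnings
   prefer to print.  */
struct uf_inner { short b; short c; };
struct uf_outer { int a; struct uf_inner in; };

static short
user_friendly_ref_example (struct uf_outer v)
{
  return v.in.c;   /* the expression the function would construct.  */
}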
1961 /* Return true iff TYPE is a stdarg va_list type. */
1963 static inline bool
1964 is_va_list_type (tree type)
1966 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1969 /* Print a message to the dump file saying why a variable was rejected. */
1971 static void
1972 reject (tree var, const char *msg)
1974 if (dump_file && (dump_flags & TDF_DETAILS))
1976 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1977 print_generic_expr (dump_file, var);
1978 fprintf (dump_file, "\n");
1982 /* Return true if VAR is a candidate for SRA. */
1984 static bool
1985 maybe_add_sra_candidate (tree var)
1987 tree type = TREE_TYPE (var);
1988 const char *msg;
1989 tree_node **slot;
1991 if (!AGGREGATE_TYPE_P (type))
1993 reject (var, "not aggregate");
1994 return false;
1996 /* Allow constant-pool entries (that "need to live in memory")
1997 unless we are doing IPA SRA. */
1998 if (needs_to_live_in_memory (var)
1999 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
2001 reject (var, "needs to live in memory");
2002 return false;
2004 if (TREE_THIS_VOLATILE (var))
2006 reject (var, "is volatile");
2007 return false;
2009 if (!COMPLETE_TYPE_P (type))
2011 reject (var, "has incomplete type");
2012 return false;
2014 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
2016 reject (var, "type size not fixed");
2017 return false;
2019 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
2021 reject (var, "type size is zero");
2022 return false;
2024 if (type_internals_preclude_sra_p (type, &msg))
2026 reject (var, msg);
2027 return false;
2029 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
2030 we also want to schedule it rather late. Thus we ignore it in
2031 the early pass. */
2032 (sra_mode == SRA_MODE_EARLY_INTRA
2033 && is_va_list_type (type)))
2035 reject (var, "is va_list");
2036 return false;
2039 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
2040 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
2041 *slot = var;
2043 if (dump_file && (dump_flags & TDF_DETAILS))
2045 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
2046 print_generic_expr (dump_file, var);
2047 fprintf (dump_file, "\n");
2050 return true;
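/* Illustrative sketch, not part of the pass: the hypothetical locals below
   show typical decisions made by the checks above.  */
struct cand_example { int x; int y; };

static int
sra_candidate_example (int n)
{
  struct cand_example ok = { n, n + 1 };  /* aggregate, complete, fixed
                                             size: candidate.  */
  volatile struct cand_example rej = ok;  /* rejected: "is volatile".  */
  int scalar = n;                         /* rejected: "not aggregate".  */
  return ok.x + rej.y + scalar;
}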
2053 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
2054 those variables whose type is suitable for scalarization. */
2056 static bool
2057 find_var_candidates (void)
2059 tree var, parm;
2060 unsigned int i;
2061 bool ret = false;
2063 for (parm = DECL_ARGUMENTS (current_function_decl);
2064 parm;
2065 parm = DECL_CHAIN (parm))
2066 ret |= maybe_add_sra_candidate (parm);
2068 FOR_EACH_LOCAL_DECL (cfun, i, var)
2070 if (!VAR_P (var))
2071 continue;
2073 ret |= maybe_add_sra_candidate (var);
2076 return ret;
2079 /* Sort all accesses for the given variable, check for partial overlaps and
2080 return NULL if there are any. If there are none, pick a representative for
2081 each combination of offset and size and create a linked list out of them.
2082 Return the pointer to the first representative and make sure it is the first
2083 one in the vector of accesses. */
2085 static struct access *
2086 sort_and_splice_var_accesses (tree var)
2088 int i, j, access_count;
2089 struct access *res, **prev_acc_ptr = &res;
2090 vec<access_p> *access_vec;
2091 bool first = true;
2092 HOST_WIDE_INT low = -1, high = 0;
2094 access_vec = get_base_access_vector (var);
2095 if (!access_vec)
2096 return NULL;
2097 access_count = access_vec->length ();
2099 /* Sort by <OFFSET, SIZE>. */
2100 access_vec->qsort (compare_access_positions);
2102 i = 0;
2103 while (i < access_count)
2105 struct access *access = (*access_vec)[i];
2106 bool grp_write = access->write;
2107 bool grp_read = !access->write;
2108 bool grp_scalar_write = access->write
2109 && is_gimple_reg_type (access->type);
2110 bool grp_scalar_read = !access->write
2111 && is_gimple_reg_type (access->type);
2112 bool grp_assignment_read = access->grp_assignment_read;
2113 bool grp_assignment_write = access->grp_assignment_write;
2114 bool multiple_scalar_reads = false;
2115 bool total_scalarization = access->grp_total_scalarization;
2116 bool grp_partial_lhs = access->grp_partial_lhs;
2117 bool first_scalar = is_gimple_reg_type (access->type);
2118 bool unscalarizable_region = access->grp_unscalarizable_region;
2119 bool bf_non_full_precision
2120 = (INTEGRAL_TYPE_P (access->type)
2121 && TYPE_PRECISION (access->type) != access->size
2122 && TREE_CODE (access->expr) == COMPONENT_REF
2123 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2125 if (first || access->offset >= high)
2127 first = false;
2128 low = access->offset;
2129 high = access->offset + access->size;
2131 else if (access->offset > low && access->offset + access->size > high)
2132 return NULL;
2133 else
2134 gcc_assert (access->offset >= low
2135 && access->offset + access->size <= high);
2137 j = i + 1;
2138 while (j < access_count)
2140 struct access *ac2 = (*access_vec)[j];
2141 if (ac2->offset != access->offset || ac2->size != access->size)
2142 break;
2143 if (ac2->write)
2145 grp_write = true;
2146 grp_scalar_write = (grp_scalar_write
2147 || is_gimple_reg_type (ac2->type));
2149 else
2151 grp_read = true;
2152 if (is_gimple_reg_type (ac2->type))
2154 if (grp_scalar_read)
2155 multiple_scalar_reads = true;
2156 else
2157 grp_scalar_read = true;
2160 grp_assignment_read |= ac2->grp_assignment_read;
2161 grp_assignment_write |= ac2->grp_assignment_write;
2162 grp_partial_lhs |= ac2->grp_partial_lhs;
2163 unscalarizable_region |= ac2->grp_unscalarizable_region;
2164 total_scalarization |= ac2->grp_total_scalarization;
2165 relink_to_new_repr (access, ac2);
2167 /* If there are both aggregate-type and scalar-type accesses with
2168 this combination of size and offset, the comparison function
2169 should have put the scalars first. */
2170 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2171 /* It also prefers integral types to non-integral. However, when the
2172 precision of the selected type does not span the entire area and
2173 should also be used for a non-integer (e.g. a float), we must not
2174 let that happen. Normally analyze_access_subtree expands the type
2175 to cover the entire area but for bit-fields it doesn't. */
2176 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2178 if (dump_file && (dump_flags & TDF_DETAILS))
2180 fprintf (dump_file, "Cannot scalarize the following access "
2181 "because insufficient precision integer type was "
2182 "selected.\n ");
2183 dump_access (dump_file, access, false);
2185 unscalarizable_region = true;
2187 ac2->group_representative = access;
2188 j++;
2191 i = j;
2193 access->group_representative = access;
2194 access->grp_write = grp_write;
2195 access->grp_read = grp_read;
2196 access->grp_scalar_read = grp_scalar_read;
2197 access->grp_scalar_write = grp_scalar_write;
2198 access->grp_assignment_read = grp_assignment_read;
2199 access->grp_assignment_write = grp_assignment_write;
2200 access->grp_hint = total_scalarization
2201 || (multiple_scalar_reads && !constant_decl_p (var));
2202 access->grp_total_scalarization = total_scalarization;
2203 access->grp_partial_lhs = grp_partial_lhs;
2204 access->grp_unscalarizable_region = unscalarizable_region;
2206 *prev_acc_ptr = access;
2207 prev_acc_ptr = &access->next_grp;
2210 gcc_assert (res == (*access_vec)[0]);
2211 return res;
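/* Illustrative sketch, not part of the pass: with the hypothetical packed
   union below, u.i covers bits 0..31 while u.s.b covers bits 16..47 on a
   typical target; the two accesses overlap but neither contains the other,
   so the function above would return NULL and 'u' would be disqualified as
   inhibitingly overlapping.  */
union overlap_example
{
  int i;
  struct __attribute__ ((packed)) { short a; int b; } s;
};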
2214 /* Create a variable for the given ACCESS which determines the type, name and a
2215 few other properties. Return the variable declaration and store it also to
2216 ACCESS->replacement. REG_TREE is used when creating a declaration to base a
2217 default-definition SSA name on in order to facilitate an uninitialized
2218 warning. It is used instead of the actual ACCESS type if that is not of a
2219 gimple register type. */
2221 static tree
2222 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2224 tree repl;
2226 tree type = access->type;
2227 if (reg_type && !is_gimple_reg_type (type))
2228 type = reg_type;
2230 if (access->grp_to_be_debug_replaced)
2232 repl = create_tmp_var_raw (access->type);
2233 DECL_CONTEXT (repl) = current_function_decl;
2235 else
2236 /* Drop any special alignment on the type if it's not on the main
2237 variant. This avoids issues with weirdo ABIs like AAPCS. */
2238 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2239 TYPE_QUALS (type)), "SR");
2240 if (TREE_CODE (type) == COMPLEX_TYPE
2241 || TREE_CODE (type) == VECTOR_TYPE)
2243 if (!access->grp_partial_lhs)
2244 DECL_GIMPLE_REG_P (repl) = 1;
2246 else if (access->grp_partial_lhs
2247 && is_gimple_reg_type (type))
2248 TREE_ADDRESSABLE (repl) = 1;
2250 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2251 DECL_ARTIFICIAL (repl) = 1;
2252 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2254 if (DECL_NAME (access->base)
2255 && !DECL_IGNORED_P (access->base)
2256 && !DECL_ARTIFICIAL (access->base))
2258 char *pretty_name = make_fancy_name (access->expr);
2259 tree debug_expr = unshare_expr_without_location (access->expr), d;
2260 bool fail = false;
2262 DECL_NAME (repl) = get_identifier (pretty_name);
2263 DECL_NAMELESS (repl) = 1;
2264 obstack_free (&name_obstack, pretty_name);
2266 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2267 as DECL_DEBUG_EXPR isn't considered when looking for still
2268 used SSA_NAMEs and thus they could be freed. All debug info
2269 generation cares about is whether something is constant or variable
2270 and that get_ref_base_and_extent works properly on the
2271 expression. It cannot handle accesses at a non-constant offset
2272 though, so just give up in those cases. */
2273 for (d = debug_expr;
2274 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2275 d = TREE_OPERAND (d, 0))
2276 switch (TREE_CODE (d))
2278 case ARRAY_REF:
2279 case ARRAY_RANGE_REF:
2280 if (TREE_OPERAND (d, 1)
2281 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2282 fail = true;
2283 if (TREE_OPERAND (d, 3)
2284 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2285 fail = true;
2286 /* FALLTHRU */
2287 case COMPONENT_REF:
2288 if (TREE_OPERAND (d, 2)
2289 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2290 fail = true;
2291 break;
2292 case MEM_REF:
2293 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2294 fail = true;
2295 else
2296 d = TREE_OPERAND (d, 0);
2297 break;
2298 default:
2299 break;
2301 if (!fail)
2303 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2304 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2306 if (access->grp_no_warning)
2307 TREE_NO_WARNING (repl) = 1;
2308 else
2309 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2311 else
2312 TREE_NO_WARNING (repl) = 1;
2314 if (dump_file)
2316 if (access->grp_to_be_debug_replaced)
2318 fprintf (dump_file, "Created a debug-only replacement for ");
2319 print_generic_expr (dump_file, access->base);
2320 fprintf (dump_file, " offset: %u, size: %u\n",
2321 (unsigned) access->offset, (unsigned) access->size);
2323 else
2325 fprintf (dump_file, "Created a replacement for ");
2326 print_generic_expr (dump_file, access->base);
2327 fprintf (dump_file, " offset: %u, size: %u: ",
2328 (unsigned) access->offset, (unsigned) access->size);
2329 print_generic_expr (dump_file, repl);
2330 fprintf (dump_file, "\n");
2333 sra_stats.replacements++;
2335 return repl;
2338 /* Return ACCESS scalar replacement, which must exist. */
2340 static inline tree
2341 get_access_replacement (struct access *access)
2343 gcc_checking_assert (access->replacement_decl);
2344 return access->replacement_decl;
2348 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2349 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2350 to by it is not "within" the root. Return false iff some accesses partially
2351 overlap. */
2353 static bool
2354 build_access_subtree (struct access **access)
2356 struct access *root = *access, *last_child = NULL;
2357 HOST_WIDE_INT limit = root->offset + root->size;
2359 *access = (*access)->next_grp;
2360 while (*access && (*access)->offset + (*access)->size <= limit)
2362 if (!last_child)
2363 root->first_child = *access;
2364 else
2365 last_child->next_sibling = *access;
2366 last_child = *access;
2367 (*access)->parent = root;
2368 (*access)->grp_write |= root->grp_write;
2370 if (!build_access_subtree (access))
2371 return false;
2374 if (*access && (*access)->offset < limit)
2375 return false;
2377 return true;
2380 /* Build a tree of access representatives, ACCESS is the pointer to the first
2381 one, others are linked in a list by the next_grp field. Return false iff
2382 some accesses partially overlap. */
2384 static bool
2385 build_access_trees (struct access *access)
2387 while (access)
2389 struct access *root = access;
2391 if (!build_access_subtree (&access))
2392 return false;
2393 root->next_grp = access;
2395 return true;
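/* Illustrative sketch, not part of the pass: for the hypothetical aggregate
   below, representatives <0,128>, <0,64>, <0,32> and <64,64> (bit offsets
   and sizes on a typical LP64 target) would be linked into the tree

       <0,128>  whole variable
         +-- <0,64>   field 'in'
         |     +-- <0,32>  field 'in.x'
         +-- <64,64>  field 'd'

   because each child lies entirely within its parent.  */
struct tree_inner { int x; int y; };
struct tree_example { struct tree_inner in; double d; };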
2398 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2399 array. */
2401 static bool
2402 expr_with_var_bounded_array_refs_p (tree expr)
2404 while (handled_component_p (expr))
2406 if (TREE_CODE (expr) == ARRAY_REF
2407 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2408 return true;
2409 expr = TREE_OPERAND (expr, 0);
2411 return false;
2414 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2415 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2416 sorts of access flags appropriately along the way, notably propagate the
2417 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2418 grp_total_scalarization flags from the parent down the tree.
2420 Creating a replacement for a scalar access is considered beneficial if its
2421 grp_hint is set (this means we are either attempting total scalarization or
2422 there is more than one direct read access) or according to the following
2423 table:
2425 Access written to through a scalar type (once or more times)
2427 | Written to in an assignment statement
2429 | | Access read as scalar _once_
2430 | | |
2431 | | | Read in an assignment statement
2432 | | | |
2433 | | | | Scalarize Comment
2434 -----------------------------------------------------------------------------
2435 0 0 0 0 No access for the scalar
2436 0 0 0 1 No access for the scalar
2437 0 0 1 0 No Single read - won't help
2438 0 0 1 1 No The same case
2439 0 1 0 0 No access for the scalar
2440 0 1 0 1 No access for the scalar
2441 0 1 1 0 Yes s = *g; return s.i;
2442 0 1 1 1 Yes The same case as above
2443 1 0 0 0 No Won't help
2444 1 0 0 1 Yes s.i = 1; *g = s;
2445 1 0 1 0 Yes s.i = 5; g = s.i;
2446 1 0 1 1 Yes The same case as above
2447 1 1 0 0 No Won't help.
2448 1 1 0 1 Yes s.i = 1; *g = s;
2449 1 1 1 0 Yes s = *g; return s.i;
2450 1 1 1 1 Yes Any of the above yeses */
2452 static bool
2453 analyze_access_subtree (struct access *root, struct access *parent,
2454 bool allow_replacements)
2456 struct access *child;
2457 HOST_WIDE_INT limit = root->offset + root->size;
2458 HOST_WIDE_INT covered_to = root->offset;
2459 bool scalar = is_gimple_reg_type (root->type);
2460 bool hole = false, sth_created = false;
2462 if (parent)
2464 if (parent->grp_read)
2465 root->grp_read = 1;
2466 if (parent->grp_assignment_read)
2467 root->grp_assignment_read = 1;
2468 if (parent->grp_write)
2469 root->grp_write = 1;
2470 if (parent->grp_assignment_write)
2471 root->grp_assignment_write = 1;
2472 if (parent->grp_total_scalarization)
2473 root->grp_total_scalarization = 1;
2476 if (root->grp_unscalarizable_region)
2477 allow_replacements = false;
2479 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2480 allow_replacements = false;
2482 for (child = root->first_child; child; child = child->next_sibling)
2484 hole |= covered_to < child->offset;
2485 sth_created |= analyze_access_subtree (child, root,
2486 allow_replacements && !scalar);
2488 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2489 root->grp_total_scalarization &= child->grp_total_scalarization;
2490 if (child->grp_covered)
2491 covered_to += child->size;
2492 else
2493 hole = true;
2496 if (allow_replacements && scalar && !root->first_child
2497 && (root->grp_hint
2498 || ((root->grp_scalar_read || root->grp_assignment_read)
2499 && (root->grp_scalar_write || root->grp_assignment_write))))
2501 /* Always create access replacements that cover the whole access.
2502 For integral types this means the precision has to match.
2503 Avoid assumptions based on the integral type kind, too. */
2504 if (INTEGRAL_TYPE_P (root->type)
2505 && (TREE_CODE (root->type) != INTEGER_TYPE
2506 || TYPE_PRECISION (root->type) != root->size)
2507 /* But leave bitfield accesses alone. */
2508 && (TREE_CODE (root->expr) != COMPONENT_REF
2509 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2511 tree rt = root->type;
2512 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2513 && (root->size % BITS_PER_UNIT) == 0);
2514 root->type = build_nonstandard_integer_type (root->size,
2515 TYPE_UNSIGNED (rt));
2516 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2517 root->offset, root->reverse,
2518 root->type, NULL, false);
2520 if (dump_file && (dump_flags & TDF_DETAILS))
2522 fprintf (dump_file, "Changing the type of a replacement for ");
2523 print_generic_expr (dump_file, root->base);
2524 fprintf (dump_file, " offset: %u, size: %u ",
2525 (unsigned) root->offset, (unsigned) root->size);
2526 fprintf (dump_file, " to an integer.\n");
2530 root->grp_to_be_replaced = 1;
2531 root->replacement_decl = create_access_replacement (root);
2532 sth_created = true;
2533 hole = false;
2535 else
2537 if (allow_replacements
2538 && scalar && !root->first_child
2539 && (root->grp_scalar_write || root->grp_assignment_write)
2540 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2541 DECL_UID (root->base)))
2543 gcc_checking_assert (!root->grp_scalar_read
2544 && !root->grp_assignment_read);
2545 sth_created = true;
2546 if (MAY_HAVE_DEBUG_BIND_STMTS)
2548 root->grp_to_be_debug_replaced = 1;
2549 root->replacement_decl = create_access_replacement (root);
2553 if (covered_to < limit)
2554 hole = true;
2555 if (scalar || !allow_replacements)
2556 root->grp_total_scalarization = 0;
2559 if (!hole || root->grp_total_scalarization)
2560 root->grp_covered = 1;
2561 else if (root->grp_write || comes_initialized_p (root->base))
2562 root->grp_unscalarized_data = 1; /* not covered and written to */
2563 return sth_created;
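/* Illustrative sketch, not part of the pass: row "1 0 0 1 -> Yes" of the
   table before this function corresponds to the hypothetical code below;
   the components of 's' are written through scalar types and 's' is then
   read by an aggregate assignment, so scalar replacements pay off.  */
struct table_example { int i; int j; };

static void
scalarize_table_example (struct table_example *g)
{
  struct table_example s;
  s.i = 1;   /* access written to through a scalar type.  */
  s.j = 2;
  *g = s;    /* access read in an assignment statement.  */
}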
2566 /* Analyze all access trees linked by next_grp by the means of
2567 analyze_access_subtree. */
2568 static bool
2569 analyze_access_trees (struct access *access)
2571 bool ret = false;
2573 while (access)
2575 if (analyze_access_subtree (access, NULL, true))
2576 ret = true;
2577 access = access->next_grp;
2580 return ret;
2583 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and
2584 with size SIZE would conflict with an already existing one. If exactly such
2585 a child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2587 static bool
2588 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2589 HOST_WIDE_INT size, struct access **exact_match)
2591 struct access *child;
2593 for (child = lacc->first_child; child; child = child->next_sibling)
2595 if (child->offset == norm_offset && child->size == size)
2597 *exact_match = child;
2598 return true;
2601 if (child->offset < norm_offset + size
2602 && child->offset + child->size > norm_offset)
2603 return true;
2606 return false;
2609 /* Create a new child access of PARENT, with all properties just like MODEL
2610 except for its offset and with its grp_write false and grp_read true.
2611 Return the new access or NULL if it cannot be created. Note that this
2612 access is created long after all splicing and sorting, it's not located in
2613 any access vector and is automatically a representative of its group. Set
2614 the grp_write flag of the new access if SET_GRP_WRITE is true. */
2616 static struct access *
2617 create_artificial_child_access (struct access *parent, struct access *model,
2618 HOST_WIDE_INT new_offset,
2619 bool set_grp_write)
2621 struct access **child;
2622 tree expr = parent->base;
2624 gcc_assert (!model->grp_unscalarizable_region);
2626 struct access *access = access_pool.allocate ();
2627 memset (access, 0, sizeof (struct access));
2628 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2629 model->type))
2631 access->grp_no_warning = true;
2632 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2633 new_offset, model, NULL, false);
2636 access->base = parent->base;
2637 access->expr = expr;
2638 access->offset = new_offset;
2639 access->size = model->size;
2640 access->type = model->type;
2641 access->grp_write = set_grp_write;
2642 access->grp_read = false;
2643 access->reverse = model->reverse;
2645 child = &parent->first_child;
2646 while (*child && (*child)->offset < new_offset)
2647 child = &(*child)->next_sibling;
2649 access->next_sibling = *child;
2650 *child = access;
2652 return access;
2656 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2657 sub-trees as written to. If any of them has not been marked so previously
2658 and has assignment links leading from it, re-enqueue it. */
2660 static void
2661 subtree_mark_written_and_enqueue (struct access *access)
2663 if (access->grp_write)
2664 return;
2665 access->grp_write = true;
2666 add_access_to_work_queue (access);
2668 struct access *child;
2669 for (child = access->first_child; child; child = child->next_sibling)
2670 subtree_mark_written_and_enqueue (child);
2673 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2674 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2675 propagated transitively. Return true if anything changed. Additionally, if
2676 RACC is a scalar access but LACC is not, change the type of the latter, if
2677 possible. */
2679 static bool
2680 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2682 struct access *rchild;
2683 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2684 bool ret = false;
2686 /* If the LHS is still not marked as being written to, we only need to do so
2687 if the RHS at this level actually was. */
2688 if (!lacc->grp_write)
2690 gcc_checking_assert (!comes_initialized_p (racc->base));
2691 if (racc->grp_write)
2693 subtree_mark_written_and_enqueue (lacc);
2694 ret = true;
2698 if (is_gimple_reg_type (lacc->type)
2699 || lacc->grp_unscalarizable_region
2700 || racc->grp_unscalarizable_region)
2702 if (!lacc->grp_write)
2704 ret = true;
2705 subtree_mark_written_and_enqueue (lacc);
2707 return ret;
2710 if (is_gimple_reg_type (racc->type))
2712 if (!lacc->grp_write)
2714 ret = true;
2715 subtree_mark_written_and_enqueue (lacc);
2717 if (!lacc->first_child && !racc->first_child)
2719 tree t = lacc->base;
2721 lacc->type = racc->type;
2722 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2723 lacc->offset, racc->type))
2724 lacc->expr = t;
2725 else
2727 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2728 lacc->base, lacc->offset,
2729 racc, NULL, false);
2730 lacc->grp_no_warning = true;
2733 return ret;
2736 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2738 struct access *new_acc = NULL;
2739 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2741 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2742 &new_acc))
2744 if (new_acc)
2746 if (!new_acc->grp_write && rchild->grp_write)
2748 gcc_assert (!lacc->grp_write);
2749 subtree_mark_written_and_enqueue (new_acc);
2750 ret = true;
2753 rchild->grp_hint = 1;
2754 new_acc->grp_hint |= new_acc->grp_read;
2755 if (rchild->first_child
2756 && propagate_subaccesses_across_link (new_acc, rchild))
2758 ret = 1;
2759 add_access_to_work_queue (new_acc);
2762 else
2764 if (!lacc->grp_write)
2766 ret = true;
2767 subtree_mark_written_and_enqueue (lacc);
2770 continue;
2773 if (rchild->grp_unscalarizable_region)
2775 if (rchild->grp_write && !lacc->grp_write)
2777 ret = true;
2778 subtree_mark_written_and_enqueue (lacc);
2780 continue;
2783 rchild->grp_hint = 1;
2784 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2785 lacc->grp_write
2786 || rchild->grp_write);
2787 gcc_checking_assert (new_acc);
2788 if (racc->first_child)
2789 propagate_subaccesses_across_link (new_acc, rchild);
2791 add_access_to_work_queue (lacc);
2792 ret = true;
2795 return ret;
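/* Illustrative sketch, not part of the pass: in the hypothetical code below
   only the RHS 'b' has a direct scalar access, but the assign link created
   for "a = b" lets the function above replicate b's subaccess <0,32> onto
   'a', so the aggregate copy can later be carried out as moves between
   scalar replacements.  */
struct prop_example { int x; int y; };

static int
propagate_example (struct prop_example b, struct prop_example *out)
{
  int t = b.x;   /* scalar subaccess <0,32> on the RHS candidate 'b'.  */
  struct prop_example a;
  a = b;         /* assign link; the subaccess is propagated to 'a'.  */
  *out = a;
  return t;
}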
2798 /* Propagate all subaccesses across assignment links. */
2800 static void
2801 propagate_all_subaccesses (void)
2803 while (work_queue_head)
2805 struct access *racc = pop_access_from_work_queue ();
2806 struct assign_link *link;
2808 if (racc->group_representative)
2809 racc = racc->group_representative;
2810 gcc_assert (racc->first_link);
2812 for (link = racc->first_link; link; link = link->next)
2814 struct access *lacc = link->lacc;
2816 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2817 continue;
2818 lacc = lacc->group_representative;
2820 bool requeue_parents = false;
2821 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2823 if (!lacc->grp_write)
2825 subtree_mark_written_and_enqueue (lacc);
2826 requeue_parents = true;
2829 else if (propagate_subaccesses_across_link (lacc, racc))
2830 requeue_parents = true;
2832 if (requeue_parents)
2835 add_access_to_work_queue (lacc);
2836 lacc = lacc->parent;
2838 while (lacc);
2843 /* Go through all accesses collected throughout the (intraprocedural) analysis
2844 stage, exclude overlapping ones, identify representatives and build trees
2845 out of them, making decisions about scalarization on the way. Return true
2846 iff there are any to-be-scalarized variables after this stage. */
2848 static bool
2849 analyze_all_variable_accesses (void)
2851 int res = 0;
2852 bitmap tmp = BITMAP_ALLOC (NULL);
2853 bitmap_iterator bi;
2854 unsigned i;
2855 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2857 enum compiler_param param = optimize_speed_p
2858 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2859 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2861 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2862 fall back to a target default. */
2863 unsigned HOST_WIDE_INT max_scalarization_size
2864 = global_options_set.x_param_values[param]
2865 ? PARAM_VALUE (param)
2866 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2868 max_scalarization_size *= BITS_PER_UNIT;
2870 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2871 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2872 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2874 tree var = candidate (i);
2876 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2877 constant_decl_p (var)))
2879 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2880 <= max_scalarization_size)
2882 create_total_scalarization_access (var);
2883 completely_scalarize (var, TREE_TYPE (var), 0, var);
2884 statistics_counter_event (cfun,
2885 "Totally-scalarized aggregates", 1);
2886 if (dump_file && (dump_flags & TDF_DETAILS))
2888 fprintf (dump_file, "Will attempt to totally scalarize ");
2889 print_generic_expr (dump_file, var);
2890 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2893 else if (dump_file && (dump_flags & TDF_DETAILS))
2895 fprintf (dump_file, "Too big to totally scalarize: ");
2896 print_generic_expr (dump_file, var);
2897 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2902 bitmap_copy (tmp, candidate_bitmap);
2903 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2905 tree var = candidate (i);
2906 struct access *access;
2908 access = sort_and_splice_var_accesses (var);
2909 if (!access || !build_access_trees (access))
2910 disqualify_candidate (var,
2911 "No or inhibitingly overlapping accesses.");
2914 propagate_all_subaccesses ();
2916 bitmap_copy (tmp, candidate_bitmap);
2917 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2919 tree var = candidate (i);
2920 struct access *access = get_first_repr_for_decl (var);
2922 if (analyze_access_trees (access))
2924 res++;
2925 if (dump_file && (dump_flags & TDF_DETAILS))
2927 fprintf (dump_file, "\nAccess trees for ");
2928 print_generic_expr (dump_file, var);
2929 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2930 dump_access_tree (dump_file, access);
2931 fprintf (dump_file, "\n");
2934 else
2935 disqualify_candidate (var, "No scalar replacements to be created.");
2938 BITMAP_FREE (tmp);
2940 if (res)
2942 statistics_counter_event (cfun, "Scalarized aggregates", res);
2943 return true;
2945 else
2946 return false;
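/* Illustrative note, not part of the pass: assuming the usual command-line
   spelling of the two parameters consulted above, the total-scalarization
   size cap can be tuned with something like

     gcc -O2 --param sra-max-scalarization-size-Ospeed=64 ...
     gcc -Os --param sra-max-scalarization-size-Osize=32 ...

   where the value is in bytes and is converted to bits above; when left
   unset, the target's move ratio times UNITS_PER_WORD provides the
   default.  */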
2949 /* Generate statements copying scalar replacements of accesses within a subtree
2950 into or out of AGG. ACCESS, all its children, siblings and their children
2951 are to be processed. AGG is an aggregate type expression (can be a
2952 declaration but does not have to be, it can for example also be a mem_ref or
2953 a series of handled components). TOP_OFFSET is the offset of the processed
2954 subtree which has to be subtracted from offsets of individual accesses to
2955 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2956 replacements in the interval <start_offset, start_offset + chunk_size>,
2957 otherwise copy all. GSI is a statement iterator used to place the new
2958 statements. WRITE should be true when the statements should write from AGG
2959 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2960 statements will be added after the current statement in GSI, they will be
2961 added before the statement otherwise. */
2963 static void
2964 generate_subtree_copies (struct access *access, tree agg,
2965 HOST_WIDE_INT top_offset,
2966 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2967 gimple_stmt_iterator *gsi, bool write,
2968 bool insert_after, location_t loc)
2970 /* Never write anything into constant pool decls. See PR70602. */
2971 if (!write && constant_decl_p (agg))
2972 return;
2975 if (chunk_size && access->offset >= start_offset + chunk_size)
2976 return;
2978 if (access->grp_to_be_replaced
2979 && (chunk_size == 0
2980 || access->offset + access->size > start_offset))
2982 tree expr, repl = get_access_replacement (access);
2983 gassign *stmt;
2985 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2986 access, gsi, insert_after);
2988 if (write)
2990 if (access->grp_partial_lhs)
2991 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2992 !insert_after,
2993 insert_after ? GSI_NEW_STMT
2994 : GSI_SAME_STMT);
2995 stmt = gimple_build_assign (repl, expr);
2997 else
2999 TREE_NO_WARNING (repl) = 1;
3000 if (access->grp_partial_lhs)
3001 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3002 !insert_after,
3003 insert_after ? GSI_NEW_STMT
3004 : GSI_SAME_STMT);
3005 stmt = gimple_build_assign (expr, repl);
3007 gimple_set_location (stmt, loc);
3009 if (insert_after)
3010 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3011 else
3012 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3013 update_stmt (stmt);
3014 sra_stats.subtree_copies++;
3016 else if (write
3017 && access->grp_to_be_debug_replaced
3018 && (chunk_size == 0
3019 || access->offset + access->size > start_offset))
3021 gdebug *ds;
3022 tree drhs = build_debug_ref_for_model (loc, agg,
3023 access->offset - top_offset,
3024 access);
3025 ds = gimple_build_debug_bind (get_access_replacement (access),
3026 drhs, gsi_stmt (*gsi));
3027 if (insert_after)
3028 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3029 else
3030 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3033 if (access->first_child)
3034 generate_subtree_copies (access->first_child, agg, top_offset,
3035 start_offset, chunk_size, gsi,
3036 write, insert_after, loc);
3038 access = access->next_sibling;
3040 while (access);
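/* Illustrative sketch, not part of the pass: for a hypothetical refresh
   around the aggregate load below, with 's' scalarized into SR_x and SR_y,
   this function emits roughly "SR_x = p->x; SR_y = p->y" when WRITE is
   true (from AGG into the replacements) or the mirrored stores when WRITE
   is false, one statement per to-be-replaced access.  */
struct copy_example { int x; int y; };

static int
subtree_copies_example (struct copy_example *p)
{
  struct copy_example s = *p;   /* replacements of 's' refreshed from *p.  */
  return s.x + s.y;
}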
3043 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3044 root of the subtree to be processed. GSI is the statement iterator used
3045 for inserting statements which are added after the current statement if
3046 INSERT_AFTER is true or before it otherwise. */
3048 static void
3049 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3050 bool insert_after, location_t loc)
3053 struct access *child;
3055 if (access->grp_to_be_replaced)
3057 gassign *stmt;
3059 stmt = gimple_build_assign (get_access_replacement (access),
3060 build_zero_cst (access->type));
3061 if (insert_after)
3062 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3063 else
3064 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3065 update_stmt (stmt);
3066 gimple_set_location (stmt, loc);
3068 else if (access->grp_to_be_debug_replaced)
3070 gdebug *ds
3071 = gimple_build_debug_bind (get_access_replacement (access),
3072 build_zero_cst (access->type),
3073 gsi_stmt (*gsi));
3074 if (insert_after)
3075 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3076 else
3077 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3080 for (child = access->first_child; child; child = child->next_sibling)
3081 init_subtree_with_zero (child, gsi, insert_after, loc);
3084 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3085 root of the subtree to be processed. GSI is the statement iterator used
3086 for inserting statements which are added after the current statement if
3087 INSERT_AFTER is true or before it otherwise. */
3089 static void
3090 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3091 bool insert_after, location_t loc)
3094 struct access *child;
3096 if (access->grp_to_be_replaced)
3098 tree rep = get_access_replacement (access);
3099 tree clobber = build_constructor (access->type, NULL);
3100 TREE_THIS_VOLATILE (clobber) = 1;
3101 gimple *stmt = gimple_build_assign (rep, clobber);
3103 if (insert_after)
3104 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3105 else
3106 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3107 update_stmt (stmt);
3108 gimple_set_location (stmt, loc);
3111 for (child = access->first_child; child; child = child->next_sibling)
3112 clobber_subtree (child, gsi, insert_after, loc);
3115 /* Search for an access representative for the given expression EXPR and
3116 return it or NULL if it cannot be found. */
3118 static struct access *
3119 get_access_for_expr (tree expr)
3121 poly_int64 poffset, psize, pmax_size;
3122 HOST_WIDE_INT offset, max_size;
3123 tree base;
3124 bool reverse;
3126 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3127 a different size than the size of its argument and we need the latter
3128 one. */
3129 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3130 expr = TREE_OPERAND (expr, 0);
3132 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3133 &reverse);
3134 if (!known_size_p (pmax_size)
3135 || !pmax_size.is_constant (&max_size)
3136 || !poffset.is_constant (&offset)
3137 || !DECL_P (base))
3138 return NULL;
3140 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3141 return NULL;
3143 return get_var_base_offset_size_access (base, offset, max_size);
3146 /* Replace the expression EXPR with a scalar replacement if there is one and
3147 generate other statements to do type conversion or subtree copying if
3148 necessary. GSI is used to place newly created statements, WRITE is true if
3149 the expression is being written to (it is on the LHS of a statement or an output
3150 in an assembly statement). */
3152 static bool
3153 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3155 location_t loc;
3156 struct access *access;
3157 tree type, bfr, orig_expr;
3159 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3161 bfr = *expr;
3162 expr = &TREE_OPERAND (*expr, 0);
3164 else
3165 bfr = NULL_TREE;
3167 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3168 expr = &TREE_OPERAND (*expr, 0);
3169 access = get_access_for_expr (*expr);
3170 if (!access)
3171 return false;
3172 type = TREE_TYPE (*expr);
3173 orig_expr = *expr;
3175 loc = gimple_location (gsi_stmt (*gsi));
3176 gimple_stmt_iterator alt_gsi = gsi_none ();
3177 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3179 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3180 gsi = &alt_gsi;
3183 if (access->grp_to_be_replaced)
3185 tree repl = get_access_replacement (access);
3186 /* If we replace a non-register typed access simply use the original
3187 access expression to extract the scalar component afterwards.
3188 This happens if scalarizing a function return value or parameter
3189 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3190 gcc.c-torture/compile/20011217-1.c.
3192 We also want to use this when accessing a complex or vector which can
3193 be accessed as a different type too, potentially creating a need for
3194 type conversion (see PR42196) and when scalarized unions are involved
3195 in assembler statements (see PR42398). */
3196 if (!useless_type_conversion_p (type, access->type))
3198 tree ref;
3200 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3202 if (write)
3204 gassign *stmt;
3206 if (access->grp_partial_lhs)
3207 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3208 false, GSI_NEW_STMT);
3209 stmt = gimple_build_assign (repl, ref);
3210 gimple_set_location (stmt, loc);
3211 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3213 else
3215 gassign *stmt;
3217 if (access->grp_partial_lhs)
3218 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3219 true, GSI_SAME_STMT);
3220 stmt = gimple_build_assign (ref, repl);
3221 gimple_set_location (stmt, loc);
3222 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3225 else
3226 *expr = repl;
3227 sra_stats.exprs++;
3229 else if (write && access->grp_to_be_debug_replaced)
3231 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3232 NULL_TREE,
3233 gsi_stmt (*gsi));
3234 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3237 if (access->first_child)
3239 HOST_WIDE_INT start_offset, chunk_size;
3240 if (bfr
3241 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3242 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3244 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3245 start_offset = access->offset
3246 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3248 else
3249 start_offset = chunk_size = 0;
3251 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3252 start_offset, chunk_size, gsi, write, write,
3253 loc);
3255 return true;
3258 /* Where scalar replacements of the RHS have been written to when a replacement
3259 of the LHS of an assignment cannot be directly loaded from a replacement of
3260 the RHS. */
3261 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3262 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3263 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3265 struct subreplacement_assignment_data
3267 /* Offset of the access representing the lhs of the assignment. */
3268 HOST_WIDE_INT left_offset;
3270 /* LHS and RHS of the original assignment. */
3271 tree assignment_lhs, assignment_rhs;
3273 /* Access representing the rhs of the whole assignment. */
3274 struct access *top_racc;
3276 /* Stmt iterator used for statement insertions after the original assignment.
3277 It points to the main GSI used to traverse a BB during function body
3278 modification. */
3279 gimple_stmt_iterator *new_gsi;
3281 /* Stmt iterator used for statement insertions before the original
3282 assignment. Keeps on pointing to the original statement. */
3283 gimple_stmt_iterator old_gsi;
3285 /* Location of the assignment. */
3286 location_t loc;
3288 /* Keeps the information whether we have needed to refresh replacements of
3289 the LHS and from which side of the assignments this takes place. */
3290 enum unscalarized_data_handling refreshed;
3293 /* Store all replacements in the access tree rooted in TOP_RACC either to their
3294 base aggregate if there are unscalarized data or directly to the LHS of the
3295 statement pointed to by SAD->old_gsi otherwise. */
3297 static void
3298 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3300 tree src;
3301 if (sad->top_racc->grp_unscalarized_data)
3303 src = sad->assignment_rhs;
3304 sad->refreshed = SRA_UDH_RIGHT;
3306 else
3308 src = sad->assignment_lhs;
3309 sad->refreshed = SRA_UDH_LEFT;
3311 generate_subtree_copies (sad->top_racc->first_child, src,
3312 sad->top_racc->offset, 0, 0,
3313 &sad->old_gsi, false, false, sad->loc);
3316 /* Try to generate statements to load all sub-replacements in an access subtree
3317 formed by children of LACC from scalar replacements in the SAD->top_racc
3318 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3319 and load the accesses from it. */
3321 static void
3322 load_assign_lhs_subreplacements (struct access *lacc,
3323 struct subreplacement_assignment_data *sad)
3325 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3327 HOST_WIDE_INT offset;
3328 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3330 if (lacc->grp_to_be_replaced)
3332 struct access *racc;
3333 gassign *stmt;
3334 tree rhs;
3336 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3337 if (racc && racc->grp_to_be_replaced)
3339 rhs = get_access_replacement (racc);
3340 if (!useless_type_conversion_p (lacc->type, racc->type))
3341 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3342 lacc->type, rhs);
3344 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3345 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3346 NULL_TREE, true, GSI_SAME_STMT);
3348 else
3350 /* No suitable access on the right hand side, need to load from
3351 the aggregate. See if we have to update it first... */
3352 if (sad->refreshed == SRA_UDH_NONE)
3353 handle_unscalarized_data_in_subtree (sad);
3355 if (sad->refreshed == SRA_UDH_LEFT)
3356 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3357 lacc->offset - sad->left_offset,
3358 lacc, sad->new_gsi, true);
3359 else
3360 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3361 lacc->offset - sad->left_offset,
3362 lacc, sad->new_gsi, true);
3363 if (lacc->grp_partial_lhs)
3364 rhs = force_gimple_operand_gsi (sad->new_gsi,
3365 rhs, true, NULL_TREE,
3366 false, GSI_NEW_STMT);
3369 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3370 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3371 gimple_set_location (stmt, sad->loc);
3372 update_stmt (stmt);
3373 sra_stats.subreplacements++;
3375 else
3377 if (sad->refreshed == SRA_UDH_NONE
3378 && lacc->grp_read && !lacc->grp_covered)
3379 handle_unscalarized_data_in_subtree (sad);
3381 if (lacc && lacc->grp_to_be_debug_replaced)
3383 gdebug *ds;
3384 tree drhs;
3385 struct access *racc = find_access_in_subtree (sad->top_racc,
3386 offset,
3387 lacc->size);
3389 if (racc && racc->grp_to_be_replaced)
3391 if (racc->grp_write || constant_decl_p (racc->base))
3392 drhs = get_access_replacement (racc);
3393 else
3394 drhs = NULL;
3396 else if (sad->refreshed == SRA_UDH_LEFT)
3397 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3398 lacc->offset, lacc);
3399 else if (sad->refreshed == SRA_UDH_RIGHT)
3400 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3401 offset, lacc);
3402 else
3403 drhs = NULL_TREE;
3404 if (drhs
3405 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3406 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3407 lacc->type, drhs);
3408 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3409 drhs, gsi_stmt (sad->old_gsi));
3410 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3414 if (lacc->first_child)
3415 load_assign_lhs_subreplacements (lacc, sad);
3419 /* Result code for SRA assignment modification. */
3420 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3421 SRA_AM_MODIFIED, /* stmt changed but not
3422 removed */
3423 SRA_AM_REMOVED }; /* stmt eliminated */
3425 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT is a pointer
3426 to the assignment and GSI is the statement iterator pointing at it. Returns
3427 the same values as sra_modify_assign. */
3429 static enum assignment_mod_result
3430 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3432 tree lhs = gimple_assign_lhs (stmt);
3433 struct access *acc = get_access_for_expr (lhs);
3434 if (!acc)
3435 return SRA_AM_NONE;
3436 location_t loc = gimple_location (stmt);
3438 if (gimple_clobber_p (stmt))
3440 /* Clobber the replacement variable. */
3441 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3442 /* Remove clobbers of fully scalarized variables, they are dead. */
3443 if (acc->grp_covered)
3445 unlink_stmt_vdef (stmt);
3446 gsi_remove (gsi, true);
3447 release_defs (stmt);
3448 return SRA_AM_REMOVED;
3450 else
3451 return SRA_AM_MODIFIED;
3454 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3456 /* I have never seen this code path trigger but if it can happen the
3457 following should handle it gracefully. */
3458 if (access_has_children_p (acc))
3459 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3460 true, true, loc);
3461 return SRA_AM_MODIFIED;
3464 if (acc->grp_covered)
3466 init_subtree_with_zero (acc, gsi, false, loc);
3467 unlink_stmt_vdef (stmt);
3468 gsi_remove (gsi, true);
3469 release_defs (stmt);
3470 return SRA_AM_REMOVED;
3472 else
3474 init_subtree_with_zero (acc, gsi, true, loc);
3475 return SRA_AM_MODIFIED;
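/* Illustrative sketch, not part of the pass: in the hypothetical code
   below the zero initializer typically reaches this function as an
   empty-CONSTRUCTOR assignment; 's' is fully covered by replacements, so
   the statement is removed and replaced by "SR_x = 0; SR_y = 0"
   (SRA_AM_REMOVED).  */
struct ctor_example { int x; int y; };

static int
ctor_assign_example (void)
{
  struct ctor_example s = { 0, 0 };   /* zero-init of the replacements.  */
  return s.x + s.y;
}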
3479 /* Create and return a new suitable default definition SSA_NAME for RACC which
3480 is an access describing an uninitialized part of an aggregate that is being
3481 loaded. REG_TREE is used instead of the actual RACC type if that is not of
3482 a gimple register type. */
3484 static tree
3485 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
3487 gcc_checking_assert (!racc->grp_to_be_replaced
3488 && !racc->grp_to_be_debug_replaced);
3489 if (!racc->replacement_decl)
3490 racc->replacement_decl = create_access_replacement (racc, reg_type);
3491 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3494 /* Examine both sides of the assignment statement pointed to by STMT, replace
3495 them with a scalar replacement if there is one and generate copying of
3496 replacements if scalarized aggregates have been used in the assignment. GSI
3497 is used to hold generated statements for type conversions and subtree
3498 copying. */
3500 static enum assignment_mod_result
3501 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3503 struct access *lacc, *racc;
3504 tree lhs, rhs;
3505 bool modify_this_stmt = false;
3506 bool force_gimple_rhs = false;
3507 location_t loc;
3508 gimple_stmt_iterator orig_gsi = *gsi;
3510 if (!gimple_assign_single_p (stmt))
3511 return SRA_AM_NONE;
3512 lhs = gimple_assign_lhs (stmt);
3513 rhs = gimple_assign_rhs1 (stmt);
3515 if (TREE_CODE (rhs) == CONSTRUCTOR)
3516 return sra_modify_constructor_assign (stmt, gsi);
3518 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3519 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3520 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3522 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3523 gsi, false);
3524 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3525 gsi, true);
3526 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3529 lacc = get_access_for_expr (lhs);
3530 racc = get_access_for_expr (rhs);
3531 if (!lacc && !racc)
3532 return SRA_AM_NONE;
3533 /* Avoid modifying initializations of constant-pool replacements. */
3534 if (racc && (racc->replacement_decl == lhs))
3535 return SRA_AM_NONE;
3537 loc = gimple_location (stmt);
3538 if (lacc && lacc->grp_to_be_replaced)
3540 lhs = get_access_replacement (lacc);
3541 gimple_assign_set_lhs (stmt, lhs);
3542 modify_this_stmt = true;
3543 if (lacc->grp_partial_lhs)
3544 force_gimple_rhs = true;
3545 sra_stats.exprs++;
3548 if (racc && racc->grp_to_be_replaced)
3550 rhs = get_access_replacement (racc);
3551 modify_this_stmt = true;
3552 if (racc->grp_partial_lhs)
3553 force_gimple_rhs = true;
3554 sra_stats.exprs++;
3556 else if (racc
3557 && !racc->grp_unscalarized_data
3558 && !racc->grp_unscalarizable_region
3559 && TREE_CODE (lhs) == SSA_NAME
3560 && !access_has_replacements_p (racc))
3562 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
3563 modify_this_stmt = true;
3564 sra_stats.exprs++;
3567 if (modify_this_stmt)
3569 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3571 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3572 ??? This should move to fold_stmt which we simply should
3573 call after building a VIEW_CONVERT_EXPR here. */
3574 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3575 && !contains_bitfld_component_ref_p (lhs))
3577 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3578 gimple_assign_set_lhs (stmt, lhs);
3580 else if (lacc
3581 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3582 && !contains_vce_or_bfcref_p (rhs))
3583 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3585 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3587 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3588 rhs);
3589 if (is_gimple_reg_type (TREE_TYPE (lhs))
3590 && TREE_CODE (lhs) != SSA_NAME)
3591 force_gimple_rhs = true;
3596 if (lacc && lacc->grp_to_be_debug_replaced)
3598 tree dlhs = get_access_replacement (lacc);
3599 tree drhs = unshare_expr (rhs);
3600 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3602 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3603 && !contains_vce_or_bfcref_p (drhs))
3604 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3605 if (drhs
3606 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3607 TREE_TYPE (drhs)))
3608 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3609 TREE_TYPE (dlhs), drhs);
3611 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3612 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3615 /* From this point on, the function deals with assignments between
3616 aggregates when at least one has scalar reductions of some of its
3617 components. There are three possible scenarios: 1) both the LHS and the RHS
3618 have to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
3620 In the first case, we would like to load the LHS components from RHS
3621 components whenever possible. If that is not possible, we would like to
3622 read it directly from the RHS (after updating it by storing in it its own
3623 components). If there are some necessary unscalarized data in the LHS,
3624 those will be loaded by the original assignment too. If neither of these
3625 cases happen, the original statement can be removed. Most of this is done
3626 by load_assign_lhs_subreplacements.
3628 In the second case, we would like to store all RHS scalarized components
3629 directly into LHS and if they cover the aggregate completely, remove the
3630 statement too. In the third case, we want the LHS components to be loaded
3631 directly from the RHS (DSE will remove the original statement if it
3632 becomes redundant).
3634 This is a bit complex but manageable when types match and when unions do
3635 not cause confusion in a way that we cannot really load a component of LHS
3636 from the RHS or vice versa (the access representing this level can have
3637 subaccesses that are accessible only through a different union field at a
3638 higher level - different from the one used in the examined expression).
3639 Unions are fun.
3641 Therefore, I specially handle a fourth case, happening when there is a
3642 specific type cast or it is impossible to locate a scalarized subaccess on
3643 the other side of the expression. If that happens, I simply "refresh" the
3644 RHS by storing in it its scalarized components, leaving the original statement
3645 there to do the copying and then load the scalar replacements of the LHS.
3646 This is what the first branch does. */
3648 if (modify_this_stmt
3649 || gimple_has_volatile_ops (stmt)
3650 || contains_vce_or_bfcref_p (rhs)
3651 || contains_vce_or_bfcref_p (lhs)
3652 || stmt_ends_bb_p (stmt))
3654 /* No need to copy into a constant-pool, it comes pre-initialized. */
3655 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3656 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3657 gsi, false, false, loc);
3658 if (access_has_children_p (lacc))
3660 gimple_stmt_iterator alt_gsi = gsi_none ();
3661 if (stmt_ends_bb_p (stmt))
3663 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3664 gsi = &alt_gsi;
3666 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3667 gsi, true, true, loc);
3669 sra_stats.separate_lhs_rhs_handling++;
3671 /* This gimplification must be done after generate_subtree_copies,
3672 lest we insert the subtree copies in the middle of the gimplified
3673 sequence. */
3674 if (force_gimple_rhs)
3675 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3676 true, GSI_SAME_STMT);
3677 if (gimple_assign_rhs1 (stmt) != rhs)
3679 modify_this_stmt = true;
3680 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3681 gcc_assert (stmt == gsi_stmt (orig_gsi));
3684 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3686 else
3688 if (access_has_children_p (lacc)
3689 && access_has_children_p (racc)
3690 /* When an access represents an unscalarizable region, it usually
3691 represents accesses with variable offset and thus must not be used
3692 to generate new memory accesses. */
3693 && !lacc->grp_unscalarizable_region
3694 && !racc->grp_unscalarizable_region)
3696 struct subreplacement_assignment_data sad;
3698 sad.left_offset = lacc->offset;
3699 sad.assignment_lhs = lhs;
3700 sad.assignment_rhs = rhs;
3701 sad.top_racc = racc;
3702 sad.old_gsi = *gsi;
3703 sad.new_gsi = gsi;
3704 sad.loc = gimple_location (stmt);
3705 sad.refreshed = SRA_UDH_NONE;
3707 if (lacc->grp_read && !lacc->grp_covered)
3708 handle_unscalarized_data_in_subtree (&sad);
3710 load_assign_lhs_subreplacements (lacc, &sad);
3711 if (sad.refreshed != SRA_UDH_RIGHT)
3713 gsi_next (gsi);
3714 unlink_stmt_vdef (stmt);
3715 gsi_remove (&sad.old_gsi, true);
3716 release_defs (stmt);
3717 sra_stats.deleted++;
3718 return SRA_AM_REMOVED;
3721 else
3723 if (access_has_children_p (racc)
3724 && !racc->grp_unscalarized_data
3725 && TREE_CODE (lhs) != SSA_NAME)
3727 if (dump_file)
3729 fprintf (dump_file, "Removing load: ");
3730 print_gimple_stmt (dump_file, stmt, 0);
3732 generate_subtree_copies (racc->first_child, lhs,
3733 racc->offset, 0, 0, gsi,
3734 false, false, loc);
3735 gcc_assert (stmt == gsi_stmt (*gsi));
3736 unlink_stmt_vdef (stmt);
3737 gsi_remove (gsi, true);
3738 release_defs (stmt);
3739 sra_stats.deleted++;
3740 return SRA_AM_REMOVED;
3742 /* Restore the aggregate RHS from its components so the
3743 prevailing aggregate copy does the right thing. */
3744 if (access_has_children_p (racc))
3745 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3746 gsi, false, false, loc);
3747 /* Re-load the components of the aggregate copy destination.
3748 But use the RHS aggregate to load from to expose more
3749 optimization opportunities. */
3750 if (access_has_children_p (lacc))
3751 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3752 0, 0, gsi, true, true, loc);
3755 return SRA_AM_NONE;
3759 /* Set any scalar replacements of values in the constant pool to the initial
3760 value of the constant. (Constant-pool decls like *.LC0 have effectively
3761 been initialized before the program starts, we must do the same for their
3762 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
3763 the function's entry block. */
3765 static void
3766 initialize_constant_pool_replacements (void)
3768 gimple_seq seq = NULL;
3769 gimple_stmt_iterator gsi = gsi_start (seq);
3770 bitmap_iterator bi;
3771 unsigned i;
3773 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3775 tree var = candidate (i);
3776 if (!constant_decl_p (var))
3777 continue;
3778 vec<access_p> *access_vec = get_base_access_vector (var);
3779 if (!access_vec)
3780 continue;
3781 for (unsigned i = 0; i < access_vec->length (); i++)
3783 struct access *access = (*access_vec)[i];
3784 if (!access->replacement_decl)
3785 continue;
3786 gassign *stmt
3787 = gimple_build_assign (get_access_replacement (access),
3788 unshare_expr (access->expr));
3789 if (dump_file && (dump_flags & TDF_DETAILS))
3791 fprintf (dump_file, "Generating constant initializer: ");
3792 print_gimple_stmt (dump_file, stmt, 0);
3793 fprintf (dump_file, "\n");
3795 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3796 update_stmt (stmt);
3800 seq = gsi_seq (gsi);
3801 if (seq)
3802 gsi_insert_seq_on_edge_immediate (
3803 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3806 /* Traverse the function body and perform all modifications as decided in
3807 analyze_all_variable_accesses. Return true iff the CFG has been
3808 changed. */
3810 static bool
3811 sra_modify_function_body (void)
3813 bool cfg_changed = false;
3814 basic_block bb;
3816 initialize_constant_pool_replacements ();
3818 FOR_EACH_BB_FN (bb, cfun)
3820 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3821 while (!gsi_end_p (gsi))
3823 gimple *stmt = gsi_stmt (gsi);
3824 enum assignment_mod_result assign_result;
3825 bool modified = false, deleted = false;
3826 tree *t;
3827 unsigned i;
3829 switch (gimple_code (stmt))
3831 case GIMPLE_RETURN:
3832 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3833 if (*t != NULL_TREE)
3834 modified |= sra_modify_expr (t, &gsi, false);
3835 break;
3837 case GIMPLE_ASSIGN:
3838 assign_result = sra_modify_assign (stmt, &gsi);
3839 modified |= assign_result == SRA_AM_MODIFIED;
3840 deleted = assign_result == SRA_AM_REMOVED;
3841 break;
3843 case GIMPLE_CALL:
3844 /* Operands must be processed before the lhs. */
3845 for (i = 0; i < gimple_call_num_args (stmt); i++)
3847 t = gimple_call_arg_ptr (stmt, i);
3848 modified |= sra_modify_expr (t, &gsi, false);
3851 if (gimple_call_lhs (stmt))
3853 t = gimple_call_lhs_ptr (stmt);
3854 modified |= sra_modify_expr (t, &gsi, true);
3856 break;
3858 case GIMPLE_ASM:
3860 gasm *asm_stmt = as_a <gasm *> (stmt);
3861 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3863 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3864 modified |= sra_modify_expr (t, &gsi, false);
3866 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3868 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3869 modified |= sra_modify_expr (t, &gsi, true);
3872 break;
3874 default:
3875 break;
3878 if (modified)
3880 update_stmt (stmt);
3881 if (maybe_clean_eh_stmt (stmt)
3882 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3883 cfg_changed = true;
3885 if (!deleted)
3886 gsi_next (&gsi);
3890 gsi_commit_edge_inserts ();
3891 return cfg_changed;
3894 /* Generate statements initializing scalar replacements of parts of function
3895 parameters. */
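/* For instance (hypothetical source): given 'void foo (struct S s)' where
   s.x received the scalar replacement SR.3, a statement along the lines of
   'SR.3 = s.x;' is emitted on the single edge leaving the entry block.  */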
3897 static void
3898 initialize_parameter_reductions (void)
3900 gimple_stmt_iterator gsi;
3901 gimple_seq seq = NULL;
3902 tree parm;
3904 gsi = gsi_start (seq);
3905 for (parm = DECL_ARGUMENTS (current_function_decl);
3906 parm;
3907 parm = DECL_CHAIN (parm))
3909 vec<access_p> *access_vec;
3910 struct access *access;
3912 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3913 continue;
3914 access_vec = get_base_access_vector (parm);
3915 if (!access_vec)
3916 continue;
3918 for (access = (*access_vec)[0];
3919 access;
3920 access = access->next_grp)
3921 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3922 EXPR_LOCATION (parm));
3925 seq = gsi_seq (gsi);
3926 if (seq)
3927 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3930 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3931 it reveals there are components of some aggregates to be scalarized, it runs
3932 the required transformations. */
3933 static unsigned int
3934 perform_intra_sra (void)
3936 int ret = 0;
3937 sra_initialize ();
3939 if (!find_var_candidates ())
3940 goto out;
3942 if (!scan_function ())
3943 goto out;
3945 if (!analyze_all_variable_accesses ())
3946 goto out;
3948 if (sra_modify_function_body ())
3949 ret = TODO_update_ssa | TODO_cleanup_cfg;
3950 else
3951 ret = TODO_update_ssa;
3952 initialize_parameter_reductions ();
3954 statistics_counter_event (cfun, "Scalar replacements created",
3955 sra_stats.replacements);
3956 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3957 statistics_counter_event (cfun, "Subtree copy stmts",
3958 sra_stats.subtree_copies);
3959 statistics_counter_event (cfun, "Subreplacement stmts",
3960 sra_stats.subreplacements);
3961 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3962 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3963 sra_stats.separate_lhs_rhs_handling);
3965 out:
3966 sra_deinitialize ();
3967 return ret;
3970 /* Perform early intraprocedural SRA. */
3971 static unsigned int
3972 early_intra_sra (void)
3974 sra_mode = SRA_MODE_EARLY_INTRA;
3975 return perform_intra_sra ();
3978 /* Perform "late" intraprocedural SRA. */
3979 static unsigned int
3980 late_intra_sra (void)
3982 sra_mode = SRA_MODE_INTRA;
3983 return perform_intra_sra ();
3987 static bool
3988 gate_intra_sra (void)
3990 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3994 namespace {
3996 const pass_data pass_data_sra_early =
3998 GIMPLE_PASS, /* type */
3999 "esra", /* name */
4000 OPTGROUP_NONE, /* optinfo_flags */
4001 TV_TREE_SRA, /* tv_id */
4002 ( PROP_cfg | PROP_ssa ), /* properties_required */
4003 0, /* properties_provided */
4004 0, /* properties_destroyed */
4005 0, /* todo_flags_start */
4006 TODO_update_ssa, /* todo_flags_finish */
4009 class pass_sra_early : public gimple_opt_pass
4011 public:
4012 pass_sra_early (gcc::context *ctxt)
4013 : gimple_opt_pass (pass_data_sra_early, ctxt)
4016 /* opt_pass methods: */
4017 virtual bool gate (function *) { return gate_intra_sra (); }
4018 virtual unsigned int execute (function *) { return early_intra_sra (); }
4020 }; // class pass_sra_early
4022 } // anon namespace
4024 gimple_opt_pass *
4025 make_pass_sra_early (gcc::context *ctxt)
4027 return new pass_sra_early (ctxt);
4030 namespace {
4032 const pass_data pass_data_sra =
4034 GIMPLE_PASS, /* type */
4035 "sra", /* name */
4036 OPTGROUP_NONE, /* optinfo_flags */
4037 TV_TREE_SRA, /* tv_id */
4038 ( PROP_cfg | PROP_ssa ), /* properties_required */
4039 0, /* properties_provided */
4040 0, /* properties_destroyed */
4041 TODO_update_address_taken, /* todo_flags_start */
4042 TODO_update_ssa, /* todo_flags_finish */
4045 class pass_sra : public gimple_opt_pass
4047 public:
4048 pass_sra (gcc::context *ctxt)
4049 : gimple_opt_pass (pass_data_sra, ctxt)
4052 /* opt_pass methods: */
4053 virtual bool gate (function *) { return gate_intra_sra (); }
4054 virtual unsigned int execute (function *) { return late_intra_sra (); }
4056 }; // class pass_sra
4058 } // anon namespace
4060 gimple_opt_pass *
4061 make_pass_sra (gcc::context *ctxt)
4063 return new pass_sra (ctxt);
4067 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
4068 parameter. */
4070 static bool
4071 is_unused_scalar_param (tree parm)
4073 tree name;
4074 return (is_gimple_reg (parm)
4075 && (!(name = ssa_default_def (cfun, parm))
4076 || has_zero_uses (name)));
4079 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
4080 examine whether there are any direct or otherwise infeasible ones. If so,
4081 return true, otherwise return false. PARM must be a gimple register with a
4082 non-NULL default definition. */
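/* For example (hypothetical source): in 'int foo (int *p)', a dereference
   such as '*p' on either side of an assignment or as a call argument is a
   valid use, whereas storing, returning or comparing 'p' itself is a
   direct use that makes the parameter unsuitable for IPA-SRA.  */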
4084 static bool
4085 ptr_parm_has_direct_uses (tree parm)
4087 imm_use_iterator ui;
4088 gimple *stmt;
4089 tree name = ssa_default_def (cfun, parm);
4090 bool ret = false;
4092 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4094 int uses_ok = 0;
4095 use_operand_p use_p;
4097 if (is_gimple_debug (stmt))
4098 continue;
4100 /* Valid uses include dereferences on the lhs and the rhs. */
4101 if (gimple_has_lhs (stmt))
4103 tree lhs = gimple_get_lhs (stmt);
4104 while (handled_component_p (lhs))
4105 lhs = TREE_OPERAND (lhs, 0);
4106 if (TREE_CODE (lhs) == MEM_REF
4107 && TREE_OPERAND (lhs, 0) == name
4108 && integer_zerop (TREE_OPERAND (lhs, 1))
4109 && types_compatible_p (TREE_TYPE (lhs),
4110 TREE_TYPE (TREE_TYPE (name)))
4111 && !TREE_THIS_VOLATILE (lhs))
4112 uses_ok++;
4114 if (gimple_assign_single_p (stmt))
4116 tree rhs = gimple_assign_rhs1 (stmt);
4117 while (handled_component_p (rhs))
4118 rhs = TREE_OPERAND (rhs, 0);
4119 if (TREE_CODE (rhs) == MEM_REF
4120 && TREE_OPERAND (rhs, 0) == name
4121 && integer_zerop (TREE_OPERAND (rhs, 1))
4122 && types_compatible_p (TREE_TYPE (rhs),
4123 TREE_TYPE (TREE_TYPE (name)))
4124 && !TREE_THIS_VOLATILE (rhs))
4125 uses_ok++;
4127 else if (is_gimple_call (stmt))
4129 unsigned i;
4130 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4132 tree arg = gimple_call_arg (stmt, i);
4133 while (handled_component_p (arg))
4134 arg = TREE_OPERAND (arg, 0);
4135 if (TREE_CODE (arg) == MEM_REF
4136 && TREE_OPERAND (arg, 0) == name
4137 && integer_zerop (TREE_OPERAND (arg, 1))
4138 && types_compatible_p (TREE_TYPE (arg),
4139 TREE_TYPE (TREE_TYPE (name)))
4140 && !TREE_THIS_VOLATILE (arg))
4141 uses_ok++;
4145 /* If the number of valid uses does not match the number of
4146 uses in this stmt, there is an unhandled use. */
4147 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4148 --uses_ok;
4150 if (uses_ok != 0)
4151 ret = true;
4153 if (ret)
4154 BREAK_FROM_IMM_USE_STMT (ui);
4157 return ret;
4160 /* Identify candidates for reduction for IPA-SRA based on their type and mark
4161 them in candidate_bitmap. Note that these do not necessarily include
4162 parameters which are unused and thus can be removed. Return true iff any
4163 such candidate has been found. */
4165 static bool
4166 find_param_candidates (void)
4168 tree parm;
4169 int count = 0;
4170 bool ret = false;
4171 const char *msg;
4173 for (parm = DECL_ARGUMENTS (current_function_decl);
4174 parm;
4175 parm = DECL_CHAIN (parm))
4177 tree type = TREE_TYPE (parm);
4178 tree_node **slot;
4180 count++;
4182 if (TREE_THIS_VOLATILE (parm)
4183 || TREE_ADDRESSABLE (parm)
4184 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4185 continue;
4187 if (is_unused_scalar_param (parm))
4189 ret = true;
4190 continue;
4193 if (POINTER_TYPE_P (type))
4195 type = TREE_TYPE (type);
4197 if (TREE_CODE (type) == FUNCTION_TYPE
4198 || TYPE_VOLATILE (type)
4199 || (TREE_CODE (type) == ARRAY_TYPE
4200 && TYPE_NONALIASED_COMPONENT (type))
4201 || !is_gimple_reg (parm)
4202 || is_va_list_type (type)
4203 || ptr_parm_has_direct_uses (parm))
4204 continue;
4206 else if (!AGGREGATE_TYPE_P (type))
4207 continue;
4209 if (!COMPLETE_TYPE_P (type)
4210 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4211 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4212 || (AGGREGATE_TYPE_P (type)
4213 && type_internals_preclude_sra_p (type, &msg)))
4214 continue;
4216 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4217 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4218 *slot = parm;
4220 ret = true;
4221 if (dump_file && (dump_flags & TDF_DETAILS))
4223 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4224 print_generic_expr (dump_file, parm);
4225 fprintf (dump_file, "\n");
4229 func_param_count = count;
4230 return ret;
4233 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4234 maybe_modified. */
4236 static bool
4237 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4238 void *data)
4240 struct access *repr = (struct access *) data;
4242 repr->grp_maybe_modified = 1;
4243 return true;
4246 /* Analyze what representatives (in linked lists accessible from
4247 REPRESENTATIVES) can be modified by side effects of statements in the
4248 current function. */
4250 static void
4251 analyze_modified_params (vec<access_p> representatives)
4253 int i;
4255 for (i = 0; i < func_param_count; i++)
4257 struct access *repr;
4259 for (repr = representatives[i];
4260 repr;
4261 repr = repr->next_grp)
4263 struct access *access;
4264 bitmap visited;
4265 ao_ref ar;
4267 if (no_accesses_p (repr))
4268 continue;
4269 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4270 || repr->grp_maybe_modified)
4271 continue;
4273 ao_ref_init (&ar, repr->expr);
4274 visited = BITMAP_ALLOC (NULL);
4275 for (access = repr; access; access = access->next_sibling)
4277 /* All accesses are read ones, otherwise grp_maybe_modified would
4278 be trivially set. */
4279 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4280 mark_maybe_modified, repr, &visited);
4281 if (repr->grp_maybe_modified)
4282 break;
4284 BITMAP_FREE (visited);
4289 /* Propagate distances in bb_dereferences in the opposite direction to the
4290 control flow edges, in each step storing the maximum of the current value
4291 and the minimum of all successors. These steps are repeated until the
4292 table stabilizes. Note that BBs which might terminate the function
4293 (according to the final_bbs bitmap) are never updated in this way. */
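/* A small hypothetical example: if one successor of BB is known to
   dereference the first pointer parameter up to distance 64 (offset plus
   size) and another only up to 32, then BB may inherit the minimum, 32,
   and does so whenever that exceeds its currently recorded distance.  */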
4295 static void
4296 propagate_dereference_distances (void)
4298 basic_block bb;
4300 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4301 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4302 FOR_EACH_BB_FN (bb, cfun)
4304 queue.quick_push (bb);
4305 bb->aux = bb;
4308 while (!queue.is_empty ())
4310 edge_iterator ei;
4311 edge e;
4312 bool change = false;
4313 int i;
4315 bb = queue.pop ();
4316 bb->aux = NULL;
4318 if (bitmap_bit_p (final_bbs, bb->index))
4319 continue;
4321 for (i = 0; i < func_param_count; i++)
4323 int idx = bb->index * func_param_count + i;
4324 bool first = true;
4325 HOST_WIDE_INT inh = 0;
4327 FOR_EACH_EDGE (e, ei, bb->succs)
4329 int succ_idx = e->dest->index * func_param_count + i;
4331 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4332 continue;
4334 if (first)
4336 first = false;
4337 inh = bb_dereferences [succ_idx];
4339 else if (bb_dereferences [succ_idx] < inh)
4340 inh = bb_dereferences [succ_idx];
4343 if (!first && bb_dereferences[idx] < inh)
4345 bb_dereferences[idx] = inh;
4346 change = true;
4350 if (change && !bitmap_bit_p (final_bbs, bb->index))
4351 FOR_EACH_EDGE (e, ei, bb->preds)
4353 if (e->src->aux)
4354 continue;
4356 e->src->aux = e->src;
4357 queue.quick_push (e->src);
4362 /* Dump a dereferences TABLE with heading STR to file F. */
4364 static void
4365 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4367 basic_block bb;
4369 fprintf (f, "%s", str);
4370 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4371 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4373 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4374 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4376 int i;
4377 for (i = 0; i < func_param_count; i++)
4379 int idx = bb->index * func_param_count + i;
4380 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4383 fprintf (f, "\n");
4385 fprintf (f, "\n");
4388 /* Determine which (parts of) parameters passed by reference and not
4389 assigned to are not certainly dereferenced in this function, which means
4390 the dereferencing cannot be safely moved to the caller without potentially
4391 introducing a segfault. Mark such REPRESENTATIVES as
4392 grp_not_necessarilly_dereferenced.
4394 The maximum dereferenced "distance," i.e. the offset + size of the accessed
4395 part, is calculated for each pointer parameter rather than a simple
4396 boolean, in order to handle cases when only a fraction of the whole
4397 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4398 an example).
4400 The maximum dereference distances for each pointer parameter and BB are
4401 already stored in bb_dereferences. This routine simply propagates these
4402 values upwards by propagate_dereference_distances and then compares the
4403 distances of individual parameters in the ENTRY BB to the equivalent
4404 distances of each representative of a (fraction of a) parameter. */
4406 static void
4407 analyze_caller_dereference_legality (vec<access_p> representatives)
4409 int i;
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4412 dump_dereferences_table (dump_file,
4413 "Dereference table before propagation:\n",
4414 bb_dereferences);
4416 propagate_dereference_distances ();
4418 if (dump_file && (dump_flags & TDF_DETAILS))
4419 dump_dereferences_table (dump_file,
4420 "Dereference table after propagation:\n",
4421 bb_dereferences);
4423 for (i = 0; i < func_param_count; i++)
4425 struct access *repr = representatives[i];
4426 int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4428 if (!repr || no_accesses_p (repr))
4429 continue;
4433 if ((repr->offset + repr->size) > bb_dereferences[idx])
4434 repr->grp_not_necessarilly_dereferenced = 1;
4435 repr = repr->next_grp;
4437 while (repr);
4441 /* Return the representative access for the parameter declaration PARM if it is
4442 a scalar passed by reference which is not written to and the pointer value
4443 is not used directly. Thus, if it is legal to dereference it in the caller
4444 and we can rule out modifications through aliases, such a parameter should be
4445 turned into one passed by value. Return NULL otherwise. */
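/* E.g. (hypothetical source): 'int foo (int *p)' in which '*p' is only
   ever read can be turned into 'int foo (int p)', with every caller
   adjusted to dereference the pointer it used to pass.  */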
4447 static struct access *
4448 unmodified_by_ref_scalar_representative (tree parm)
4450 int i, access_count;
4451 struct access *repr;
4452 vec<access_p> *access_vec;
4454 access_vec = get_base_access_vector (parm);
4455 gcc_assert (access_vec);
4456 repr = (*access_vec)[0];
4457 if (repr->write)
4458 return NULL;
4459 repr->group_representative = repr;
4461 access_count = access_vec->length ();
4462 for (i = 1; i < access_count; i++)
4464 struct access *access = (*access_vec)[i];
4465 if (access->write)
4466 return NULL;
4467 access->group_representative = repr;
4468 access->next_sibling = repr->next_sibling;
4469 repr->next_sibling = access;
4472 repr->grp_read = 1;
4473 repr->grp_scalar_ptr = 1;
4474 return repr;
4477 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4478 associated with. REQ_ALIGN is the minimum required alignment. */
4480 static bool
4481 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4483 unsigned int exp_align;
4484 /* Avoid issues such as the second simple testcase in PR 42025. The problem
4485 is an incompatible assignment in a call statement (and possibly even in asm
4486 statements). This can be relaxed by using a new temporary but only for
4487 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4488 intraprocedural SRA we deal with this by keeping the old aggregate around,
4489 something we cannot do in IPA-SRA.) */
4490 if (access->write
4491 && (is_gimple_call (access->stmt)
4492 || gimple_code (access->stmt) == GIMPLE_ASM))
4493 return true;
4495 exp_align = get_object_alignment (access->expr);
4496 if (exp_align < req_align)
4497 return true;
4499 return false;
4503 /* Sort collected accesses for parameter PARM, identify representatives for
4504 each accessed region and link them together. Return NULL if there are
4505 different but overlapping accesses, return the special pointer value
4506 meaning there are no accesses for this parameter if that is indeed the
4507 case, and return the first representative otherwise. Set *RO_GRP if there
4508 is a group of accesses with only read (i.e. no write) accesses. */
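/* A hypothetical example of a disqualifying overlap: one access covering a
   whole field at offset 0 with size 64 and another covering only its first
   half at offset 0 with size 32 share an offset but differ in size, so
   NULL is returned and the parameter is left intact.  */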
4510 static struct access *
4511 splice_param_accesses (tree parm, bool *ro_grp)
4513 int i, j, access_count, group_count;
4514 int total_size = 0;
4515 struct access *access, *res, **prev_acc_ptr = &res;
4516 vec<access_p> *access_vec;
4518 access_vec = get_base_access_vector (parm);
4519 if (!access_vec)
4520 return &no_accesses_representant;
4521 access_count = access_vec->length ();
4523 access_vec->qsort (compare_access_positions);
4525 i = 0;
4526 total_size = 0;
4527 group_count = 0;
4528 while (i < access_count)
4530 bool modification;
4531 tree a1_alias_type;
4532 access = (*access_vec)[i];
4533 modification = access->write;
4534 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4535 return NULL;
4536 a1_alias_type = reference_alias_ptr_type (access->expr);
4538 /* Access is about to become group representative unless we find some
4539 nasty overlap which would preclude us from breaking this parameter
4540 apart. */
4542 j = i + 1;
4543 while (j < access_count)
4545 struct access *ac2 = (*access_vec)[j];
4546 if (ac2->offset != access->offset)
4548 /* All or nothing law for parameters. */
4549 if (access->offset + access->size > ac2->offset)
4550 return NULL;
4551 else
4552 break;
4554 else if (ac2->size != access->size)
4555 return NULL;
4557 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4558 || (ac2->type != access->type
4559 && (TREE_ADDRESSABLE (ac2->type)
4560 || TREE_ADDRESSABLE (access->type)))
4561 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4562 return NULL;
4564 modification |= ac2->write;
4565 ac2->group_representative = access;
4566 ac2->next_sibling = access->next_sibling;
4567 access->next_sibling = ac2;
4568 j++;
4571 group_count++;
4572 access->grp_maybe_modified = modification;
4573 if (!modification)
4574 *ro_grp = true;
4575 *prev_acc_ptr = access;
4576 prev_acc_ptr = &access->next_grp;
4577 total_size += access->size;
4578 i = j;
4581 gcc_assert (group_count > 0);
4582 return res;
4585 /* Decide whether parameters with representative accesses given by REPR should
4586 be reduced into components. */
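/* A sketch of the cost model implemented below: an aggregate passed by
   value is only split if its components together are strictly smaller than
   the original parameter, whereas a pointer may grow into at most
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR new parameters (just one when optimizing
   for size), with a corresponding cap on their total size.  */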
4588 static int
4589 decide_one_param_reduction (struct access *repr)
4591 HOST_WIDE_INT total_size, cur_parm_size;
4592 bool by_ref;
4593 tree parm;
4595 parm = repr->base;
4596 cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4597 gcc_assert (cur_parm_size > 0);
4599 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4600 by_ref = true;
4601 else
4602 by_ref = false;
4604 if (dump_file)
4606 struct access *acc;
4607 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4608 print_generic_expr (dump_file, parm);
4609 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4610 for (acc = repr; acc; acc = acc->next_grp)
4611 dump_access (dump_file, acc, true);
4614 total_size = 0;
4615 int new_param_count = 0;
4617 for (; repr; repr = repr->next_grp)
4619 gcc_assert (parm == repr->base);
4621 /* Taking the address of a non-addressable field is verboten. */
4622 if (by_ref && repr->non_addressable)
4623 return 0;
4625 /* Do not decompose a non-BLKmode param in a way that would
4626 create BLKmode params. Especially for by-reference passing
4627 (thus, pointer-type param) this is hardly worthwhile. */
4628 if (DECL_MODE (parm) != BLKmode
4629 && TYPE_MODE (repr->type) == BLKmode)
4630 return 0;
4632 if (!by_ref || (!repr->grp_maybe_modified
4633 && !repr->grp_not_necessarilly_dereferenced))
4634 total_size += repr->size;
4635 else
4636 total_size += cur_parm_size;
4638 new_param_count++;
4641 gcc_assert (new_param_count > 0);
4643 if (!by_ref)
4645 if (total_size >= cur_parm_size)
4646 return 0;
4648 else
4650 int parm_num_limit;
4651 if (optimize_function_for_size_p (cfun))
4652 parm_num_limit = 1;
4653 else
4654 parm_num_limit = PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR);
4656 if (new_param_count > parm_num_limit
4657 || total_size > (parm_num_limit * cur_parm_size))
4658 return 0;
4661 if (dump_file)
4662 fprintf (dump_file, " ....will be split into %i components\n",
4663 new_param_count);
4664 return new_param_count;
4667 /* The order of the following enums is important, we need to do extra work for
4668 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4669 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4670 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4672 /* Identify representatives of all accesses to all candidate parameters for
4673 IPA-SRA. Return result based on what representatives have been found. */
4675 static enum ipa_splicing_result
4676 splice_all_param_accesses (vec<access_p> &representatives)
4678 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4679 tree parm;
4680 struct access *repr;
4682 representatives.create (func_param_count);
4684 for (parm = DECL_ARGUMENTS (current_function_decl);
4685 parm;
4686 parm = DECL_CHAIN (parm))
4688 if (is_unused_scalar_param (parm))
4690 representatives.quick_push (&no_accesses_representant);
4691 if (result == NO_GOOD_ACCESS)
4692 result = UNUSED_PARAMS;
4694 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4695 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4696 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4698 repr = unmodified_by_ref_scalar_representative (parm);
4699 representatives.quick_push (repr);
4700 if (repr)
4701 result = UNMODIF_BY_REF_ACCESSES;
4703 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4705 bool ro_grp = false;
4706 repr = splice_param_accesses (parm, &ro_grp);
4707 representatives.quick_push (repr);
4709 if (repr && !no_accesses_p (repr))
4711 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4713 if (ro_grp)
4714 result = UNMODIF_BY_REF_ACCESSES;
4715 else if (result < MODIF_BY_REF_ACCESSES)
4716 result = MODIF_BY_REF_ACCESSES;
4718 else if (result < BY_VAL_ACCESSES)
4719 result = BY_VAL_ACCESSES;
4721 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4722 result = UNUSED_PARAMS;
4724 else
4725 representatives.quick_push (NULL);
4728 if (result == NO_GOOD_ACCESS)
4730 representatives.release ();
4731 return NO_GOOD_ACCESS;
4734 return result;
4737 /* Return the index of BASE in PARMS. Abort if it is not found. */
4739 static inline int
4740 get_param_index (tree base, vec<tree> parms)
4742 int i, len;
4744 len = parms.length ();
4745 for (i = 0; i < len; i++)
4746 if (parms[i] == base)
4747 return i;
4748 gcc_unreachable ();
4751 /* Convert the decisions made at the representative level into compact
4752 parameter adjustments. REPRESENTATIVES are pointers to first
4753 representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4754 final number of adjustments. */
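/* For instance: a parameter kept as it is yields a single IPA_PARM_OP_COPY
   entry, an unused one a single IPA_PARM_OP_REMOVE entry, and a split one
   contributes one entry per surviving component representative.  */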
4756 static ipa_parm_adjustment_vec
4757 turn_representatives_into_adjustments (vec<access_p> representatives,
4758 int adjustments_count)
4760 vec<tree> parms;
4761 ipa_parm_adjustment_vec adjustments;
4762 tree parm;
4763 int i;
4765 gcc_assert (adjustments_count > 0);
4766 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4767 adjustments.create (adjustments_count);
4768 parm = DECL_ARGUMENTS (current_function_decl);
4769 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4771 struct access *repr = representatives[i];
4773 if (!repr || no_accesses_p (repr))
4775 struct ipa_parm_adjustment adj;
4777 memset (&adj, 0, sizeof (adj));
4778 adj.base_index = get_param_index (parm, parms);
4779 adj.base = parm;
4780 if (!repr)
4781 adj.op = IPA_PARM_OP_COPY;
4782 else
4783 adj.op = IPA_PARM_OP_REMOVE;
4784 adj.arg_prefix = "ISRA";
4785 adjustments.quick_push (adj);
4787 else
4789 struct ipa_parm_adjustment adj;
4790 int index = get_param_index (parm, parms);
4792 for (; repr; repr = repr->next_grp)
4794 memset (&adj, 0, sizeof (adj));
4795 gcc_assert (repr->base == parm);
4796 adj.base_index = index;
4797 adj.base = repr->base;
4798 adj.type = repr->type;
4799 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4800 adj.offset = repr->offset;
4801 adj.reverse = repr->reverse;
4802 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4803 && (repr->grp_maybe_modified
4804 || repr->grp_not_necessarilly_dereferenced));
4805 adj.arg_prefix = "ISRA";
4806 adjustments.quick_push (adj);
4810 parms.release ();
4811 return adjustments;
4814 /* Analyze the collected accesses and produce a plan for what to do with the
4815 parameters in the form of adjustments, NULL meaning nothing. */
4817 static ipa_parm_adjustment_vec
4818 analyze_all_param_acesses (void)
4820 enum ipa_splicing_result repr_state;
4821 bool proceed = false;
4822 int i, adjustments_count = 0;
4823 vec<access_p> representatives;
4824 ipa_parm_adjustment_vec adjustments;
4826 repr_state = splice_all_param_accesses (representatives);
4827 if (repr_state == NO_GOOD_ACCESS)
4828 return ipa_parm_adjustment_vec ();
4830 /* If there are any parameters passed by reference which are not modified
4831 directly, we need to check whether they can be modified indirectly. */
4832 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4834 analyze_caller_dereference_legality (representatives);
4835 analyze_modified_params (representatives);
4838 for (i = 0; i < func_param_count; i++)
4840 struct access *repr = representatives[i];
4842 if (repr && !no_accesses_p (repr))
4844 if (repr->grp_scalar_ptr)
4846 adjustments_count++;
4847 if (repr->grp_not_necessarilly_dereferenced
4848 || repr->grp_maybe_modified)
4849 representatives[i] = NULL;
4850 else
4852 proceed = true;
4853 sra_stats.scalar_by_ref_to_by_val++;
4856 else
4858 int new_components = decide_one_param_reduction (repr);
4860 if (new_components == 0)
4862 representatives[i] = NULL;
4863 adjustments_count++;
4865 else
4867 adjustments_count += new_components;
4868 sra_stats.aggregate_params_reduced++;
4869 sra_stats.param_reductions_created += new_components;
4870 proceed = true;
4874 else
4876 if (no_accesses_p (repr))
4878 proceed = true;
4879 sra_stats.deleted_unused_parameters++;
4881 adjustments_count++;
4885 if (!proceed && dump_file)
4886 fprintf (dump_file, "NOT proceeding to change params.\n");
4888 if (proceed)
4889 adjustments = turn_representatives_into_adjustments (representatives,
4890 adjustments_count);
4891 else
4892 adjustments = ipa_parm_adjustment_vec ();
4894 representatives.release ();
4895 return adjustments;
4898 /* If a parameter replacement identified by ADJ does not yet exist in the form
4899 of a declaration, create it and record it; otherwise return the previously
4900 created one. */
4902 static tree
4903 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4905 tree repl;
4906 if (!adj->new_ssa_base)
4908 char *pretty_name = make_fancy_name (adj->base);
4910 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4911 DECL_NAME (repl) = get_identifier (pretty_name);
4912 DECL_NAMELESS (repl) = 1;
4913 obstack_free (&name_obstack, pretty_name);
4915 adj->new_ssa_base = repl;
4917 else
4918 repl = adj->new_ssa_base;
4919 return repl;
4922 /* Find the first adjustment for a particular parameter BASE in a vector of
4923 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4924 adjustment. */
4926 static struct ipa_parm_adjustment *
4927 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4929 int i, len;
4931 len = adjustments.length ();
4932 for (i = 0; i < len; i++)
4934 struct ipa_parm_adjustment *adj;
4936 adj = &adjustments[i];
4937 if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4938 return adj;
4941 return NULL;
4944 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4945 parameter which is to be removed because its value is not used, create a new
4946 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4947 original with it and return it. If there is no need to re-map, return NULL.
4948 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
4950 static tree
4951 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4952 ipa_parm_adjustment_vec adjustments)
4954 struct ipa_parm_adjustment *adj;
4955 tree decl, repl, new_name;
4957 if (TREE_CODE (old_name) != SSA_NAME)
4958 return NULL;
4960 decl = SSA_NAME_VAR (old_name);
4961 if (decl == NULL_TREE
4962 || TREE_CODE (decl) != PARM_DECL)
4963 return NULL;
4965 adj = get_adjustment_for_base (adjustments, decl);
4966 if (!adj)
4967 return NULL;
4969 repl = get_replaced_param_substitute (adj);
4970 new_name = make_ssa_name (repl, stmt);
4971 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4972 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4974 if (dump_file)
4976 fprintf (dump_file, "replacing an SSA name of a removed param ");
4977 print_generic_expr (dump_file, old_name);
4978 fprintf (dump_file, " with ");
4979 print_generic_expr (dump_file, new_name);
4980 fprintf (dump_file, "\n");
4983 replace_uses_by (old_name, new_name);
4984 return new_name;
4987 /* If the statement STMT contains any expressions that need to be replaced with a
4988 different one as noted by ADJUSTMENTS, do so. Handle any potential type
4989 incompatibilities (GSI is used to accommodate conversion statements and must
4990 point to the statement). Return true iff the statement was modified. */
4992 static bool
4993 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4994 ipa_parm_adjustment_vec adjustments)
4996 tree *lhs_p, *rhs_p;
4997 bool any;
4999 if (!gimple_assign_single_p (stmt))
5000 return false;
5002 rhs_p = gimple_assign_rhs1_ptr (stmt);
5003 lhs_p = gimple_assign_lhs_ptr (stmt);
5005 any = ipa_modify_expr (rhs_p, false, adjustments);
5006 any |= ipa_modify_expr (lhs_p, false, adjustments);
5007 if (any)
5009 tree new_rhs = NULL_TREE;
5011 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
5013 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
5015 /* V_C_Es of constructors can cause trouble (PR 42714). */
5016 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
5017 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
5018 else
5019 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
5020 NULL);
5022 else
5023 new_rhs = fold_build1_loc (gimple_location (stmt),
5024 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
5025 *rhs_p);
5027 else if (REFERENCE_CLASS_P (*rhs_p)
5028 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
5029 && !is_gimple_reg (*lhs_p))
5030 /* This can happen when an assignment in between two single field
5031 structures is turned into an assignment in between two pointers to
5032 scalars (PR 42237). */
5033 new_rhs = *rhs_p;
5035 if (new_rhs)
5037 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
5038 true, GSI_SAME_STMT);
5040 gimple_assign_set_rhs_from_tree (gsi, tmp);
5043 return true;
5046 return false;
5049 /* Traverse the function body and perform all modifications as described in
5050 ADJUSTMENTS. Return true iff the CFG has been changed. */
5052 bool
5053 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
5055 bool cfg_changed = false;
5056 basic_block bb;
5058 FOR_EACH_BB_FN (bb, cfun)
5060 gimple_stmt_iterator gsi;
5062 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5064 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
5065 tree new_lhs, old_lhs = gimple_phi_result (phi);
5066 new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
5067 if (new_lhs)
5069 gimple_phi_set_result (phi, new_lhs);
5070 release_ssa_name (old_lhs);
5074 gsi = gsi_start_bb (bb);
5075 while (!gsi_end_p (gsi))
5077 gimple *stmt = gsi_stmt (gsi);
5078 bool modified = false;
5079 tree *t;
5080 unsigned i;
5082 switch (gimple_code (stmt))
5084 case GIMPLE_RETURN:
5085 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
5086 if (*t != NULL_TREE)
5087 modified |= ipa_modify_expr (t, true, adjustments);
5088 break;
5090 case GIMPLE_ASSIGN:
5091 modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
5092 break;
5094 case GIMPLE_CALL:
5095 /* Operands must be processed before the lhs. */
5096 for (i = 0; i < gimple_call_num_args (stmt); i++)
5098 t = gimple_call_arg_ptr (stmt, i);
5099 modified |= ipa_modify_expr (t, true, adjustments);
5102 if (gimple_call_lhs (stmt))
5104 t = gimple_call_lhs_ptr (stmt);
5105 modified |= ipa_modify_expr (t, false, adjustments);
5107 break;
5109 case GIMPLE_ASM:
5111 gasm *asm_stmt = as_a <gasm *> (stmt);
5112 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
5114 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
5115 modified |= ipa_modify_expr (t, true, adjustments);
5117 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
5119 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
5120 modified |= ipa_modify_expr (t, false, adjustments);
5123 break;
5125 default:
5126 break;
5129 def_operand_p defp;
5130 ssa_op_iter iter;
5131 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5133 tree old_def = DEF_FROM_PTR (defp);
5134 if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
5135 adjustments))
5137 SET_DEF (defp, new_def);
5138 release_ssa_name (old_def);
5139 modified = true;
5143 if (modified)
5145 update_stmt (stmt);
5146 if (maybe_clean_eh_stmt (stmt)
5147 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5148 cfg_changed = true;
5150 gsi_next (&gsi);
5154 return cfg_changed;
5157 /* Call gimple_debug_bind_reset_value on all debug statements describing
5158 gimple register parameters that are being removed or replaced. */
5160 static void
5161 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
5163 int i, len;
5164 gimple_stmt_iterator *gsip = NULL, gsi;
5166 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5168 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5169 gsip = &gsi;
5171 len = adjustments.length ();
5172 for (i = 0; i < len; i++)
5174 struct ipa_parm_adjustment *adj;
5175 imm_use_iterator ui;
5176 gimple *stmt;
5177 gdebug *def_temp;
5178 tree name, vexpr, copy = NULL_TREE;
5179 use_operand_p use_p;
5181 adj = &adjustments[i];
5182 if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5183 continue;
5184 name = ssa_default_def (cfun, adj->base);
5185 vexpr = NULL;
5186 if (name)
5187 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5189 if (gimple_clobber_p (stmt))
5191 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5192 unlink_stmt_vdef (stmt);
5193 gsi_remove (&cgsi, true);
5194 release_defs (stmt);
5195 continue;
5197 /* All other users must have been removed by
5198 ipa_sra_modify_function_body. */
5199 gcc_assert (is_gimple_debug (stmt));
5200 if (vexpr == NULL && gsip != NULL)
5202 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5203 vexpr = make_node (DEBUG_EXPR_DECL);
5204 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5205 NULL);
5206 DECL_ARTIFICIAL (vexpr) = 1;
5207 TREE_TYPE (vexpr) = TREE_TYPE (name);
5208 SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5209 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5211 if (vexpr)
5213 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5214 SET_USE (use_p, vexpr);
5216 else
5217 gimple_debug_bind_reset_value (stmt);
5218 update_stmt (stmt);
5220 /* Create a VAR_DECL for debug info purposes. */
5221 if (!DECL_IGNORED_P (adj->base))
5223 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5224 VAR_DECL, DECL_NAME (adj->base),
5225 TREE_TYPE (adj->base));
5226 if (DECL_PT_UID_SET_P (adj->base))
5227 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5228 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5229 TREE_READONLY (copy) = TREE_READONLY (adj->base);
5230 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5231 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5232 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5233 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5234 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5235 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5236 SET_DECL_RTL (copy, 0);
5237 TREE_USED (copy) = 1;
5238 DECL_CONTEXT (copy) = current_function_decl;
5239 add_local_decl (cfun, copy);
5240 DECL_CHAIN (copy) =
5241 BLOCK_VARS (DECL_INITIAL (current_function_decl));
5242 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5244 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5246 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5247 if (vexpr)
5248 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5249 else
5250 def_temp = gimple_build_debug_source_bind (copy, adj->base,
5251 NULL);
5252 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5257 /* Return false if all callers have at least as many actual arguments as there
5258 are formal parameters in the current function and their types
5259 match. */
5261 static bool
5262 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5263 void *data ATTRIBUTE_UNUSED)
5265 struct cgraph_edge *cs;
5266 for (cs = node->callers; cs; cs = cs->next_caller)
5267 if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5268 return true;
5270 return false;
5273 /* Return false if all callers have a vuse attached to their call statement. */
5275 static bool
5276 some_callers_have_no_vuse_p (struct cgraph_node *node,
5277 void *data ATTRIBUTE_UNUSED)
5279 struct cgraph_edge *cs;
5280 for (cs = node->callers; cs; cs = cs->next_caller)
5281 if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5282 return true;
5284 return false;
5287 /* Convert all callers of NODE. */
5289 static bool
5290 convert_callers_for_node (struct cgraph_node *node,
5291 void *data)
5293 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5294 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5295 struct cgraph_edge *cs;
5297 for (cs = node->callers; cs; cs = cs->next_caller)
5299 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5301 if (dump_file)
5302 fprintf (dump_file, "Adjusting call %s -> %s\n",
5303 cs->caller->dump_name (), cs->callee->dump_name ());
5305 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5307 pop_cfun ();
5310 for (cs = node->callers; cs; cs = cs->next_caller)
5311 if (bitmap_set_bit (recomputed_callers, cs->caller->get_uid ())
5312 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5313 compute_fn_summary (cs->caller, true);
5314 BITMAP_FREE (recomputed_callers);
5316 return true;
5319 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
5321 static void
5322 convert_callers (struct cgraph_node *node, tree old_decl,
5323 ipa_parm_adjustment_vec adjustments)
5325 basic_block this_block;
5327 node->call_for_symbol_and_aliases (convert_callers_for_node,
5328 &adjustments, false);
5330 if (!encountered_recursive_call)
5331 return;
5333 FOR_EACH_BB_FN (this_block, cfun)
5335 gimple_stmt_iterator gsi;
5337 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5339 gcall *stmt;
5340 tree call_fndecl;
5341 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5342 if (!stmt)
5343 continue;
5344 call_fndecl = gimple_call_fndecl (stmt);
5345 if (call_fndecl == old_decl)
5347 if (dump_file)
5348 fprintf (dump_file, "Adjusting recursive call");
5349 gimple_call_set_fndecl (stmt, node->decl);
5350 ipa_modify_call_arguments (NULL, stmt, adjustments);
5355 return;
5358 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5359 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
5361 static bool
5362 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5364 struct cgraph_node *new_node;
5365 bool cfg_changed;
5367 cgraph_edge::rebuild_edges ();
5368 free_dominance_info (CDI_DOMINATORS);
5369 pop_cfun ();
5371 /* This must be done after rebuilding cgraph edges for node above.
5372 Otherwise any recursive calls to node that are recorded in
5373 redirect_callers will be corrupted. */
5374 vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5375 new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5376 NULL, false, NULL, NULL,
5377 "isra");
5378 redirect_callers.release ();
5380 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5381 ipa_modify_formal_parameters (current_function_decl, adjustments);
5382 cfg_changed = ipa_sra_modify_function_body (adjustments);
5383 sra_ipa_reset_debug_stmts (adjustments);
5384 convert_callers (new_node, node->decl, adjustments);
5385 new_node->make_local ();
5386 return cfg_changed;
5389 /* Means of communication between ipa_sra_check_caller and
5390 ipa_sra_preliminary_function_checks. */
5392 struct ipa_sra_check_caller_data
5394 bool has_callers;
5395 bool bad_arg_alignment;
5396 bool has_thunk;
5399 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5400 ipa_sra_check_caller_data. Also check whether all aggregate arguments in
5401 all known calls are unit aligned and, if they are not, set the appropriate
5402 flag in DATA too. */
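/* Non-unit alignment can arise e.g. (hypothetical source) when an argument
   is a component that does not start on a byte boundary, such as a field
   placed after bit-fields in a packed structure.  */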
5404 static bool
5405 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5407 if (!node->callers)
5408 return false;
5410 struct ipa_sra_check_caller_data *iscc;
5411 iscc = (struct ipa_sra_check_caller_data *) data;
5412 iscc->has_callers = true;
5414 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5416 if (cs->caller->thunk.thunk_p)
5418 iscc->has_thunk = true;
5419 return true;
5421 gimple *call_stmt = cs->call_stmt;
5422 unsigned count = gimple_call_num_args (call_stmt);
5423 for (unsigned i = 0; i < count; i++)
5425 tree arg = gimple_call_arg (call_stmt, i);
5426 if (is_gimple_reg (arg))
5427 continue;
5429 tree offset;
5430 poly_int64 bitsize, bitpos;
5431 machine_mode mode;
5432 int unsignedp, reversep, volatilep = 0;
5433 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5434 &unsignedp, &reversep, &volatilep);
5435 if (!multiple_p (bitpos, BITS_PER_UNIT))
5437 iscc->bad_arg_alignment = true;
5438 return true;
5443 return false;
5446 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
5447 attributes, return true otherwise. NODE is the cgraph node of the current
5448 function. */
5450 static bool
5451 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5453 if (!node->can_be_local_p ())
5455 if (dump_file)
5456 fprintf (dump_file, "Function not local to this compilation unit.\n");
5457 return false;
5460 if (!node->local.can_change_signature)
5462 if (dump_file)
5463 fprintf (dump_file, "Function cannot change signature.\n");
5464 return false;
5467 if (!tree_versionable_function_p (node->decl))
5469 if (dump_file)
5470 fprintf (dump_file, "Function is not versionable.\n");
5471 return false;
5474 if (!opt_for_fn (node->decl, optimize)
5475 || !opt_for_fn (node->decl, flag_ipa_sra))
5477 if (dump_file)
5478 fprintf (dump_file, "Function not optimized.\n");
5479 return false;
5482 if (DECL_VIRTUAL_P (current_function_decl))
5484 if (dump_file)
5485 fprintf (dump_file, "Function is a virtual method.\n");
5486 return false;
5489 if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5490 && ipa_fn_summaries->get (node)
5491 && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5493 if (dump_file)
5494 fprintf (dump_file, "Function too big to be made truly local.\n");
5495 return false;
5498 if (cfun->stdarg)
5500 if (dump_file)
5501 fprintf (dump_file, "Function uses stdarg.\n");
5502 return false;
5505 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5506 return false;
5508 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5510 if (dump_file)
5511 fprintf (dump_file, "Always inline function will be inlined "
5512 "anyway. \n");
5513 return false;
5516 struct ipa_sra_check_caller_data iscc;
5517 memset (&iscc, 0, sizeof (iscc));
5518 node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5519 if (!iscc.has_callers)
5521 if (dump_file)
5522 fprintf (dump_file,
5523 "Function has no callers in this compilation unit.\n");
5524 return false;
5527 if (iscc.bad_arg_alignment)
5529 if (dump_file)
5530 fprintf (dump_file,
5531 "A function call has an argument with non-unit alignment.\n");
5532 return false;
5535 if (iscc.has_thunk)
5537 if (dump_file)
5538 fprintf (dump_file,
5539 "A has thunk.\n");
5540 return false;
5543 return true;
5546 /* Perform early interprocedural SRA. */
5548 static unsigned int
5549 ipa_early_sra (void)
5551 struct cgraph_node *node = cgraph_node::get (current_function_decl);
5552 ipa_parm_adjustment_vec adjustments;
5553 int ret = 0;
5555 if (!ipa_sra_preliminary_function_checks (node))
5556 return 0;
5558 sra_initialize ();
5559 sra_mode = SRA_MODE_EARLY_IPA;
5561 if (!find_param_candidates ())
5563 if (dump_file)
5564 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5565 goto simple_out;
5568 if (node->call_for_symbol_and_aliases
5569 (some_callers_have_mismatched_arguments_p, NULL, true))
5571 if (dump_file)
5572 fprintf (dump_file, "There are callers with insufficient number of "
5573 "arguments or arguments with type mismatches.\n");
5574 goto simple_out;
5577 if (node->call_for_symbol_and_aliases
5578 (some_callers_have_no_vuse_p, NULL, true))
5580 if (dump_file)
5581 fprintf (dump_file, "There are callers with no VUSE attached "
5582 "to a call stmt.\n");
5583 goto simple_out;
5586 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5587 func_param_count
5588 * last_basic_block_for_fn (cfun));
5589 final_bbs = BITMAP_ALLOC (NULL);
5591 scan_function ();
5592 if (encountered_apply_args)
5594 if (dump_file)
5595 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5596 goto out;
5599 if (encountered_unchangable_recursive_call)
5601 if (dump_file)
5602 fprintf (dump_file, "Function calls itself with insufficient "
5603 "number of arguments.\n");
5604 goto out;
5607 adjustments = analyze_all_param_acesses ();
5608 if (!adjustments.exists ())
5609 goto out;
5610 if (dump_file)
5611 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5613 if (modify_function (node, adjustments))
5614 ret = TODO_update_ssa | TODO_cleanup_cfg;
5615 else
5616 ret = TODO_update_ssa;
5617 adjustments.release ();
5619 statistics_counter_event (cfun, "Unused parameters deleted",
5620 sra_stats.deleted_unused_parameters);
5621 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5622 sra_stats.scalar_by_ref_to_by_val);
5623 statistics_counter_event (cfun, "Aggregate parameters broken up",
5624 sra_stats.aggregate_params_reduced);
5625 statistics_counter_event (cfun, "Aggregate parameter components created",
5626 sra_stats.param_reductions_created);
5628 out:
5629 BITMAP_FREE (final_bbs);
5630 free (bb_dereferences);
5631 simple_out:
5632 sra_deinitialize ();
5633 return ret;
5636 namespace {
5638 const pass_data pass_data_early_ipa_sra =
5640 GIMPLE_PASS, /* type */
5641 "eipa_sra", /* name */
5642 OPTGROUP_NONE, /* optinfo_flags */
5643 TV_IPA_SRA, /* tv_id */
5644 0, /* properties_required */
5645 0, /* properties_provided */
5646 0, /* properties_destroyed */
5647 0, /* todo_flags_start */
5648 TODO_dump_symtab, /* todo_flags_finish */
5651 class pass_early_ipa_sra : public gimple_opt_pass
5653 public:
5654 pass_early_ipa_sra (gcc::context *ctxt)
5655 : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5658 /* opt_pass methods: */
5659 virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5660 virtual unsigned int execute (function *) { return ipa_early_sra (); }
5662 }; // class pass_early_ipa_sra
5664 } // anon namespace
5666 gimple_opt_pass *
5667 make_pass_early_ipa_sra (gcc::context *ctxt)
5669 return new pass_early_ipa_sra (ctxt);