official-gcc.git: gcc-4_8/gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2013 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
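/* As an illustrative sketch (not part of the pass itself), given a function
   like the following, late SRA can replace the local aggregate P entirely
   with independent scalars, shown here as the hypothetical variables p$a
   and p$b:

     struct pair { int a; int b; };

     int
     f (int x)
     {
       struct pair p;
       p.a = x;                  becomes   p$a = x;
       p.b = x + 1;                        p$b = x + 1;
       return p.a + p.b;                   return p$a + p$b;
     }

   The memory for P is then never touched and the scalars are exposed to the
   rest of the GIMPLE optimizers.  */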
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "toplev.h"
88 #include "target.h"
89 #include "flags.h"
90 #include "dbgcnt.h"
91 #include "tree-inline.h"
92 #include "gimple-pretty-print.h"
93 #include "l-ipo.h"
94 #include "ipa-inline.h"
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
101 /* Global variable describing which aggregate reduction we are performing at
102 the moment. */
103 static enum sra_mode sra_mode;
105 struct assign_link;
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole complex number or a vector. It is a
121 duty of the modifying functions to replace them appropriately. */
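/* A small, hypothetical example of such a tree: for a candidate

     struct s { int i; struct { short a; short b; } in; } v;

   accessed as v.i, v.in and v.in.b, the group representatives would form
   (offsets and sizes in bits, assuming a 32-bit int and a 16-bit short):

     v.i      offset 0,  size 32
     v.in     offset 32, size 32
       v.in.b   offset 48, size 16

   v.i and v.in are siblings at the root level, and v.in.b is a child of
   v.in because it lies entirely within it.  */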
123 struct access
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
129 HOST_WIDE_INT size;
130 tree base;
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
134 testcase. */
135 tree expr;
136 /* Type. */
137 tree type;
139 /* The statement this access belongs to. */
140 gimple stmt;
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
158 /* Pointers to the first and last element in the linked list of assign
159 links. */
160 struct assign_link *first_link, *last_link;
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by the means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
170 /* Is this particular access a write access? */
171 unsigned write : 1;
173 /* Is this access an access to a non-addressable field? */
174 unsigned non_addressable : 1;
176 /* Is this access currently in the work queue? */
177 unsigned grp_queued : 1;
179 /* Does this group contain a write access? This flag is propagated down the
180 access tree. */
181 unsigned grp_write : 1;
183 /* Does this group contain a read access? This flag is propagated down the
184 access tree. */
185 unsigned grp_read : 1;
187 /* Does this group contain a read access that comes from an assignment
188 statement? This flag is propagated down the access tree. */
189 unsigned grp_assignment_read : 1;
191 /* Does this group contain a write access that comes from an assignment
192 statement? This flag is propagated down the access tree. */
193 unsigned grp_assignment_write : 1;
195 /* Does this group contain a read access through a scalar type? This flag is
196 not propagated in the access tree in any direction. */
197 unsigned grp_scalar_read : 1;
199 /* Does this group contain a write access through a scalar type? This flag
200 is not propagated in the access tree in any direction. */
201 unsigned grp_scalar_write : 1;
203 /* Is this access an artificial one created to scalarize some record
204 entirely? */
205 unsigned grp_total_scalarization : 1;
207 /* Other passes of the analysis use this bit to make function
208 analyze_access_subtree create scalar replacements for this group if
209 possible. */
210 unsigned grp_hint : 1;
212 /* Is the subtree rooted in this access fully covered by scalar
213 replacements? */
214 unsigned grp_covered : 1;
216 /* If set to true, this access and all below it in an access tree must not be
217 scalarized. */
218 unsigned grp_unscalarizable_region : 1;
220 /* Whether data have been written to parts of the aggregate covered by this
221 access which is not to be scalarized. This flag is propagated up in the
222 access tree. */
223 unsigned grp_unscalarized_data : 1;
225 /* Does this access and/or group contain a write access through a
226 BIT_FIELD_REF? */
227 unsigned grp_partial_lhs : 1;
229 /* Set when a scalar replacement should be created for this variable. */
230 unsigned grp_to_be_replaced : 1;
232 /* Set when we want a replacement for the sole purpose of having it in
233 generated debug statements. */
234 unsigned grp_to_be_debug_replaced : 1;
236 /* Should TREE_NO_WARNING of a replacement be set? */
237 unsigned grp_no_warning : 1;
239 /* Is it possible that the group refers to data which might be (directly or
240 otherwise) modified? */
241 unsigned grp_maybe_modified : 1;
243 /* Set when this is a representative of a pointer to scalar (i.e. by
244 reference) parameter which we consider for turning into a plain scalar
245 (i.e. a by value parameter). */
246 unsigned grp_scalar_ptr : 1;
248 /* Set when we discover that this pointer is not safe to dereference in the
249 caller. */
250 unsigned grp_not_necessarilly_dereferenced : 1;
253 typedef struct access *access_p;
256 /* Alloc pool for allocating access structures. */
257 static alloc_pool access_pool;
259 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
260 are used to propagate subaccesses from rhs to lhs as long as they don't
261 conflict with what is already there. */
262 struct assign_link
264 struct access *lacc, *racc;
265 struct assign_link *next;
268 /* Alloc pool for allocating assign link structures. */
269 static alloc_pool link_pool;
271 /* Base (tree) -> Vector (vec<access_p> *) map. */
272 static struct pointer_map_t *base_access_vec;
274 /* Set of candidates. */
275 static bitmap candidate_bitmap;
276 static htab_t candidates;
278 /* For a candidate UID return the candidate's decl. */
280 static inline tree
281 candidate (unsigned uid)
283 struct tree_decl_minimal t;
284 t.uid = uid;
285 return (tree) htab_find_with_hash (candidates, &t, uid);
288 /* Bitmap of candidates which we should try to entirely scalarize away and
289 those which cannot be (because they are or need to be used as a whole). */
290 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
292 /* Obstack for creation of fancy names. */
293 static struct obstack name_obstack;
295 /* Head of a linked list of accesses that need to have their subaccesses
296 propagated to their assignment counterparts. */
297 static struct access *work_queue_head;
299 /* Number of parameters of the analyzed function when doing early ipa SRA. */
300 static int func_param_count;
302 /* scan_function sets the following to true if it encounters a call to
303 __builtin_apply_args. */
304 static bool encountered_apply_args;
306 /* Set by scan_function when it finds a recursive call. */
307 static bool encountered_recursive_call;
309 /* Set by scan_function when it finds a recursive call with fewer actual
310 arguments than formal parameters. */
311 static bool encountered_unchangable_recursive_call;
313 /* This is a table in which for each basic block and parameter there is a
314 distance (offset + size) in that parameter which is dereferenced and
315 accessed in that BB. */
316 static HOST_WIDE_INT *bb_dereferences;
317 /* Bitmap of BBs that can cause the function to "stop" progressing by
318 returning, throwing externally, looping infinitely or calling a function
319 which might abort, etc. */
320 static bitmap final_bbs;
322 /* Representative of no accesses at all. */
323 static struct access no_accesses_representant;
325 /* Predicate to test the special value. */
327 static inline bool
328 no_accesses_p (struct access *access)
330 return access == &no_accesses_representant;
333 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
334 representative fields are dumped, otherwise those which only describe the
335 individual access are. */
337 static struct
339 /* Number of processed aggregates is readily available in
340 analyze_all_variable_accesses and so is not stored here. */
342 /* Number of created scalar replacements. */
343 int replacements;
345 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
346 expression. */
347 int exprs;
349 /* Number of statements created by generate_subtree_copies. */
350 int subtree_copies;
352 /* Number of statements created by load_assign_lhs_subreplacements. */
353 int subreplacements;
355 /* Number of times sra_modify_assign has deleted a statement. */
356 int deleted;
358 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
359 RHS separately due to type conversions or nonexistent matching
360 references. */
361 int separate_lhs_rhs_handling;
363 /* Number of parameters that were removed because they were unused. */
364 int deleted_unused_parameters;
366 /* Number of scalars passed as parameters by reference that have been
367 converted to be passed by value. */
368 int scalar_by_ref_to_by_val;
370 /* Number of aggregate parameters that were replaced by one or more of their
371 components. */
372 int aggregate_params_reduced;
374 /* Number of components created when splitting aggregate parameters. */
375 int param_reductions_created;
376 } sra_stats;
378 static void
379 dump_access (FILE *f, struct access *access, bool grp)
381 fprintf (f, "access { ");
382 fprintf (f, "base = (%d)'", DECL_UID (access->base));
383 print_generic_expr (f, access->base, 0);
384 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
385 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
386 fprintf (f, ", expr = ");
387 print_generic_expr (f, access->expr, 0);
388 fprintf (f, ", type = ");
389 print_generic_expr (f, access->type, 0);
390 if (grp)
391 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
392 "grp_assignment_write = %d, grp_scalar_read = %d, "
393 "grp_scalar_write = %d, grp_total_scalarization = %d, "
394 "grp_hint = %d, grp_covered = %d, "
395 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
396 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
397 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
398 "grp_not_necessarilly_dereferenced = %d\n",
399 access->grp_read, access->grp_write, access->grp_assignment_read,
400 access->grp_assignment_write, access->grp_scalar_read,
401 access->grp_scalar_write, access->grp_total_scalarization,
402 access->grp_hint, access->grp_covered,
403 access->grp_unscalarizable_region, access->grp_unscalarized_data,
404 access->grp_partial_lhs, access->grp_to_be_replaced,
405 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
406 access->grp_not_necessarilly_dereferenced);
407 else
408 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
409 "grp_partial_lhs = %d\n",
410 access->write, access->grp_total_scalarization,
411 access->grp_partial_lhs);
414 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
416 static void
417 dump_access_tree_1 (FILE *f, struct access *access, int level)
421 int i;
423 for (i = 0; i < level; i++)
424 fputs ("* ", dump_file);
426 dump_access (f, access, true);
428 if (access->first_child)
429 dump_access_tree_1 (f, access->first_child, level + 1);
431 access = access->next_sibling;
433 while (access);
436 /* Dump all access trees for a variable, given the pointer to the first root in
437 ACCESS. */
439 static void
440 dump_access_tree (FILE *f, struct access *access)
442 for (; access; access = access->next_grp)
443 dump_access_tree_1 (f, access, 0);
446 /* Return true iff ACC is non-NULL and has subaccesses. */
448 static inline bool
449 access_has_children_p (struct access *acc)
451 return acc && acc->first_child;
454 /* Return true iff ACC is (partly) covered by at least one replacement. */
456 static bool
457 access_has_replacements_p (struct access *acc)
459 struct access *child;
460 if (acc->grp_to_be_replaced)
461 return true;
462 for (child = acc->first_child; child; child = child->next_sibling)
463 if (access_has_replacements_p (child))
464 return true;
465 return false;
468 /* Return a vector of pointers to accesses for the variable given in BASE or
469 NULL if there is none. */
471 static vec<access_p> *
472 get_base_access_vector (tree base)
474 void **slot;
476 slot = pointer_map_contains (base_access_vec, base);
477 if (!slot)
478 return NULL;
479 else
480 return *(vec<access_p> **) slot;
483 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
484 in ACCESS. Return NULL if it cannot be found. */
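/* For instance, in the hypothetical access tree sketched above for struct s,
   a query for offset 48 and size 16 starting at the v.in representative
   (offset 32, size 32) does not match it directly, so the search descends to
   its children, skipping those that end at or before bit 48, and finds the
   v.in.b access.  A query for a range no recorded access matches yields
   NULL.  */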
486 static struct access *
487 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
488 HOST_WIDE_INT size)
490 while (access && (access->offset != offset || access->size != size))
492 struct access *child = access->first_child;
494 while (child && (child->offset + child->size <= offset))
495 child = child->next_sibling;
496 access = child;
499 return access;
502 /* Return the first group representative for DECL or NULL if none exists. */
504 static struct access *
505 get_first_repr_for_decl (tree base)
507 vec<access_p> *access_vec;
509 access_vec = get_base_access_vector (base);
510 if (!access_vec)
511 return NULL;
513 return (*access_vec)[0];
516 /* Find an access representative for the variable BASE and given OFFSET and
517 SIZE. Requires that access trees have already been built. Return NULL if
518 it cannot be found. */
520 static struct access *
521 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
522 HOST_WIDE_INT size)
524 struct access *access;
526 access = get_first_repr_for_decl (base);
527 while (access && (access->offset + access->size <= offset))
528 access = access->next_grp;
529 if (!access)
530 return NULL;
532 return find_access_in_subtree (access, offset, size);
535 /* Add LINK to the linked list of assign links of RACC. */
536 static void
537 add_link_to_rhs (struct access *racc, struct assign_link *link)
539 gcc_assert (link->racc == racc);
541 if (!racc->first_link)
543 gcc_assert (!racc->last_link);
544 racc->first_link = link;
546 else
547 racc->last_link->next = link;
549 racc->last_link = link;
550 link->next = NULL;
553 /* Move all link structures in their linked list in OLD_RACC to the linked list
554 in NEW_RACC. */
555 static void
556 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
558 if (!old_racc->first_link)
560 gcc_assert (!old_racc->last_link);
561 return;
564 if (new_racc->first_link)
566 gcc_assert (!new_racc->last_link->next);
567 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
569 new_racc->last_link->next = old_racc->first_link;
570 new_racc->last_link = old_racc->last_link;
572 else
574 gcc_assert (!new_racc->last_link);
576 new_racc->first_link = old_racc->first_link;
577 new_racc->last_link = old_racc->last_link;
579 old_racc->first_link = old_racc->last_link = NULL;
582 /* Add ACCESS to the work queue (which is actually a stack). */
584 static void
585 add_access_to_work_queue (struct access *access)
587 if (!access->grp_queued)
589 gcc_assert (!access->next_queued);
590 access->next_queued = work_queue_head;
591 access->grp_queued = 1;
592 work_queue_head = access;
596 /* Pop an access from the work queue, and return it, assuming there is one. */
598 static struct access *
599 pop_access_from_work_queue (void)
601 struct access *access = work_queue_head;
603 work_queue_head = access->next_queued;
604 access->next_queued = NULL;
605 access->grp_queued = 0;
606 return access;
610 /* Allocate necessary structures. */
612 static void
613 sra_initialize (void)
615 candidate_bitmap = BITMAP_ALLOC (NULL);
616 candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
617 uid_decl_map_hash, uid_decl_map_eq, NULL);
618 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
619 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
620 gcc_obstack_init (&name_obstack);
621 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
622 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
623 base_access_vec = pointer_map_create ();
624 memset (&sra_stats, 0, sizeof (sra_stats));
625 encountered_apply_args = false;
626 encountered_recursive_call = false;
627 encountered_unchangable_recursive_call = false;
630 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
632 static bool
633 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
634 void *data ATTRIBUTE_UNUSED)
636 vec<access_p> *access_vec = (vec<access_p> *) *value;
637 vec_free (access_vec);
638 return true;
641 /* Deallocate all general structures. */
643 static void
644 sra_deinitialize (void)
646 BITMAP_FREE (candidate_bitmap);
647 htab_delete (candidates);
648 BITMAP_FREE (should_scalarize_away_bitmap);
649 BITMAP_FREE (cannot_scalarize_away_bitmap);
650 free_alloc_pool (access_pool);
651 free_alloc_pool (link_pool);
652 obstack_free (&name_obstack, NULL);
654 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
655 pointer_map_destroy (base_access_vec);
658 /* Remove DECL from candidates for SRA and write REASON to the dump file if
659 there is one. */
660 static void
661 disqualify_candidate (tree decl, const char *reason)
663 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
664 htab_clear_slot (candidates,
665 htab_find_slot_with_hash (candidates, decl,
666 DECL_UID (decl), NO_INSERT));
668 if (dump_file && (dump_flags & TDF_DETAILS))
670 fprintf (dump_file, "! Disqualifying ");
671 print_generic_expr (dump_file, decl, 0);
672 fprintf (dump_file, " - %s\n", reason);
676 /* Return true iff the type contains a field or an element which does not allow
677 scalarization. */
679 static bool
680 type_internals_preclude_sra_p (tree type, const char **msg)
682 tree fld;
683 tree et;
685 switch (TREE_CODE (type))
687 case RECORD_TYPE:
688 case UNION_TYPE:
689 case QUAL_UNION_TYPE:
690 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
691 if (TREE_CODE (fld) == FIELD_DECL)
693 tree ft = TREE_TYPE (fld);
695 if (TREE_THIS_VOLATILE (fld))
697 *msg = "volatile structure field";
698 return true;
700 if (!DECL_FIELD_OFFSET (fld))
702 *msg = "no structure field offset";
703 return true;
705 if (!DECL_SIZE (fld))
707 *msg = "zero structure field size";
708 return true;
710 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
712 *msg = "structure field offset not fixed";
713 return true;
715 if (!host_integerp (DECL_SIZE (fld), 1))
717 *msg = "structure field size not fixed";
718 return true;
720 if (!host_integerp (bit_position (fld), 0))
722 *msg = "structure field size too big";
723 return true;
725 if (AGGREGATE_TYPE_P (ft)
726 && int_bit_position (fld) % BITS_PER_UNIT != 0)
728 *msg = "structure field is bit field";
729 return true;
732 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
733 return true;
736 return false;
738 case ARRAY_TYPE:
739 et = TREE_TYPE (type);
741 if (TYPE_VOLATILE (et))
743 *msg = "element type is volatile";
744 return true;
747 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
748 return true;
750 return false;
752 default:
753 return false;
757 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
758 base variable if it is. Return T if it is not an SSA_NAME. */
760 static tree
761 get_ssa_base_param (tree t)
763 if (TREE_CODE (t) == SSA_NAME)
765 if (SSA_NAME_IS_DEFAULT_DEF (t))
766 return SSA_NAME_VAR (t);
767 else
768 return NULL_TREE;
770 return t;
773 /* Mark a dereference of BASE of distance DIST in a basic block that STMT
774 belongs to, unless the BB has already been marked as potentially
775 final. */
777 static void
778 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
780 basic_block bb = gimple_bb (stmt);
781 int idx, parm_index = 0;
782 tree parm;
784 if (bitmap_bit_p (final_bbs, bb->index))
785 return;
787 for (parm = DECL_ARGUMENTS (current_function_decl);
788 parm && parm != base;
789 parm = DECL_CHAIN (parm))
790 parm_index++;
792 gcc_assert (parm_index < func_param_count);
794 idx = bb->index * func_param_count + parm_index;
795 if (bb_dereferences[idx] < dist)
796 bb_dereferences[idx] = dist;
799 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
800 the three fields. Also add it to the vector of accesses corresponding to
801 the base. Finally, return the new access. */
803 static struct access *
804 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
806 vec<access_p> *v;
807 struct access *access;
808 void **slot;
810 access = (struct access *) pool_alloc (access_pool);
811 memset (access, 0, sizeof (struct access));
812 access->base = base;
813 access->offset = offset;
814 access->size = size;
816 slot = pointer_map_contains (base_access_vec, base);
817 if (slot)
818 v = (vec<access_p> *) *slot;
819 else
820 vec_alloc (v, 32);
822 v->safe_push (access);
824 *((vec<access_p> **)
825 pointer_map_insert (base_access_vec, base)) = v;
827 return access;
830 /* Create and insert access for EXPR. Return created access, or NULL if it is
831 not possible. */
833 static struct access *
834 create_access (tree expr, gimple stmt, bool write)
836 struct access *access;
837 HOST_WIDE_INT offset, size, max_size;
838 tree base = expr;
839 bool ptr, unscalarizable_region = false;
841 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
843 if (sra_mode == SRA_MODE_EARLY_IPA
844 && TREE_CODE (base) == MEM_REF)
846 base = get_ssa_base_param (TREE_OPERAND (base, 0));
847 if (!base)
848 return NULL;
849 ptr = true;
851 else
852 ptr = false;
854 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
855 return NULL;
857 if (sra_mode == SRA_MODE_EARLY_IPA)
859 if (size < 0 || size != max_size)
861 disqualify_candidate (base, "Encountered a variable sized access.");
862 return NULL;
864 if (TREE_CODE (expr) == COMPONENT_REF
865 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
867 disqualify_candidate (base, "Encountered a bit-field access.");
868 return NULL;
870 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
872 if (ptr)
873 mark_parm_dereference (base, offset + size, stmt);
875 else
877 if (size != max_size)
879 size = max_size;
880 unscalarizable_region = true;
882 if (size < 0)
884 disqualify_candidate (base, "Encountered an unconstrained access.");
885 return NULL;
889 access = create_access_1 (base, offset, size);
890 access->expr = expr;
891 access->type = TREE_TYPE (expr);
892 access->write = write;
893 access->grp_unscalarizable_region = unscalarizable_region;
894 access->stmt = stmt;
896 if (TREE_CODE (expr) == COMPONENT_REF
897 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
898 access->non_addressable = 1;
900 return access;
904 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
905 register types or (recursively) records with only these two kinds of fields.
906 It also returns false if any of these records contains a bit-field. */
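/* For example (purely illustrative), a type such as

     struct ok { int i; struct { double d; } inner; };

   satisfies the predicate, whereas a record containing a bit-field or a
   field of array type, e.g.

     struct bad { int i : 3; int a[4]; };

   does not.  */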
908 static bool
909 type_consists_of_records_p (tree type)
911 tree fld;
913 if (TREE_CODE (type) != RECORD_TYPE)
914 return false;
916 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
917 if (TREE_CODE (fld) == FIELD_DECL)
919 tree ft = TREE_TYPE (fld);
921 if (DECL_BIT_FIELD (fld))
922 return false;
924 if (!is_gimple_reg_type (ft)
925 && !type_consists_of_records_p (ft))
926 return false;
929 return true;
932 /* Create total_scalarization accesses for all scalar type fields in DECL that
933 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
934 must be the top-most VAR_DECL representing the variable, OFFSET must be the
935 offset of DECL within BASE. REF must be the memory reference expression for
936 the given decl. */
938 static void
939 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
940 tree ref)
942 tree fld, decl_type = TREE_TYPE (decl);
944 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
945 if (TREE_CODE (fld) == FIELD_DECL)
947 HOST_WIDE_INT pos = offset + int_bit_position (fld);
948 tree ft = TREE_TYPE (fld);
949 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
950 NULL_TREE);
952 if (is_gimple_reg_type (ft))
954 struct access *access;
955 HOST_WIDE_INT size;
957 size = tree_low_cst (DECL_SIZE (fld), 1);
958 access = create_access_1 (base, pos, size);
959 access->expr = nref;
960 access->type = ft;
961 access->grp_total_scalarization = 1;
962 /* Accesses for intraprocedural SRA can have their stmt NULL. */
964 else
965 completely_scalarize_record (base, fld, pos, nref);
969 /* Create total_scalarization accesses for all scalar type fields in VAR and
970 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
971 type_consists_of_records_p. */
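/* Continuing the hypothetical struct s example above, completely
   scalarizing a variable V of that type creates one access covering all of
   V plus one access for each scalar field (V.i, V.in.a and V.in.b), all of
   them marked with grp_total_scalarization so that the analysis later
   favors creating replacements for them.  */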
973 static void
974 completely_scalarize_var (tree var)
976 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
977 struct access *access;
979 access = create_access_1 (var, 0, size);
980 access->expr = var;
981 access->type = TREE_TYPE (var);
982 access->grp_total_scalarization = 1;
984 completely_scalarize_record (var, var, 0, var);
987 /* Search the given tree for a declaration by skipping handled components and
988 exclude it from the candidates. */
990 static void
991 disqualify_base_of_expr (tree t, const char *reason)
993 t = get_base_address (t);
994 if (sra_mode == SRA_MODE_EARLY_IPA
995 && TREE_CODE (t) == MEM_REF)
996 t = get_ssa_base_param (TREE_OPERAND (t, 0));
998 if (t && DECL_P (t))
999 disqualify_candidate (t, reason);
1002 /* Scan expression EXPR and create access structures for all accesses to
1003 candidates for scalarization. Return the created access or NULL if none is
1004 created. */
1006 static struct access *
1007 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1009 struct access *ret = NULL;
1010 bool partial_ref;
1012 if (TREE_CODE (expr) == BIT_FIELD_REF
1013 || TREE_CODE (expr) == IMAGPART_EXPR
1014 || TREE_CODE (expr) == REALPART_EXPR)
1016 expr = TREE_OPERAND (expr, 0);
1017 partial_ref = true;
1019 else
1020 partial_ref = false;
1022 /* We need to dive through V_C_Es in order to get the size of its parameter
1023 and not the result type. Ada produces such statements. We are also
1024 capable of handling the topmost V_C_E but not any of those buried in other
1025 handled components. */
1026 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1027 expr = TREE_OPERAND (expr, 0);
1029 if (contains_view_convert_expr_p (expr))
1031 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1032 "component.");
1033 return NULL;
1036 switch (TREE_CODE (expr))
1038 case MEM_REF:
1039 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1040 && sra_mode != SRA_MODE_EARLY_IPA)
1041 return NULL;
1042 /* fall through */
1043 case VAR_DECL:
1044 case PARM_DECL:
1045 case RESULT_DECL:
1046 case COMPONENT_REF:
1047 case ARRAY_REF:
1048 case ARRAY_RANGE_REF:
1049 ret = create_access (expr, stmt, write);
1050 break;
1052 default:
1053 break;
1056 if (write && partial_ref && ret)
1057 ret->grp_partial_lhs = 1;
1059 return ret;
1062 /* Scan expression EXPR and create access structures for all accesses to
1063 candidates for scalarization. Return true if any access has been inserted.
1064 STMT must be the statement from which the expression is taken, WRITE must be
1065 true if the expression is a store and false otherwise. */
1067 static bool
1068 build_access_from_expr (tree expr, gimple stmt, bool write)
1070 struct access *access;
1072 access = build_access_from_expr_1 (expr, stmt, write);
1073 if (access)
1075 /* This means the aggregate is accessed as a whole in a way other than an
1076 assign statement and thus cannot be removed even if we had a scalar
1077 replacement for everything. */
1078 if (cannot_scalarize_away_bitmap)
1079 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1080 return true;
1082 return false;
1085 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1086 modes in which it matters, return true iff they have been disqualified. RHS
1087 may be NULL, in that case ignore it. If we scalarize an aggregate in
1088 intra-SRA we may need to add statements after each statement. This is not
1089 possible if a statement unconditionally has to end the basic block. */
1090 static bool
1091 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1093 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1094 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1096 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1097 if (rhs)
1098 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1099 return true;
1101 return false;
1104 /* Scan expressions occurring in STMT, create access structures for all accesses
1105 to candidates for scalarization and remove those candidates which occur in
1106 statements or expressions that prevent them from being split apart. Return
1107 true if any access has been inserted. */
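/* As a hypothetical example, for a statement

     dst = src;

   where both DST and SRC are candidate aggregates of the same type, a write
   access is created for DST and a read access for SRC, both are flagged as
   assignment accesses, and an assign_link is attached to the SRC access so
   that subaccesses later discovered on the right hand side can be propagated
   to the left hand side.  */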
1109 static bool
1110 build_accesses_from_assign (gimple stmt)
1112 tree lhs, rhs;
1113 struct access *lacc, *racc;
1115 if (!gimple_assign_single_p (stmt)
1116 /* Scope clobbers don't influence scalarization. */
1117 || gimple_clobber_p (stmt))
1118 return false;
1120 lhs = gimple_assign_lhs (stmt);
1121 rhs = gimple_assign_rhs1 (stmt);
1123 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1124 return false;
1126 racc = build_access_from_expr_1 (rhs, stmt, false);
1127 lacc = build_access_from_expr_1 (lhs, stmt, true);
1129 if (lacc)
1130 lacc->grp_assignment_write = 1;
1132 if (racc)
1134 racc->grp_assignment_read = 1;
1135 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1136 && !is_gimple_reg_type (racc->type))
1137 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1140 if (lacc && racc
1141 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1142 && !lacc->grp_unscalarizable_region
1143 && !racc->grp_unscalarizable_region
1144 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1145 && lacc->size == racc->size
1146 && useless_type_conversion_p (lacc->type, racc->type))
1148 struct assign_link *link;
1150 link = (struct assign_link *) pool_alloc (link_pool);
1151 memset (link, 0, sizeof (struct assign_link));
1153 link->lacc = lacc;
1154 link->racc = racc;
1156 add_link_to_rhs (racc, link);
1159 return lacc || racc;
1162 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1163 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1165 static bool
1166 asm_visit_addr (gimple, tree op, tree, void *)
1168 op = get_base_address (op);
1169 if (op
1170 && DECL_P (op))
1171 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1173 return false;
1176 /* Return true iff callsite CALL has at least as many actual arguments as there
1177 are formal parameters of the function currently processed by IPA-SRA. */
1179 static inline bool
1180 callsite_has_enough_arguments_p (gimple call)
1182 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1185 /* Scan function and look for interesting expressions and create access
1186 structures for them. Return true iff any access is created. */
1188 static bool
1189 scan_function (void)
1191 basic_block bb;
1192 bool ret = false;
1194 FOR_EACH_BB (bb)
1196 gimple_stmt_iterator gsi;
1197 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1199 gimple stmt = gsi_stmt (gsi);
1200 tree t;
1201 unsigned i;
1203 if (final_bbs && stmt_can_throw_external (stmt))
1204 bitmap_set_bit (final_bbs, bb->index);
1205 switch (gimple_code (stmt))
1207 case GIMPLE_RETURN:
1208 t = gimple_return_retval (stmt);
1209 if (t != NULL_TREE)
1210 ret |= build_access_from_expr (t, stmt, false);
1211 if (final_bbs)
1212 bitmap_set_bit (final_bbs, bb->index);
1213 break;
1215 case GIMPLE_ASSIGN:
1216 ret |= build_accesses_from_assign (stmt);
1217 break;
1219 case GIMPLE_CALL:
1220 for (i = 0; i < gimple_call_num_args (stmt); i++)
1221 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1222 stmt, false);
1224 if (sra_mode == SRA_MODE_EARLY_IPA)
1226 tree dest = gimple_call_fndecl (stmt);
1227 int flags = gimple_call_flags (stmt);
1229 if (dest)
1231 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1232 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1233 encountered_apply_args = true;
1234 if (cgraph_get_node (dest)
1235 == cgraph_get_node (current_function_decl))
1237 encountered_recursive_call = true;
1238 if (!callsite_has_enough_arguments_p (stmt))
1239 encountered_unchangable_recursive_call = true;
1243 if (final_bbs
1244 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1245 bitmap_set_bit (final_bbs, bb->index);
1248 t = gimple_call_lhs (stmt);
1249 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1250 ret |= build_access_from_expr (t, stmt, true);
1251 break;
1253 case GIMPLE_ASM:
1254 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1255 asm_visit_addr);
1256 if (final_bbs)
1257 bitmap_set_bit (final_bbs, bb->index);
1259 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1261 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1262 ret |= build_access_from_expr (t, stmt, false);
1264 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1266 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1267 ret |= build_access_from_expr (t, stmt, true);
1269 break;
1271 default:
1272 break;
1277 return ret;
1280 /* Helper of QSORT function. There are pointers to accesses in the array. An
1281 access is considered smaller than another if it has smaller offset or if the
1282 offsets are the same but its size is bigger. */
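/* For example, three hypothetical accesses with (offset, size) of (0, 64),
   (32, 32) and (0, 32) end up ordered as

     (0, 64), (0, 32), (32, 32)

   i.e. by ascending offset and, for equal offsets, with the bigger access
   first.  Among accesses with equal offset and size, scalar (gimple
   register) types are placed before aggregate types so that they become the
   group representatives.  */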
1284 static int
1285 compare_access_positions (const void *a, const void *b)
1287 const access_p *fp1 = (const access_p *) a;
1288 const access_p *fp2 = (const access_p *) b;
1289 const access_p f1 = *fp1;
1290 const access_p f2 = *fp2;
1292 if (f1->offset != f2->offset)
1293 return f1->offset < f2->offset ? -1 : 1;
1295 if (f1->size == f2->size)
1297 if (f1->type == f2->type)
1298 return 0;
1299 /* Put any non-aggregate type before any aggregate type. */
1300 else if (!is_gimple_reg_type (f1->type)
1301 && is_gimple_reg_type (f2->type))
1302 return 1;
1303 else if (is_gimple_reg_type (f1->type)
1304 && !is_gimple_reg_type (f2->type))
1305 return -1;
1306 /* Put any complex or vector type before any other scalar type. */
1307 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1308 && TREE_CODE (f1->type) != VECTOR_TYPE
1309 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1310 || TREE_CODE (f2->type) == VECTOR_TYPE))
1311 return 1;
1312 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1313 || TREE_CODE (f1->type) == VECTOR_TYPE)
1314 && TREE_CODE (f2->type) != COMPLEX_TYPE
1315 && TREE_CODE (f2->type) != VECTOR_TYPE)
1316 return -1;
1317 /* Put the integral type with the bigger precision first. */
1318 else if (INTEGRAL_TYPE_P (f1->type)
1319 && INTEGRAL_TYPE_P (f2->type))
1320 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1321 /* Put any integral type with non-full precision last. */
1322 else if (INTEGRAL_TYPE_P (f1->type)
1323 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1324 != TYPE_PRECISION (f1->type)))
1325 return 1;
1326 else if (INTEGRAL_TYPE_P (f2->type)
1327 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1328 != TYPE_PRECISION (f2->type)))
1329 return -1;
1330 /* Stabilize the sort. */
1331 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1334 /* We want the bigger accesses first, thus the opposite operator in the next
1335 line: */
1336 return f1->size > f2->size ? -1 : 1;
1340 /* Append a name of the declaration to the name obstack. A helper function for
1341 make_fancy_name. */
1343 static void
1344 make_fancy_decl_name (tree decl)
1346 char buffer[32];
1348 tree name = DECL_NAME (decl);
1349 if (name)
1350 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1351 IDENTIFIER_LENGTH (name));
1352 else
1354 sprintf (buffer, "D%u", DECL_UID (decl));
1355 obstack_grow (&name_obstack, buffer, strlen (buffer));
1359 /* Helper for make_fancy_name. */
1361 static void
1362 make_fancy_name_1 (tree expr)
1364 char buffer[32];
1365 tree index;
1367 if (DECL_P (expr))
1369 make_fancy_decl_name (expr);
1370 return;
1373 switch (TREE_CODE (expr))
1375 case COMPONENT_REF:
1376 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1377 obstack_1grow (&name_obstack, '$');
1378 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1379 break;
1381 case ARRAY_REF:
1382 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1383 obstack_1grow (&name_obstack, '$');
1384 /* Arrays with only one element may not have a constant as their
1385 index. */
1386 index = TREE_OPERAND (expr, 1);
1387 if (TREE_CODE (index) != INTEGER_CST)
1388 break;
1389 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1390 obstack_grow (&name_obstack, buffer, strlen (buffer));
1391 break;
1393 case ADDR_EXPR:
1394 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1395 break;
1397 case MEM_REF:
1398 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1399 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1401 obstack_1grow (&name_obstack, '$');
1402 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1403 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1404 obstack_grow (&name_obstack, buffer, strlen (buffer));
1406 break;
1408 case BIT_FIELD_REF:
1409 case REALPART_EXPR:
1410 case IMAGPART_EXPR:
1411 gcc_unreachable (); /* we treat these as scalars. */
1412 break;
1413 default:
1414 break;
1418 /* Create a human readable name for a replacement variable of the access expression EXPR. */
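/* For instance, for an access expression like foo.bar[3].baz this produces
   the string "foo$bar$3$baz" (a purely illustrative example).  The string is
   later used by create_access_replacement to give the replacement variable a
   recognizable name.  */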
1420 static char *
1421 make_fancy_name (tree expr)
1423 make_fancy_name_1 (expr);
1424 obstack_1grow (&name_obstack, '\0');
1425 return XOBFINISH (&name_obstack, char *);
1428 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1429 EXP_TYPE at the given OFFSET. If BASE is something for which
1430 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1431 to insert new statements either before or below the current one as specified
1432 by INSERT_AFTER. This function is not capable of handling bitfields.
1434 BASE must be either a declaration or a memory reference that has correct
1435 alignment information embedded in it (e.g. a pre-existing one in SRA). */
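/* As a hypothetical illustration, for BASE being a declaration S of an
   aggregate type, OFFSET of 32 bits and EXP_TYPE of int, the result is
   typically equivalent to

     MEM[(int *) &S + 4B]

   i.e. a MEM_REF of the requested type at a byte offset of 4, with the
   access type's alignment reduced if the underlying object does not
   guarantee sufficient alignment at that offset.  */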
1437 tree
1438 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1439 tree exp_type, gimple_stmt_iterator *gsi,
1440 bool insert_after)
1442 tree prev_base = base;
1443 tree off;
1444 tree mem_ref;
1445 HOST_WIDE_INT base_offset;
1446 unsigned HOST_WIDE_INT misalign;
1447 unsigned int align;
1449 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1450 get_object_alignment_1 (base, &align, &misalign);
1451 base = get_addr_base_and_unit_offset (base, &base_offset);
1453 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1454 offset such as array[var_index]. */
1455 if (!base)
1457 gimple stmt;
1458 tree tmp, addr;
1460 gcc_checking_assert (gsi);
1461 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1462 addr = build_fold_addr_expr (unshare_expr (prev_base));
1463 STRIP_USELESS_TYPE_CONVERSION (addr);
1464 stmt = gimple_build_assign (tmp, addr);
1465 gimple_set_location (stmt, loc);
1466 if (insert_after)
1467 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1468 else
1469 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1471 off = build_int_cst (reference_alias_ptr_type (prev_base),
1472 offset / BITS_PER_UNIT);
1473 base = tmp;
1475 else if (TREE_CODE (base) == MEM_REF)
1477 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1478 base_offset + offset / BITS_PER_UNIT);
1479 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1480 base = unshare_expr (TREE_OPERAND (base, 0));
1482 else
1484 off = build_int_cst (reference_alias_ptr_type (base),
1485 base_offset + offset / BITS_PER_UNIT);
1486 base = build_fold_addr_expr (unshare_expr (base));
1489 misalign = (misalign + offset) & (align - 1);
1490 if (misalign != 0)
1491 align = (misalign & -misalign);
1492 if (align < TYPE_ALIGN (exp_type))
1493 exp_type = build_aligned_type (exp_type, align);
1495 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1496 if (TREE_THIS_VOLATILE (prev_base))
1497 TREE_THIS_VOLATILE (mem_ref) = 1;
1498 if (TREE_SIDE_EFFECTS (prev_base))
1499 TREE_SIDE_EFFECTS (mem_ref) = 1;
1500 return mem_ref;
1503 /* Construct a memory reference to a part of an aggregate BASE at the given
1504 OFFSET and of the same type as MODEL. In case this is a reference to a
1505 bit-field, the function will replicate the last component_ref of model's
1506 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1507 build_ref_for_offset. */
1509 static tree
1510 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1511 struct access *model, gimple_stmt_iterator *gsi,
1512 bool insert_after)
1514 if (TREE_CODE (model->expr) == COMPONENT_REF
1515 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1517 /* This access represents a bit-field. */
1518 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1520 offset -= int_bit_position (fld);
1521 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1522 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1523 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1524 NULL_TREE);
1526 else
1527 return build_ref_for_offset (loc, base, offset, model->type,
1528 gsi, insert_after);
1531 /* Attempt to build a memory reference that we could put into a gimple
1532 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1533 create statements and returns NULL instead. This function also ignores
1534 alignment issues and so its results should never end up in non-debug
1535 statements. */
1537 static tree
1538 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1539 struct access *model)
1541 HOST_WIDE_INT base_offset;
1542 tree off;
1544 if (TREE_CODE (model->expr) == COMPONENT_REF
1545 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1546 return NULL_TREE;
1548 base = get_addr_base_and_unit_offset (base, &base_offset);
1549 if (!base)
1550 return NULL_TREE;
1551 if (TREE_CODE (base) == MEM_REF)
1553 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1554 base_offset + offset / BITS_PER_UNIT);
1555 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1556 base = unshare_expr (TREE_OPERAND (base, 0));
1558 else
1560 off = build_int_cst (reference_alias_ptr_type (base),
1561 base_offset + offset / BITS_PER_UNIT);
1562 base = build_fold_addr_expr (unshare_expr (base));
1565 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1568 /* Construct a memory reference consisting of component_refs and array_refs to
1569 a part of an aggregate *RES (which is of type TYPE). The requested part
1570 should have type EXP_TYPE at the given OFFSET. This function might not
1571 succeed, it returns true when it does and only then *RES points to something
1572 meaningful. This function should be used only to build expressions that we
1573 might need to present to the user (e.g. in warnings). In all other situations,
1574 build_ref_for_model or build_ref_for_offset should be used instead. */
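/* As an illustration, with *RES initially being a variable V of type

     struct s2 { int a; int b; };

   a request for OFFSET 32 and EXP_TYPE int rewrites *RES to the
   COMPONENT_REF V.b and returns true, whereas a request for an offset in
   the middle of a field or with an incompatible EXP_TYPE returns false and
   leaves *RES meaningless.  */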
1576 static bool
1577 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1578 tree exp_type)
1580 while (1)
1582 tree fld;
1583 tree tr_size, index, minidx;
1584 HOST_WIDE_INT el_size;
1586 if (offset == 0 && exp_type
1587 && types_compatible_p (exp_type, type))
1588 return true;
1590 switch (TREE_CODE (type))
1592 case UNION_TYPE:
1593 case QUAL_UNION_TYPE:
1594 case RECORD_TYPE:
1595 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1597 HOST_WIDE_INT pos, size;
1598 tree tr_pos, expr, *expr_ptr;
1600 if (TREE_CODE (fld) != FIELD_DECL)
1601 continue;
1603 tr_pos = bit_position (fld);
1604 if (!tr_pos || !host_integerp (tr_pos, 1))
1605 continue;
1606 pos = TREE_INT_CST_LOW (tr_pos);
1607 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1608 tr_size = DECL_SIZE (fld);
1609 if (!tr_size || !host_integerp (tr_size, 1))
1610 continue;
1611 size = TREE_INT_CST_LOW (tr_size);
1612 if (size == 0)
1614 if (pos != offset)
1615 continue;
1617 else if (pos > offset || (pos + size) <= offset)
1618 continue;
1620 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1621 NULL_TREE);
1622 expr_ptr = &expr;
1623 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1624 offset - pos, exp_type))
1626 *res = expr;
1627 return true;
1630 return false;
1632 case ARRAY_TYPE:
1633 tr_size = TYPE_SIZE (TREE_TYPE (type));
1634 if (!tr_size || !host_integerp (tr_size, 1))
1635 return false;
1636 el_size = tree_low_cst (tr_size, 1);
1638 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1639 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1640 return false;
1641 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1642 if (!integer_zerop (minidx))
1643 index = int_const_binop (PLUS_EXPR, index, minidx);
1644 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1645 NULL_TREE, NULL_TREE);
1646 offset = offset % el_size;
1647 type = TREE_TYPE (type);
1648 break;
1650 default:
1651 if (offset != 0)
1652 return false;
1654 if (exp_type)
1655 return false;
1656 else
1657 return true;
1662 /* Return true iff TYPE is the stdarg va_list type. */
1664 static inline bool
1665 is_va_list_type (tree type)
1667 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1670 /* Print a message to the dump file saying why a variable was rejected. */
1672 static void
1673 reject (tree var, const char *msg)
1675 if (dump_file && (dump_flags & TDF_DETAILS))
1677 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1678 print_generic_expr (dump_file, var, 0);
1679 fprintf (dump_file, "\n");
1683 /* Return true if VAR is a candidate for SRA. */
1685 static bool
1686 maybe_add_sra_candidate (tree var)
1688 tree type = TREE_TYPE (var);
1689 const char *msg;
1690 void **slot;
1692 if (!AGGREGATE_TYPE_P (type))
1694 reject (var, "not aggregate");
1695 return false;
1697 if (needs_to_live_in_memory (var))
1699 reject (var, "needs to live in memory");
1700 return false;
1702 if (TREE_THIS_VOLATILE (var))
1704 reject (var, "is volatile");
1705 return false;
1707 if (!COMPLETE_TYPE_P (type))
1709 reject (var, "has incomplete type");
1710 return false;
1712 if (!host_integerp (TYPE_SIZE (type), 1))
1714 reject (var, "type size not fixed");
1715 return false;
1717 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1719 reject (var, "type size is zero");
1720 return false;
1722 if (type_internals_preclude_sra_p (type, &msg))
1724 reject (var, msg);
1725 return false;
1727 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1728 we also want to schedule it rather late. Thus we ignore it in
1729 the early pass. */
1730 (sra_mode == SRA_MODE_EARLY_INTRA
1731 && is_va_list_type (type)))
1733 reject (var, "is va_list");
1734 return false;
1737 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1738 slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
1739 *slot = (void *) var;
1741 if (dump_file && (dump_flags & TDF_DETAILS))
1743 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1744 print_generic_expr (dump_file, var, 0);
1745 fprintf (dump_file, "\n");
1748 return true;
1751 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1752 those declarations whose type is suitable for scalarization. */
1754 static bool
1755 find_var_candidates (void)
1757 tree var, parm;
1758 unsigned int i;
1759 bool ret = false;
1761 for (parm = DECL_ARGUMENTS (current_function_decl);
1762 parm;
1763 parm = DECL_CHAIN (parm))
1764 ret |= maybe_add_sra_candidate (parm);
1766 FOR_EACH_LOCAL_DECL (cfun, i, var)
1768 if (TREE_CODE (var) != VAR_DECL)
1769 continue;
1771 ret |= maybe_add_sra_candidate (var);
1774 return ret;
1777 /* Sort all accesses for the given variable, check for partial overlaps and
1778 return NULL if there are any. If there are none, pick a representative for
1779 each combination of offset and size and create a linked list out of them.
1780 Return the pointer to the first representative and make sure it is the first
1781 one in the vector of accesses. */
1783 static struct access *
1784 sort_and_splice_var_accesses (tree var)
1786 int i, j, access_count;
1787 struct access *res, **prev_acc_ptr = &res;
1788 vec<access_p> *access_vec;
1789 bool first = true;
1790 HOST_WIDE_INT low = -1, high = 0;
1792 access_vec = get_base_access_vector (var);
1793 if (!access_vec)
1794 return NULL;
1795 access_count = access_vec->length ();
1797 /* Sort by <OFFSET, SIZE>. */
1798 access_vec->qsort (compare_access_positions);
1800 i = 0;
1801 while (i < access_count)
1803 struct access *access = (*access_vec)[i];
1804 bool grp_write = access->write;
1805 bool grp_read = !access->write;
1806 bool grp_scalar_write = access->write
1807 && is_gimple_reg_type (access->type);
1808 bool grp_scalar_read = !access->write
1809 && is_gimple_reg_type (access->type);
1810 bool grp_assignment_read = access->grp_assignment_read;
1811 bool grp_assignment_write = access->grp_assignment_write;
1812 bool multiple_scalar_reads = false;
1813 bool total_scalarization = access->grp_total_scalarization;
1814 bool grp_partial_lhs = access->grp_partial_lhs;
1815 bool first_scalar = is_gimple_reg_type (access->type);
1816 bool unscalarizable_region = access->grp_unscalarizable_region;
1818 if (first || access->offset >= high)
1820 first = false;
1821 low = access->offset;
1822 high = access->offset + access->size;
1824 else if (access->offset > low && access->offset + access->size > high)
1825 return NULL;
1826 else
1827 gcc_assert (access->offset >= low
1828 && access->offset + access->size <= high);
1830 j = i + 1;
1831 while (j < access_count)
1833 struct access *ac2 = (*access_vec)[j];
1834 if (ac2->offset != access->offset || ac2->size != access->size)
1835 break;
1836 if (ac2->write)
1838 grp_write = true;
1839 grp_scalar_write = (grp_scalar_write
1840 || is_gimple_reg_type (ac2->type));
1842 else
1844 grp_read = true;
1845 if (is_gimple_reg_type (ac2->type))
1847 if (grp_scalar_read)
1848 multiple_scalar_reads = true;
1849 else
1850 grp_scalar_read = true;
1853 grp_assignment_read |= ac2->grp_assignment_read;
1854 grp_assignment_write |= ac2->grp_assignment_write;
1855 grp_partial_lhs |= ac2->grp_partial_lhs;
1856 unscalarizable_region |= ac2->grp_unscalarizable_region;
1857 total_scalarization |= ac2->grp_total_scalarization;
1858 relink_to_new_repr (access, ac2);
1860 /* If there are both aggregate-type and scalar-type accesses with
1861 this combination of size and offset, the comparison function
1862 should have put the scalars first. */
1863 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1864 ac2->group_representative = access;
1865 j++;
1868 i = j;
1870 access->group_representative = access;
1871 access->grp_write = grp_write;
1872 access->grp_read = grp_read;
1873 access->grp_scalar_read = grp_scalar_read;
1874 access->grp_scalar_write = grp_scalar_write;
1875 access->grp_assignment_read = grp_assignment_read;
1876 access->grp_assignment_write = grp_assignment_write;
1877 access->grp_hint = multiple_scalar_reads || total_scalarization;
1878 access->grp_total_scalarization = total_scalarization;
1879 access->grp_partial_lhs = grp_partial_lhs;
1880 access->grp_unscalarizable_region = unscalarizable_region;
1881 if (access->first_link)
1882 add_access_to_work_queue (access);
1884 *prev_acc_ptr = access;
1885 prev_acc_ptr = &access->next_grp;
1888 gcc_assert (res == (*access_vec)[0]);
1889 return res;
1892 /* Create a variable for the given ACCESS which determines the type, name and a
1893 few other properties. Return the variable declaration; callers store it
1894 in ACCESS->replacement_decl themselves. */
1896 static tree
1897 create_access_replacement (struct access *access)
1899 tree repl;
1901 if (access->grp_to_be_debug_replaced)
1903 repl = create_tmp_var_raw (access->type, NULL);
1904 DECL_CONTEXT (repl) = current_function_decl;
1906 else
1907 repl = create_tmp_var (access->type, "SR");
1908 if (TREE_CODE (access->type) == COMPLEX_TYPE
1909 || TREE_CODE (access->type) == VECTOR_TYPE)
1911 if (!access->grp_partial_lhs)
1912 DECL_GIMPLE_REG_P (repl) = 1;
1914 else if (access->grp_partial_lhs
1915 && is_gimple_reg_type (access->type))
1916 TREE_ADDRESSABLE (repl) = 1;
1918 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1919 DECL_ARTIFICIAL (repl) = 1;
1920 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1922 if (DECL_NAME (access->base)
1923 && !DECL_IGNORED_P (access->base)
1924 && !DECL_ARTIFICIAL (access->base))
1926 char *pretty_name = make_fancy_name (access->expr);
1927 tree debug_expr = unshare_expr_without_location (access->expr), d;
1928 bool fail = false;
1930 DECL_NAME (repl) = get_identifier (pretty_name);
1931 obstack_free (&name_obstack, pretty_name);
1933 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1934 as DECL_DEBUG_EXPR isn't considered when looking for still
1935 used SSA_NAMEs and thus they could be freed. All debug info
1936 generation cares about is whether something is constant or variable
1937 and that get_ref_base_and_extent works properly on the
1938 expression. It cannot handle accesses at a non-constant offset
1939 though, so just give up in those cases. */
1940 for (d = debug_expr;
1941 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1942 d = TREE_OPERAND (d, 0))
1943 switch (TREE_CODE (d))
1945 case ARRAY_REF:
1946 case ARRAY_RANGE_REF:
1947 if (TREE_OPERAND (d, 1)
1948 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1949 fail = true;
1950 if (TREE_OPERAND (d, 3)
1951 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1952 fail = true;
1953 /* FALLTHRU */
1954 case COMPONENT_REF:
1955 if (TREE_OPERAND (d, 2)
1956 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
1957 fail = true;
1958 break;
1959 case MEM_REF:
1960 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
1961 fail = true;
1962 else
1963 d = TREE_OPERAND (d, 0);
1964 break;
1965 default:
1966 break;
1968 if (!fail)
1970 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1971 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1973 if (access->grp_no_warning)
1974 TREE_NO_WARNING (repl) = 1;
1975 else
1976 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1978 else
1979 TREE_NO_WARNING (repl) = 1;
1981 if (dump_file)
1983 if (access->grp_to_be_debug_replaced)
1985 fprintf (dump_file, "Created a debug-only replacement for ");
1986 print_generic_expr (dump_file, access->base, 0);
1987 fprintf (dump_file, " offset: %u, size: %u\n",
1988 (unsigned) access->offset, (unsigned) access->size);
1990 else
1992 fprintf (dump_file, "Created a replacement for ");
1993 print_generic_expr (dump_file, access->base, 0);
1994 fprintf (dump_file, " offset: %u, size: %u: ",
1995 (unsigned) access->offset, (unsigned) access->size);
1996 print_generic_expr (dump_file, repl, 0);
1997 fprintf (dump_file, "\n");
2000 sra_stats.replacements++;
2002 return repl;
2005 /* Return the scalar replacement of ACCESS, which must already exist. */
2007 static inline tree
2008 get_access_replacement (struct access *access)
2010 gcc_checking_assert (access->replacement_decl);
2011 return access->replacement_decl;
2015 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2016 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2017 to by it is not "within" the root. Return false iff some accesses partially
2018 overlap. */
2020 static bool
2021 build_access_subtree (struct access **access)
2023 struct access *root = *access, *last_child = NULL;
2024 HOST_WIDE_INT limit = root->offset + root->size;
2026 *access = (*access)->next_grp;
2027 while (*access && (*access)->offset + (*access)->size <= limit)
2029 if (!last_child)
2030 root->first_child = *access;
2031 else
2032 last_child->next_sibling = *access;
2033 last_child = *access;
2035 if (!build_access_subtree (access))
2036 return false;
2039 if (*access && (*access)->offset < limit)
2040 return false;
2042 return true;
2045 /* Build a tree of access representatives, ACCESS is the pointer to the first
2046 one, others are linked in a list by the next_grp field. Return false iff
2047 some accesses partially overlap. */
2049 static bool
2050 build_access_trees (struct access *access)
2052 while (access)
2054 struct access *root = access;
2056 if (!build_access_subtree (&access))
2057 return false;
2058 root->next_grp = access;
2060 return true;
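/* Illustrative example (hypothetical): from representatives with
   <offset, size> pairs <0, 64>, <0, 32> and <32, 32>, the functions above
   build a tree in which <0, 64> is the root and <0, 32> and <32, 32> are
   its children, linked through first_child and next_sibling.  A
   representative such as <16, 32> would partially overlap <0, 32> and make
   build_access_subtree return false.  */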
2063 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2064 array. */
2066 static bool
2067 expr_with_var_bounded_array_refs_p (tree expr)
2069 while (handled_component_p (expr))
2071 if (TREE_CODE (expr) == ARRAY_REF
2072 && !host_integerp (array_ref_low_bound (expr), 0))
2073 return true;
2074 expr = TREE_OPERAND (expr, 0);
2076 return false;
2079 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2080 they seem beneficial and ALLOW_REPLACEMENTS allows it. Also set all
2081 sorts of access flags appropriately along the way, notably propagating
2082 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2083 grp_total_scalarization from PARENT down to ROOT.
2085 Creating a replacement for a scalar access is considered beneficial if its
2086 grp_hint is set (this means we are either attempting total scalarization or
2087 there is more than one direct read access) or according to the following
2088 table:
2090 Access written to through a scalar type (once or more times)
2092 | Written to in an assignment statement
2094 | | Access read as scalar _once_
2095 | | |
2096 | | | Read in an assignment statement
2097 | | | |
2098 | | | | Scalarize Comment
2099 -----------------------------------------------------------------------------
2100 0 0 0 0 No access for the scalar
2101 0 0 0 1 No access for the scalar
2102 0 0 1 0 No Single read - won't help
2103 0 0 1 1 No The same case
2104 0 1 0 0 No access for the scalar
2105 0 1 0 1 No access for the scalar
2106 0 1 1 0 Yes s = *g; return s.i;
2107 0 1 1 1 Yes The same case as above
2108 1 0 0 0 No Won't help
2109 1 0 0 1 Yes s.i = 1; *g = s;
2110 1 0 1 0 Yes s.i = 5; g = s.i;
2111 1 0 1 1 Yes The same case as above
2112 1 1 0 0 No Won't help.
2113 1 1 0 1 Yes s.i = 1; *g = s;
2114 1 1 1 0 Yes s = *g; return s.i;
2115 1 1 1 1 Yes Any of the above yeses */
2117 static bool
2118 analyze_access_subtree (struct access *root, struct access *parent,
2119 bool allow_replacements)
2121 struct access *child;
2122 HOST_WIDE_INT limit = root->offset + root->size;
2123 HOST_WIDE_INT covered_to = root->offset;
2124 bool scalar = is_gimple_reg_type (root->type);
2125 bool hole = false, sth_created = false;
2127 if (parent)
2129 if (parent->grp_read)
2130 root->grp_read = 1;
2131 if (parent->grp_assignment_read)
2132 root->grp_assignment_read = 1;
2133 if (parent->grp_write)
2134 root->grp_write = 1;
2135 if (parent->grp_assignment_write)
2136 root->grp_assignment_write = 1;
2137 if (parent->grp_total_scalarization)
2138 root->grp_total_scalarization = 1;
2141 if (root->grp_unscalarizable_region)
2142 allow_replacements = false;
2144 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2145 allow_replacements = false;
2147 for (child = root->first_child; child; child = child->next_sibling)
2149 hole |= covered_to < child->offset;
2150 sth_created |= analyze_access_subtree (child, root,
2151 allow_replacements && !scalar);
2153 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2154 root->grp_total_scalarization &= child->grp_total_scalarization;
2155 if (child->grp_covered)
2156 covered_to += child->size;
2157 else
2158 hole = true;
2161 if (allow_replacements && scalar && !root->first_child
2162 && (root->grp_hint
2163 || ((root->grp_scalar_read || root->grp_assignment_read)
2164 && (root->grp_scalar_write || root->grp_assignment_write))))
2166 /* Always create access replacements that cover the whole access.
2167 For integral types this means the precision has to match.
2168 Avoid assumptions based on the integral type kind, too. */
2169 if (INTEGRAL_TYPE_P (root->type)
2170 && (TREE_CODE (root->type) != INTEGER_TYPE
2171 || TYPE_PRECISION (root->type) != root->size)
2172 /* But leave bitfield accesses alone. */
2173 && (TREE_CODE (root->expr) != COMPONENT_REF
2174 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2176 tree rt = root->type;
2177 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2178 && (root->size % BITS_PER_UNIT) == 0);
2179 root->type = build_nonstandard_integer_type (root->size,
2180 TYPE_UNSIGNED (rt));
2181 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2182 root->base, root->offset,
2183 root->type, NULL, false);
2185 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fprintf (dump_file, "Changing the type of a replacement for ");
2188 print_generic_expr (dump_file, root->base, 0);
2189 fprintf (dump_file, " offset: %u, size: %u ",
2190 (unsigned) root->offset, (unsigned) root->size);
2191 fprintf (dump_file, " to an integer.\n");
2195 root->grp_to_be_replaced = 1;
2196 root->replacement_decl = create_access_replacement (root);
2197 sth_created = true;
2198 hole = false;
2200 else
2202 if (allow_replacements
2203 && scalar && !root->first_child
2204 && (root->grp_scalar_write || root->grp_assignment_write)
2205 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2206 DECL_UID (root->base)))
2208 gcc_checking_assert (!root->grp_scalar_read
2209 && !root->grp_assignment_read);
2210 sth_created = true;
2211 if (MAY_HAVE_DEBUG_STMTS)
2213 root->grp_to_be_debug_replaced = 1;
2214 root->replacement_decl = create_access_replacement (root);
2218 if (covered_to < limit)
2219 hole = true;
2220 if (scalar)
2221 root->grp_total_scalarization = 0;
2224 if (!hole || root->grp_total_scalarization)
2225 root->grp_covered = 1;
2226 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2227 root->grp_unscalarized_data = 1; /* not covered and written to */
2228 return sth_created;
2231 /* Analyze all access trees linked by next_grp by the means of
2232 analyze_access_subtree. */
2233 static bool
2234 analyze_access_trees (struct access *access)
2236 bool ret = false;
2238 while (access)
2240 if (analyze_access_subtree (access, NULL, true))
2241 ret = true;
2242 access = access->next_grp;
2245 return ret;
2248 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2249 SIZE would conflict with an already existing one. If exactly such a child
2250 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2252 static bool
2253 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2254 HOST_WIDE_INT size, struct access **exact_match)
2256 struct access *child;
2258 for (child = lacc->first_child; child; child = child->next_sibling)
2260 if (child->offset == norm_offset && child->size == size)
2262 *exact_match = child;
2263 return true;
2266 if (child->offset < norm_offset + size
2267 && child->offset + child->size > norm_offset)
2268 return true;
2271 return false;
2274 /* Create a new child access of PARENT, with all properties just like MODEL
2275 except for its offset and with its grp_write false and grp_read true.
2276 Return the new access or NULL if it cannot be created. Note that this access
2277 is created long after all splicing and sorting; it is not located in any
2278 access vector and is automatically a representative of its group. */
2280 static struct access *
2281 create_artificial_child_access (struct access *parent, struct access *model,
2282 HOST_WIDE_INT new_offset)
2284 struct access *access;
2285 struct access **child;
2286 tree expr = parent->base;
2288 gcc_assert (!model->grp_unscalarizable_region);
2290 access = (struct access *) pool_alloc (access_pool);
2291 memset (access, 0, sizeof (struct access));
2292 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2293 model->type))
2295 access->grp_no_warning = true;
2296 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2297 new_offset, model, NULL, false);
2300 access->base = parent->base;
2301 access->expr = expr;
2302 access->offset = new_offset;
2303 access->size = model->size;
2304 access->type = model->type;
2305 access->grp_write = true;
2306 access->grp_read = false;
2308 child = &parent->first_child;
2309 while (*child && (*child)->offset < new_offset)
2310 child = &(*child)->next_sibling;
2312 access->next_sibling = *child;
2313 *child = access;
2315 return access;
2319 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2320 true if any new subaccess was created. Additionally, if RACC is a scalar
2321 access but LACC is not, change the type of the latter, if possible. */
2323 static bool
2324 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2326 struct access *rchild;
2327 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2328 bool ret = false;
2330 if (is_gimple_reg_type (lacc->type)
2331 || lacc->grp_unscalarizable_region
2332 || racc->grp_unscalarizable_region)
2333 return false;
2335 if (is_gimple_reg_type (racc->type))
2337 if (!lacc->first_child && !racc->first_child)
2339 tree t = lacc->base;
2341 lacc->type = racc->type;
2342 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2343 lacc->offset, racc->type))
2344 lacc->expr = t;
2345 else
2347 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2348 lacc->base, lacc->offset,
2349 racc, NULL, false);
2350 lacc->grp_no_warning = true;
2353 return false;
2356 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2358 struct access *new_acc = NULL;
2359 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2361 if (rchild->grp_unscalarizable_region)
2362 continue;
2364 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2365 &new_acc))
2367 if (new_acc)
2369 rchild->grp_hint = 1;
2370 new_acc->grp_hint |= new_acc->grp_read;
2371 if (rchild->first_child)
2372 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2374 continue;
2377 rchild->grp_hint = 1;
2378 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2379 if (new_acc)
2381 ret = true;
2382 if (racc->first_child)
2383 propagate_subaccesses_across_link (new_acc, rchild);
2387 return ret;
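/* Illustrative example (hypothetical): for an assignment "dst = src" where
   only components of SRC have been accessed directly, the propagation above
   creates matching artificial child accesses under the DST representative
   so that the copy can later be carried out component by component instead
   of as one aggregate move.  */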
2390 /* Propagate all subaccesses across assignment links. */
2392 static void
2393 propagate_all_subaccesses (void)
2395 while (work_queue_head)
2397 struct access *racc = pop_access_from_work_queue ();
2398 struct assign_link *link;
2400 gcc_assert (racc->first_link);
2402 for (link = racc->first_link; link; link = link->next)
2404 struct access *lacc = link->lacc;
2406 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2407 continue;
2408 lacc = lacc->group_representative;
2409 if (propagate_subaccesses_across_link (lacc, racc)
2410 && lacc->first_link)
2411 add_access_to_work_queue (lacc);
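/* Illustrative note: this is a simple fixed-point iteration; an LHS
   representative is re-queued whenever new subaccesses were propagated to
   it and it itself appears on the right hand side of some recorded
   assignment link.  */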
2416 /* Go through all accesses collected throughout the (intraprocedural) analysis
2417 stage, exclude overlapping ones, identify representatives and build trees
2418 out of them, making decisions about scalarization on the way. Return true
2419 iff there are any to-be-scalarized variables after this stage. */
2421 static bool
2422 analyze_all_variable_accesses (void)
2424 int res = 0;
2425 bitmap tmp = BITMAP_ALLOC (NULL);
2426 bitmap_iterator bi;
2427 unsigned i, max_total_scalarization_size;
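/* Heuristic limit in bits: aggregates larger than a few machine words
   (scaled by the target's MOVE_RATIO) are not considered for total
   scalarization.  */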
2429 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2430 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2432 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2433 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2434 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2436 tree var = candidate (i);
2438 if (TREE_CODE (var) == VAR_DECL
2439 && type_consists_of_records_p (TREE_TYPE (var)))
2441 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2442 <= max_total_scalarization_size)
2444 completely_scalarize_var (var);
2445 if (dump_file && (dump_flags & TDF_DETAILS))
2447 fprintf (dump_file, "Will attempt to totally scalarize ");
2448 print_generic_expr (dump_file, var, 0);
2449 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2452 else if (dump_file && (dump_flags & TDF_DETAILS))
2454 fprintf (dump_file, "Too big to totally scalarize: ");
2455 print_generic_expr (dump_file, var, 0);
2456 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2461 bitmap_copy (tmp, candidate_bitmap);
2462 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2464 tree var = candidate (i);
2465 struct access *access;
2467 access = sort_and_splice_var_accesses (var);
2468 if (!access || !build_access_trees (access))
2469 disqualify_candidate (var,
2470 "No or inhibitingly overlapping accesses.");
2473 propagate_all_subaccesses ();
2475 bitmap_copy (tmp, candidate_bitmap);
2476 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2478 tree var = candidate (i);
2479 struct access *access = get_first_repr_for_decl (var);
2481 if (analyze_access_trees (access))
2483 res++;
2484 if (dump_file && (dump_flags & TDF_DETAILS))
2486 fprintf (dump_file, "\nAccess trees for ");
2487 print_generic_expr (dump_file, var, 0);
2488 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2489 dump_access_tree (dump_file, access);
2490 fprintf (dump_file, "\n");
2493 else
2494 disqualify_candidate (var, "No scalar replacements to be created.");
2497 BITMAP_FREE (tmp);
2499 if (res)
2501 statistics_counter_event (cfun, "Scalarized aggregates", res);
2502 return true;
2504 else
2505 return false;
2508 /* Generate statements copying scalar replacements of accesses within a subtree
2509 into or out of AGG. ACCESS, all its children, siblings and their children
2510 are to be processed. AGG is an aggregate type expression (can be a
2511 declaration but does not have to be, it can for example also be a mem_ref or
2512 a series of handled components). TOP_OFFSET is the offset of the processed
2513 subtree which has to be subtracted from offsets of individual accesses to
2514 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2515 replacements in the interval <start_offset, start_offset + chunk_size>,
2516 otherwise copy all. GSI is a statement iterator used to place the new
2517 statements. WRITE should be true when the statements should write from AGG
2518 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2519 statements will be added after the current statement in GSI, they will be
2520 added before the statement otherwise. */
2522 static void
2523 generate_subtree_copies (struct access *access, tree agg,
2524 HOST_WIDE_INT top_offset,
2525 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2526 gimple_stmt_iterator *gsi, bool write,
2527 bool insert_after, location_t loc)
2531 if (chunk_size && access->offset >= start_offset + chunk_size)
2532 return;
2534 if (access->grp_to_be_replaced
2535 && (chunk_size == 0
2536 || access->offset + access->size > start_offset))
2538 tree expr, repl = get_access_replacement (access);
2539 gimple stmt;
2541 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2542 access, gsi, insert_after);
2544 if (write)
2546 if (access->grp_partial_lhs)
2547 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2548 !insert_after,
2549 insert_after ? GSI_NEW_STMT
2550 : GSI_SAME_STMT);
2551 stmt = gimple_build_assign (repl, expr);
2553 else
2555 TREE_NO_WARNING (repl) = 1;
2556 if (access->grp_partial_lhs)
2557 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2558 !insert_after,
2559 insert_after ? GSI_NEW_STMT
2560 : GSI_SAME_STMT);
2561 stmt = gimple_build_assign (expr, repl);
2563 gimple_set_location (stmt, loc);
2565 if (insert_after)
2566 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2567 else
2568 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2569 update_stmt (stmt);
2570 sra_stats.subtree_copies++;
2572 else if (write
2573 && access->grp_to_be_debug_replaced
2574 && (chunk_size == 0
2575 || access->offset + access->size > start_offset))
2577 gimple ds;
2578 tree drhs = build_debug_ref_for_model (loc, agg,
2579 access->offset - top_offset,
2580 access);
2581 ds = gimple_build_debug_bind (get_access_replacement (access),
2582 drhs, gsi_stmt (*gsi));
2583 if (insert_after)
2584 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2585 else
2586 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2589 if (access->first_child)
2590 generate_subtree_copies (access->first_child, agg, top_offset,
2591 start_offset, chunk_size, gsi,
2592 write, insert_after, loc);
2594 access = access->next_sibling;
2596 while (access);
2599 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2600 the root of the subtree to be processed. GSI is the statement iterator used
2601 for inserting statements which are added after the current statement if
2602 INSERT_AFTER is true or before it otherwise. */
2604 static void
2605 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2606 bool insert_after, location_t loc)
2609 struct access *child;
2611 if (access->grp_to_be_replaced)
2613 gimple stmt;
2615 stmt = gimple_build_assign (get_access_replacement (access),
2616 build_zero_cst (access->type));
2617 if (insert_after)
2618 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2619 else
2620 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2621 update_stmt (stmt);
2622 gimple_set_location (stmt, loc);
2624 else if (access->grp_to_be_debug_replaced)
2626 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2627 build_zero_cst (access->type),
2628 gsi_stmt (*gsi));
2629 if (insert_after)
2630 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2631 else
2632 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2635 for (child = access->first_child; child; child = child->next_sibling)
2636 init_subtree_with_zero (child, gsi, insert_after, loc);
2639 /* Search for an access representative for the given expression EXPR and
2640 return it or NULL if it cannot be found. */
2642 static struct access *
2643 get_access_for_expr (tree expr)
2645 HOST_WIDE_INT offset, size, max_size;
2646 tree base;
2648 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2649 a different size than the size of its argument and we need the latter
2650 one. */
2651 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2652 expr = TREE_OPERAND (expr, 0);
2654 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2655 if (max_size == -1 || !DECL_P (base))
2656 return NULL;
2658 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2659 return NULL;
2661 return get_var_base_offset_size_access (base, offset, max_size);
2664 /* Replace the expression EXPR with a scalar replacement if there is one and
2665 generate other statements to do type conversion or subtree copying if
2666 necessary. GSI is used to place newly created statements, WRITE is true if
2667 the expression is being written to (it is on a LHS of a statement or output
2668 in an assembly statement). */
2670 static bool
2671 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2673 location_t loc;
2674 struct access *access;
2675 tree type, bfr;
2677 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2679 bfr = *expr;
2680 expr = &TREE_OPERAND (*expr, 0);
2682 else
2683 bfr = NULL_TREE;
2685 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2686 expr = &TREE_OPERAND (*expr, 0);
2687 access = get_access_for_expr (*expr);
2688 if (!access)
2689 return false;
2690 type = TREE_TYPE (*expr);
2692 loc = gimple_location (gsi_stmt (*gsi));
2693 if (access->grp_to_be_replaced)
2695 tree repl = get_access_replacement (access);
2696 /* If we replace a non-register typed access simply use the original
2697 access expression to extract the scalar component afterwards.
2698 This happens if scalarizing a function return value or parameter
2699 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2700 gcc.c-torture/compile/20011217-1.c.
2702 We also want to use this when accessing a complex or vector which can
2703 be accessed as a different type too, potentially creating a need for
2704 type conversion (see PR42196) and when scalarized unions are involved
2705 in assembler statements (see PR42398). */
2706 if (!useless_type_conversion_p (type, access->type))
2708 tree ref;
2710 ref = build_ref_for_model (loc, access->base, access->offset, access,
2711 NULL, false);
2713 if (write)
2715 gimple stmt;
2717 if (access->grp_partial_lhs)
2718 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2719 false, GSI_NEW_STMT);
2720 stmt = gimple_build_assign (repl, ref);
2721 gimple_set_location (stmt, loc);
2722 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2724 else
2726 gimple stmt;
2728 if (access->grp_partial_lhs)
2729 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2730 true, GSI_SAME_STMT);
2731 stmt = gimple_build_assign (ref, repl);
2732 gimple_set_location (stmt, loc);
2733 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2736 else
2737 *expr = repl;
2738 sra_stats.exprs++;
2740 else if (write && access->grp_to_be_debug_replaced)
2742 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2743 NULL_TREE,
2744 gsi_stmt (*gsi));
2745 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2748 if (access->first_child)
2750 HOST_WIDE_INT start_offset, chunk_size;
2751 if (bfr
2752 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2753 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2755 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2756 start_offset = access->offset
2757 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2759 else
2760 start_offset = chunk_size = 0;
2762 generate_subtree_copies (access->first_child, access->base, 0,
2763 start_offset, chunk_size, gsi, write, write,
2764 loc);
2766 return true;
2769 /* Where scalar replacements of the RHS have been written to when a replacement
2770 of the LHS of an assignment cannot be directly loaded from a replacement of
2771 the RHS. */
2772 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2773 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2774 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2776 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2777 base aggregate if there is unscalarized data or directly to the LHS of the
2778 statement that is pointed to by GSI otherwise. */
2780 static enum unscalarized_data_handling
2781 handle_unscalarized_data_in_subtree (struct access *top_racc,
2782 gimple_stmt_iterator *gsi)
2784 if (top_racc->grp_unscalarized_data)
2786 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2787 gsi, false, false,
2788 gimple_location (gsi_stmt (*gsi)));
2789 return SRA_UDH_RIGHT;
2791 else
2793 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2794 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2795 0, 0, gsi, false, false,
2796 gimple_location (gsi_stmt (*gsi)));
2797 return SRA_UDH_LEFT;
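/* Illustrative note: SRA_UDH_RIGHT means the RHS aggregate still contained
   data that was never scalarized, so its scalar replacements were flushed
   back into it and the LHS will be (re)loaded from that refreshed
   aggregate; SRA_UDH_LEFT means every replacement was stored straight into
   the LHS of the statement instead.  */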
2802 /* Try to generate statements to load all sub-replacements in an access subtree
2803 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2804 If that is not possible, refresh the TOP_RACC base aggregate and load the
2805 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2806 copied. NEW_GSI is stmt iterator used for statement insertions after the
2807 original assignment, OLD_GSI is used to insert statements before the
2808 assignment. *REFRESHED keeps the information whether we have needed to
2809 refresh replacements of the LHS and from which side of the assignment this
2810 takes place. */
2812 static void
2813 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2814 HOST_WIDE_INT left_offset,
2815 gimple_stmt_iterator *old_gsi,
2816 gimple_stmt_iterator *new_gsi,
2817 enum unscalarized_data_handling *refreshed)
2819 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2820 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2822 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2824 if (lacc->grp_to_be_replaced)
2826 struct access *racc;
2827 gimple stmt;
2828 tree rhs;
2830 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2831 if (racc && racc->grp_to_be_replaced)
2833 rhs = get_access_replacement (racc);
2834 if (!useless_type_conversion_p (lacc->type, racc->type))
2835 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2837 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2838 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2839 true, GSI_SAME_STMT);
2841 else
2843 /* No suitable access on the right hand side, need to load from
2844 the aggregate. See if we have to update it first... */
2845 if (*refreshed == SRA_UDH_NONE)
2846 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2847 old_gsi);
2849 if (*refreshed == SRA_UDH_LEFT)
2850 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2851 new_gsi, true);
2852 else
2853 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2854 new_gsi, true);
2855 if (lacc->grp_partial_lhs)
2856 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2857 false, GSI_NEW_STMT);
2860 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2861 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2862 gimple_set_location (stmt, loc);
2863 update_stmt (stmt);
2864 sra_stats.subreplacements++;
2866 else
2868 if (*refreshed == SRA_UDH_NONE
2869 && lacc->grp_read && !lacc->grp_covered)
2870 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2871 old_gsi);
2872 if (lacc && lacc->grp_to_be_debug_replaced)
2874 gimple ds;
2875 tree drhs;
2876 struct access *racc = find_access_in_subtree (top_racc, offset,
2877 lacc->size);
2879 if (racc && racc->grp_to_be_replaced)
2881 if (racc->grp_write)
2882 drhs = get_access_replacement (racc);
2883 else
2884 drhs = NULL;
2886 else if (*refreshed == SRA_UDH_LEFT)
2887 drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2888 lacc);
2889 else if (*refreshed == SRA_UDH_RIGHT)
2890 drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2891 lacc);
2892 else
2893 drhs = NULL_TREE;
2894 if (drhs
2895 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
2896 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2897 lacc->type, drhs);
2898 ds = gimple_build_debug_bind (get_access_replacement (lacc),
2899 drhs, gsi_stmt (*old_gsi));
2900 gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2904 if (lacc->first_child)
2905 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2906 old_gsi, new_gsi, refreshed);
2910 /* Result code for SRA assignment modification. */
2911 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2912 SRA_AM_MODIFIED, /* stmt changed but not
2913 removed */
2914 SRA_AM_REMOVED }; /* stmt eliminated */
2916 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2917 to the assignment and GSI is the statement iterator pointing at it. Returns
2918 the same values as sra_modify_assign. */
2920 static enum assignment_mod_result
2921 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2923 tree lhs = gimple_assign_lhs (*stmt);
2924 struct access *acc;
2925 location_t loc;
2927 acc = get_access_for_expr (lhs);
2928 if (!acc)
2929 return SRA_AM_NONE;
2931 if (gimple_clobber_p (*stmt))
2933 /* Remove clobbers of fully scalarized variables, otherwise
2934 do nothing. */
2935 if (acc->grp_covered)
2937 unlink_stmt_vdef (*stmt);
2938 gsi_remove (gsi, true);
2939 release_defs (*stmt);
2940 return SRA_AM_REMOVED;
2942 else
2943 return SRA_AM_NONE;
2946 loc = gimple_location (*stmt);
2947 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2949 /* I have never seen this code path trigger but if it can happen the
2950 following should handle it gracefully. */
2951 if (access_has_children_p (acc))
2952 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2953 true, true, loc);
2954 return SRA_AM_MODIFIED;
2957 if (acc->grp_covered)
2959 init_subtree_with_zero (acc, gsi, false, loc);
2960 unlink_stmt_vdef (*stmt);
2961 gsi_remove (gsi, true);
2962 release_defs (*stmt);
2963 return SRA_AM_REMOVED;
2965 else
2967 init_subtree_with_zero (acc, gsi, true, loc);
2968 return SRA_AM_MODIFIED;
2972 /* Create and return a new suitable default definition SSA_NAME for RACC which
2973 is an access describing an uninitialized part of an aggregate that is being
2974 loaded. */
2976 static tree
2977 get_repl_default_def_ssa_name (struct access *racc)
2979 gcc_checking_assert (!racc->grp_to_be_replaced &&
2980 !racc->grp_to_be_debug_replaced);
2981 if (!racc->replacement_decl)
2982 racc->replacement_decl = create_access_replacement (racc);
2983 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
2986 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2987 bit-field field declaration somewhere in it. */
2989 static inline bool
2990 contains_vce_or_bfcref_p (const_tree ref)
2992 while (handled_component_p (ref))
2994 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2995 || (TREE_CODE (ref) == COMPONENT_REF
2996 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2997 return true;
2998 ref = TREE_OPERAND (ref, 0);
3001 return false;
3004 /* Examine both sides of the assignment statement pointed to by STMT, replace
3005 them with a scalar replacement if there is one and generate copying of
3006 replacements if scalarized aggregates have been used in the assignment. GSI
3007 is used to hold generated statements for type conversions and subtree
3008 copying. */
3010 static enum assignment_mod_result
3011 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3013 struct access *lacc, *racc;
3014 tree lhs, rhs;
3015 bool modify_this_stmt = false;
3016 bool force_gimple_rhs = false;
3017 location_t loc;
3018 gimple_stmt_iterator orig_gsi = *gsi;
3020 if (!gimple_assign_single_p (*stmt))
3021 return SRA_AM_NONE;
3022 lhs = gimple_assign_lhs (*stmt);
3023 rhs = gimple_assign_rhs1 (*stmt);
3025 if (TREE_CODE (rhs) == CONSTRUCTOR)
3026 return sra_modify_constructor_assign (stmt, gsi);
3028 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3029 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3030 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3032 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3033 gsi, false);
3034 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3035 gsi, true);
3036 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3039 lacc = get_access_for_expr (lhs);
3040 racc = get_access_for_expr (rhs);
3041 if (!lacc && !racc)
3042 return SRA_AM_NONE;
3044 loc = gimple_location (*stmt);
3045 if (lacc && lacc->grp_to_be_replaced)
3047 lhs = get_access_replacement (lacc);
3048 gimple_assign_set_lhs (*stmt, lhs);
3049 modify_this_stmt = true;
3050 if (lacc->grp_partial_lhs)
3051 force_gimple_rhs = true;
3052 sra_stats.exprs++;
3055 if (racc && racc->grp_to_be_replaced)
3057 rhs = get_access_replacement (racc);
3058 modify_this_stmt = true;
3059 if (racc->grp_partial_lhs)
3060 force_gimple_rhs = true;
3061 sra_stats.exprs++;
3063 else if (racc
3064 && !racc->grp_unscalarized_data
3065 && TREE_CODE (lhs) == SSA_NAME
3066 && !access_has_replacements_p (racc))
3068 rhs = get_repl_default_def_ssa_name (racc);
3069 modify_this_stmt = true;
3070 sra_stats.exprs++;
3073 if (modify_this_stmt)
3075 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3077 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3078 ??? This should move to fold_stmt which we simply should
3079 call after building a VIEW_CONVERT_EXPR here. */
3080 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3081 && !contains_bitfld_component_ref_p (lhs))
3083 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3084 gimple_assign_set_lhs (*stmt, lhs);
3086 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3087 && !contains_vce_or_bfcref_p (rhs))
3088 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3090 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3092 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3093 rhs);
3094 if (is_gimple_reg_type (TREE_TYPE (lhs))
3095 && TREE_CODE (lhs) != SSA_NAME)
3096 force_gimple_rhs = true;
3101 if (lacc && lacc->grp_to_be_debug_replaced)
3103 tree dlhs = get_access_replacement (lacc);
3104 tree drhs = unshare_expr (rhs);
3105 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3107 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3108 && !contains_vce_or_bfcref_p (drhs))
3109 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3110 if (drhs
3111 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3112 TREE_TYPE (drhs)))
3113 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3114 TREE_TYPE (dlhs), drhs);
3116 gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3117 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3120 /* From this point on, the function deals with assignments in between
3121 aggregates when at least one has scalar reductions of some of its
3122 components. There are three possible scenarios: 1) both the LHS and the RHS
3123 have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3125 In the first case, we would like to load the LHS components from RHS
3126 components whenever possible. If that is not possible, we would like to
3127 read it directly from the RHS (after updating it by storing in it its own
3128 components). If there are some necessary unscalarized data in the LHS,
3129 those will be loaded by the original assignment too. If neither of these
3130 cases happen, the original statement can be removed. Most of this is done
3131 by load_assign_lhs_subreplacements.
3133 In the second case, we would like to store all RHS scalarized components
3134 directly into LHS and if they cover the aggregate completely, remove the
3135 statement too. In the third case, we want the LHS components to be loaded
3136 directly from the RHS (DSE will remove the original statement if it
3137 becomes redundant).
3139 This is a bit complex but manageable when types match and when unions do
3140 not cause confusion in a way that we cannot really load a component of LHS
3141 from the RHS or vice versa (the access representing this level can have
3142 subaccesses that are accessible only through a different union field at a
3143 higher level - different from the one used in the examined expression).
3144 Unions are fun.
3146 Therefore, I specially handle a fourth case, happening when there is a
3147 specific type cast or it is impossible to locate a scalarized subaccess on
3148 the other side of the expression. If that happens, I simply "refresh" the
3149 RHS by storing in it its scalarized components, leave the original statement
3150 there to do the copying and then load the scalar replacements of the LHS.
3151 This is what the first branch does. */
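/* Illustrative example (hypothetical): for "d = s" where both D and S are
   structures with scalarized components, the code below tries to load the
   replacements of D's components directly from the replacements of S's
   components (load_assign_lhs_subreplacements) and, unless data had to be
   flushed back into the RHS aggregate first, removes the original aggregate
   assignment altogether.  */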
3153 if (modify_this_stmt
3154 || gimple_has_volatile_ops (*stmt)
3155 || contains_vce_or_bfcref_p (rhs)
3156 || contains_vce_or_bfcref_p (lhs))
3158 if (access_has_children_p (racc))
3159 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3160 gsi, false, false, loc);
3161 if (access_has_children_p (lacc))
3162 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3163 gsi, true, true, loc);
3164 sra_stats.separate_lhs_rhs_handling++;
3166 /* This gimplification must be done after generate_subtree_copies,
3167 lest we insert the subtree copies in the middle of the gimplified
3168 sequence. */
3169 if (force_gimple_rhs)
3170 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3171 true, GSI_SAME_STMT);
3172 if (gimple_assign_rhs1 (*stmt) != rhs)
3174 modify_this_stmt = true;
3175 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3176 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3179 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3181 else
3183 if (access_has_children_p (lacc)
3184 && access_has_children_p (racc)
3185 /* When an access represents an unscalarizable region, it usually
3186 represents accesses with variable offset and thus must not be used
3187 to generate new memory accesses. */
3188 && !lacc->grp_unscalarizable_region
3189 && !racc->grp_unscalarizable_region)
3191 gimple_stmt_iterator orig_gsi = *gsi;
3192 enum unscalarized_data_handling refreshed;
3194 if (lacc->grp_read && !lacc->grp_covered)
3195 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3196 else
3197 refreshed = SRA_UDH_NONE;
3199 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3200 &orig_gsi, gsi, &refreshed);
3201 if (refreshed != SRA_UDH_RIGHT)
3203 gsi_next (gsi);
3204 unlink_stmt_vdef (*stmt);
3205 gsi_remove (&orig_gsi, true);
3206 release_defs (*stmt);
3207 sra_stats.deleted++;
3208 return SRA_AM_REMOVED;
3211 else
3213 if (access_has_children_p (racc)
3214 && !racc->grp_unscalarized_data)
3216 if (dump_file)
3218 fprintf (dump_file, "Removing load: ");
3219 print_gimple_stmt (dump_file, *stmt, 0, 0);
3221 generate_subtree_copies (racc->first_child, lhs,
3222 racc->offset, 0, 0, gsi,
3223 false, false, loc);
3224 gcc_assert (*stmt == gsi_stmt (*gsi));
3225 unlink_stmt_vdef (*stmt);
3226 gsi_remove (gsi, true);
3227 release_defs (*stmt);
3228 sra_stats.deleted++;
3229 return SRA_AM_REMOVED;
3231 /* Restore the aggregate RHS from its components so the
3232 prevailing aggregate copy does the right thing. */
3233 if (access_has_children_p (racc))
3234 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3235 gsi, false, false, loc);
3236 /* Re-load the components of the aggregate copy destination.
3237 But use the RHS aggregate to load from to expose more
3238 optimization opportunities. */
3239 if (access_has_children_p (lacc))
3240 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3241 0, 0, gsi, true, true, loc);
3244 return SRA_AM_NONE;
3248 /* Traverse the function body and perform all modifications as decided in
3249 analyze_all_variable_accesses. Return true iff the CFG has been
3250 changed. */
3252 static bool
3253 sra_modify_function_body (void)
3255 bool cfg_changed = false;
3256 basic_block bb;
3258 FOR_EACH_BB (bb)
3260 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3261 while (!gsi_end_p (gsi))
3263 gimple stmt = gsi_stmt (gsi);
3264 enum assignment_mod_result assign_result;
3265 bool modified = false, deleted = false;
3266 tree *t;
3267 unsigned i;
3269 switch (gimple_code (stmt))
3271 case GIMPLE_RETURN:
3272 t = gimple_return_retval_ptr (stmt);
3273 if (*t != NULL_TREE)
3274 modified |= sra_modify_expr (t, &gsi, false);
3275 break;
3277 case GIMPLE_ASSIGN:
3278 assign_result = sra_modify_assign (&stmt, &gsi);
3279 modified |= assign_result == SRA_AM_MODIFIED;
3280 deleted = assign_result == SRA_AM_REMOVED;
3281 break;
3283 case GIMPLE_CALL:
3284 /* Operands must be processed before the lhs. */
3285 for (i = 0; i < gimple_call_num_args (stmt); i++)
3287 t = gimple_call_arg_ptr (stmt, i);
3288 modified |= sra_modify_expr (t, &gsi, false);
3291 if (gimple_call_lhs (stmt))
3293 t = gimple_call_lhs_ptr (stmt);
3294 modified |= sra_modify_expr (t, &gsi, true);
3296 break;
3298 case GIMPLE_ASM:
3299 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3301 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3302 modified |= sra_modify_expr (t, &gsi, false);
3304 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3306 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3307 modified |= sra_modify_expr (t, &gsi, true);
3309 break;
3311 default:
3312 break;
3315 if (modified)
3317 update_stmt (stmt);
3318 if (maybe_clean_eh_stmt (stmt)
3319 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3320 cfg_changed = true;
3322 if (!deleted)
3323 gsi_next (&gsi);
3327 return cfg_changed;
3330 /* Generate statements initializing scalar replacements of parts of function
3331 parameters. */
3333 static void
3334 initialize_parameter_reductions (void)
3336 gimple_stmt_iterator gsi;
3337 gimple_seq seq = NULL;
3338 tree parm;
3340 gsi = gsi_start (seq);
3341 for (parm = DECL_ARGUMENTS (current_function_decl);
3342 parm;
3343 parm = DECL_CHAIN (parm))
3345 vec<access_p> *access_vec;
3346 struct access *access;
3348 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3349 continue;
3350 access_vec = get_base_access_vector (parm);
3351 if (!access_vec)
3352 continue;
3354 for (access = (*access_vec)[0];
3355 access;
3356 access = access->next_grp)
3357 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3358 EXPR_LOCATION (parm));
3361 seq = gsi_seq (gsi);
3362 if (seq)
3363 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3366 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3367 it reveals there are components of some aggregates to be scalarized, it runs
3368 the required transformations. */
3369 static unsigned int
3370 perform_intra_sra (void)
3372 int ret = 0;
3373 sra_initialize ();
3375 if (!find_var_candidates ())
3376 goto out;
3378 if (!scan_function ())
3379 goto out;
3381 if (!analyze_all_variable_accesses ())
3382 goto out;
3384 if (sra_modify_function_body ())
3385 ret = TODO_update_ssa | TODO_cleanup_cfg;
3386 else
3387 ret = TODO_update_ssa;
3388 initialize_parameter_reductions ();
3390 statistics_counter_event (cfun, "Scalar replacements created",
3391 sra_stats.replacements);
3392 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3393 statistics_counter_event (cfun, "Subtree copy stmts",
3394 sra_stats.subtree_copies);
3395 statistics_counter_event (cfun, "Subreplacement stmts",
3396 sra_stats.subreplacements);
3397 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3398 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3399 sra_stats.separate_lhs_rhs_handling);
3401 out:
3402 sra_deinitialize ();
3403 return ret;
3406 /* Perform early intraprocedural SRA. */
3407 static unsigned int
3408 early_intra_sra (void)
3410 sra_mode = SRA_MODE_EARLY_INTRA;
3411 return perform_intra_sra ();
3414 /* Perform "late" intraprocedural SRA. */
3415 static unsigned int
3416 late_intra_sra (void)
3418 sra_mode = SRA_MODE_INTRA;
3419 return perform_intra_sra ();
3423 static bool
3424 gate_intra_sra (void)
3426 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3430 struct gimple_opt_pass pass_sra_early =
3433 GIMPLE_PASS,
3434 "esra", /* name */
3435 OPTGROUP_NONE, /* optinfo_flags */
3436 gate_intra_sra, /* gate */
3437 early_intra_sra, /* execute */
3438 NULL, /* sub */
3439 NULL, /* next */
3440 0, /* static_pass_number */
3441 TV_TREE_SRA, /* tv_id */
3442 PROP_cfg | PROP_ssa, /* properties_required */
3443 0, /* properties_provided */
3444 0, /* properties_destroyed */
3445 0, /* todo_flags_start */
3446 TODO_update_ssa
3447 | TODO_ggc_collect
3448 | TODO_verify_ssa /* todo_flags_finish */
3452 struct gimple_opt_pass pass_sra =
3455 GIMPLE_PASS,
3456 "sra", /* name */
3457 OPTGROUP_NONE, /* optinfo_flags */
3458 gate_intra_sra, /* gate */
3459 late_intra_sra, /* execute */
3460 NULL, /* sub */
3461 NULL, /* next */
3462 0, /* static_pass_number */
3463 TV_TREE_SRA, /* tv_id */
3464 PROP_cfg | PROP_ssa, /* properties_required */
3465 0, /* properties_provided */
3466 0, /* properties_destroyed */
3467 TODO_update_address_taken, /* todo_flags_start */
3468 TODO_update_ssa
3469 | TODO_ggc_collect
3470 | TODO_verify_ssa /* todo_flags_finish */
3475 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3476 parameter. */
3478 static bool
3479 is_unused_scalar_param (tree parm)
3481 tree name;
3482 return (is_gimple_reg (parm)
3483 && (!(name = ssa_default_def (cfun, parm))
3484 || has_zero_uses (name)));
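/* Illustrative note: a parameter counts as unused here when it either has no
   default definition SSA name at all or that name has zero uses, as in
   "int f (int i) { return 0; }" for I.  */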
3487 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3488 examine whether there are any direct or otherwise infeasible ones. If so,
3489 return true, otherwise return false. PARM must be a gimple register with a
3490 non-NULL default definition. */
3492 static bool
3493 ptr_parm_has_direct_uses (tree parm)
3495 imm_use_iterator ui;
3496 gimple stmt;
3497 tree name = ssa_default_def (cfun, parm);
3498 bool ret = false;
3500 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3502 int uses_ok = 0;
3503 use_operand_p use_p;
3505 if (is_gimple_debug (stmt))
3506 continue;
3508 /* Valid uses include dereferences on the lhs and the rhs. */
3509 if (gimple_has_lhs (stmt))
3511 tree lhs = gimple_get_lhs (stmt);
3512 while (handled_component_p (lhs))
3513 lhs = TREE_OPERAND (lhs, 0);
3514 if (TREE_CODE (lhs) == MEM_REF
3515 && TREE_OPERAND (lhs, 0) == name
3516 && integer_zerop (TREE_OPERAND (lhs, 1))
3517 && types_compatible_p (TREE_TYPE (lhs),
3518 TREE_TYPE (TREE_TYPE (name)))
3519 && !TREE_THIS_VOLATILE (lhs))
3520 uses_ok++;
3522 if (gimple_assign_single_p (stmt))
3524 tree rhs = gimple_assign_rhs1 (stmt);
3525 while (handled_component_p (rhs))
3526 rhs = TREE_OPERAND (rhs, 0);
3527 if (TREE_CODE (rhs) == MEM_REF
3528 && TREE_OPERAND (rhs, 0) == name
3529 && integer_zerop (TREE_OPERAND (rhs, 1))
3530 && types_compatible_p (TREE_TYPE (rhs),
3531 TREE_TYPE (TREE_TYPE (name)))
3532 && !TREE_THIS_VOLATILE (rhs))
3533 uses_ok++;
3535 else if (is_gimple_call (stmt))
3537 unsigned i;
3538 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3540 tree arg = gimple_call_arg (stmt, i);
3541 while (handled_component_p (arg))
3542 arg = TREE_OPERAND (arg, 0);
3543 if (TREE_CODE (arg) == MEM_REF
3544 && TREE_OPERAND (arg, 0) == name
3545 && integer_zerop (TREE_OPERAND (arg, 1))
3546 && types_compatible_p (TREE_TYPE (arg),
3547 TREE_TYPE (TREE_TYPE (name)))
3548 && !TREE_THIS_VOLATILE (arg))
3549 uses_ok++;
3553 /* If the number of valid uses does not match the number of
3554 uses in this stmt there is an unhandled use. */
3555 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3556 --uses_ok;
3558 if (uses_ok != 0)
3559 ret = true;
3561 if (ret)
3562 BREAK_FROM_IMM_USE_STMT (ui);
3565 return ret;
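/* Illustrative example (hypothetical): for "int f (int *p)", uses such as
   "*p" or "p->fld" are plain dereferences and are acceptable, whereas
   passing P itself to another function or storing the pointer somewhere is
   a direct use that makes the function above return true and prevents
   turning P into a by-value parameter.  */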
3568 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3569 them in candidate_bitmap. Note that these do not necessarily include
3570 parameters which are unused and thus can be removed. Return true iff any
3571 such candidate has been found. */
3573 static bool
3574 find_param_candidates (void)
3576 tree parm;
3577 int count = 0;
3578 bool ret = false;
3579 const char *msg;
3581 for (parm = DECL_ARGUMENTS (current_function_decl);
3582 parm;
3583 parm = DECL_CHAIN (parm))
3585 tree type = TREE_TYPE (parm);
3586 void **slot;
3588 count++;
3590 if (TREE_THIS_VOLATILE (parm)
3591 || TREE_ADDRESSABLE (parm)
3592 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3593 continue;
3595 if (is_unused_scalar_param (parm))
3597 ret = true;
3598 continue;
3601 if (POINTER_TYPE_P (type))
3603 type = TREE_TYPE (type);
3605 if (TREE_CODE (type) == FUNCTION_TYPE
3606 || TYPE_VOLATILE (type)
3607 || (TREE_CODE (type) == ARRAY_TYPE
3608 && TYPE_NONALIASED_COMPONENT (type))
3609 || !is_gimple_reg (parm)
3610 || is_va_list_type (type)
3611 || ptr_parm_has_direct_uses (parm))
3612 continue;
3614 else if (!AGGREGATE_TYPE_P (type))
3615 continue;
3617 if (!COMPLETE_TYPE_P (type)
3618 || !host_integerp (TYPE_SIZE (type), 1)
3619 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3620 || (AGGREGATE_TYPE_P (type)
3621 && type_internals_preclude_sra_p (type, &msg)))
3622 continue;
3624 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3625 slot = htab_find_slot_with_hash (candidates, parm,
3626 DECL_UID (parm), INSERT);
3627 *slot = (void *) parm;
3629 ret = true;
3630 if (dump_file && (dump_flags & TDF_DETAILS))
3632 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3633 print_generic_expr (dump_file, parm, 0);
3634 fprintf (dump_file, "\n");
3638 func_param_count = count;
3639 return ret;
3642 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3643 maybe_modified. */
3645 static bool
3646 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3647 void *data)
3649 struct access *repr = (struct access *) data;
3651 repr->grp_maybe_modified = 1;
3652 return true;
3655 /* Analyze what representatives (in linked lists accessible from
3656 REPRESENTATIVES) can be modified by side effects of statements in the
3657 current function. */
3659 static void
3660 analyze_modified_params (vec<access_p> representatives)
3662 int i;
3664 for (i = 0; i < func_param_count; i++)
3666 struct access *repr;
3668 for (repr = representatives[i];
3669 repr;
3670 repr = repr->next_grp)
3672 struct access *access;
3673 bitmap visited;
3674 ao_ref ar;
3676 if (no_accesses_p (repr))
3677 continue;
3678 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3679 || repr->grp_maybe_modified)
3680 continue;
3682 ao_ref_init (&ar, repr->expr);
3683 visited = BITMAP_ALLOC (NULL);
3684 for (access = repr; access; access = access->next_sibling)
3686 /* All accesses are read ones, otherwise grp_maybe_modified would
3687 be trivially set. */
3688 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3689 mark_maybe_modified, repr, &visited);
3690 if (repr->grp_maybe_modified)
3691 break;
3693 BITMAP_FREE (visited);
3698 /* Propagate distances in bb_dereferences in the opposite direction than the
3699 control flow edges, in each step storing the maximum of the current value
3700 and the minimum of all successors. These steps are repeated until the table
3701 stabilizes. Note that BBs which might terminate the function (according to
3702 the final_bbs bitmap) are never updated in this way. */
3704 static void
3705 propagate_dereference_distances (void)
3707 vec<basic_block> queue;
3708 basic_block bb;
3710 queue.create (last_basic_block_for_function (cfun));
3711 queue.quick_push (ENTRY_BLOCK_PTR);
3712 FOR_EACH_BB (bb)
3714 queue.quick_push (bb);
3715 bb->aux = bb;
3718 while (!queue.is_empty ())
3720 edge_iterator ei;
3721 edge e;
3722 bool change = false;
3723 int i;
3725 bb = queue.pop ();
3726 bb->aux = NULL;
3728 if (bitmap_bit_p (final_bbs, bb->index))
3729 continue;
3731 for (i = 0; i < func_param_count; i++)
3733 int idx = bb->index * func_param_count + i;
3734 bool first = true;
3735 HOST_WIDE_INT inh = 0;
3737 FOR_EACH_EDGE (e, ei, bb->succs)
3739 int succ_idx = e->dest->index * func_param_count + i;
3741 if (e->src == EXIT_BLOCK_PTR)
3742 continue;
3744 if (first)
3746 first = false;
3747 inh = bb_dereferences [succ_idx];
3749 else if (bb_dereferences [succ_idx] < inh)
3750 inh = bb_dereferences [succ_idx];
3753 if (!first && bb_dereferences[idx] < inh)
3755 bb_dereferences[idx] = inh;
3756 change = true;
3760 if (change && !bitmap_bit_p (final_bbs, bb->index))
3761 FOR_EACH_EDGE (e, ei, bb->preds)
3763 if (e->src->aux)
3764 continue;
3766 e->src->aux = e->src;
3767 queue.quick_push (e->src);
3771 queue.release ();
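/* Illustrative note: bb->aux serves as an "already queued" marker above;
   predecessors are only pushed onto the worklist when their aux field is
   clear, and the field is cleared again when a block is popped.  */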
3774 /* Dump a dereferences TABLE with heading STR to file F. */
3776 static void
3777 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3779 basic_block bb;
3781 fprintf (dump_file, str);
3782 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3784 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3785 if (bb != EXIT_BLOCK_PTR)
3787 int i;
3788 for (i = 0; i < func_param_count; i++)
3790 int idx = bb->index * func_param_count + i;
3791 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3794 fprintf (f, "\n");
3796 fprintf (dump_file, "\n");
3799 /* Determine which (parts of) parameters passed by reference and not assigned
3800 to are not certainly dereferenced in this function and thus the
3801 dereferencing cannot be safely moved to the caller without potentially
3802 introducing a segfault. Mark such REPRESENTATIVES as
3803 grp_not_necessarilly_dereferenced.
3805 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3806 part, is calculated for each pointer parameter rather than a simple boolean,
3807 in order to handle cases when only a fraction of the whole
3808 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3809 an example).
3811 The maximum dereference distances for each pointer parameter and BB are
3812 already stored in bb_dereference. This routine simply propagates these
3813 values upwards by propagate_dereference_distances and then compares the
3814 distances of individual parameters in the ENTRY BB to the equivalent
3815 distances of each representative of a (fraction of a) parameter. */
3817 static void
3818 analyze_caller_dereference_legality (vec<access_p> representatives)
3820 int i;
3822 if (dump_file && (dump_flags & TDF_DETAILS))
3823 dump_dereferences_table (dump_file,
3824 "Dereference table before propagation:\n",
3825 bb_dereferences);
3827 propagate_dereference_distances ();
3829 if (dump_file && (dump_flags & TDF_DETAILS))
3830 dump_dereferences_table (dump_file,
3831 "Dereference table after propagation:\n",
3832 bb_dereferences);
3834 for (i = 0; i < func_param_count; i++)
3836 struct access *repr = representatives[i];
3837 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3839 if (!repr || no_accesses_p (repr))
3840 continue;
3844 if ((repr->offset + repr->size) > bb_dereferences[idx])
3845 repr->grp_not_necessarilly_dereferenced = 1;
3846 repr = repr->next_grp;
3848 while (repr);
3852 /* Return the representative access for the parameter declaration PARM if it is
3853 a scalar passed by reference which is not written to and the pointer value
3854 is not used directly. Thus, if it is legal to dereference it in the caller
3855 and we can rule out modifications through aliases, such a parameter should be
3856 turned into one passed by value. Return NULL otherwise. */
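/* A hypothetical illustration (not taken from the sources): a function such
   as

     int foo (const int *p) { return *p + 1; }

   whose parameter is only ever read through the pointer can have its "isra"
   clone take the integer by value, roughly

     int foo.isra (int p_val) { return p_val + 1; }

   provided modifications through aliases can be ruled out as described
   above.  */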
3858 static struct access *
3859 unmodified_by_ref_scalar_representative (tree parm)
3861 int i, access_count;
3862 struct access *repr;
3863 vec<access_p> *access_vec;
3865 access_vec = get_base_access_vector (parm);
3866 gcc_assert (access_vec);
3867 repr = (*access_vec)[0];
3868 if (repr->write)
3869 return NULL;
3870 repr->group_representative = repr;
3872 access_count = access_vec->length ();
3873 for (i = 1; i < access_count; i++)
3875 struct access *access = (*access_vec)[i];
3876 if (access->write)
3877 return NULL;
3878 access->group_representative = repr;
3879 access->next_sibling = repr->next_sibling;
3880 repr->next_sibling = access;
3883 repr->grp_read = 1;
3884 repr->grp_scalar_ptr = 1;
3885 return repr;
3888 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3889 associated with. REQ_ALIGN is the minimum required alignment. */
3891 static bool
3892 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3894 unsigned int exp_align;
3895 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3896 is an incompatible assignment in a call statement (and possibly even in asm
3897 statements). This can be relaxed by using a new temporary but only for
3898 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3899 intraprocedural SRA we deal with this by keeping the old aggregate around,
3900 something we cannot do in IPA-SRA.) */
3901 if (access->write
3902 && (is_gimple_call (access->stmt)
3903 || gimple_code (access->stmt) == GIMPLE_ASM))
3904 return true;
3906 exp_align = get_object_alignment (access->expr);
3907 if (exp_align < req_align)
3908 return true;
3910 return false;
3914 /* Sort collected accesses for parameter PARM, identify representatives for
3915 each accessed region and link them together. Return NULL if there are
3916 different but overlapping accesses, return the special pointer value
3917 (no_accesses_representant) if there are no accesses for this parameter at
3918 all, and return the first representative otherwise. Set *RO_GRP if there is
3919 a group of accesses with only read (i.e. no write) accesses. */
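/* For instance (hypothetical offsets and sizes, in bits): accesses covering
   [0, 64) and [32, 64) of the same parameter have different offsets yet
   overlap, so the function returns NULL and the parameter is left alone (the
   "all or nothing law" applied below); accesses covering exactly the same
   bits are instead linked under a single group representative.  */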
3921 static struct access *
3922 splice_param_accesses (tree parm, bool *ro_grp)
3924 int i, j, access_count, group_count;
3925 int agg_size, total_size = 0;
3926 struct access *access, *res, **prev_acc_ptr = &res;
3927 vec<access_p> *access_vec;
3929 access_vec = get_base_access_vector (parm);
3930 if (!access_vec)
3931 return &no_accesses_representant;
3932 access_count = access_vec->length ();
3934 access_vec->qsort (compare_access_positions);
3936 i = 0;
3937 total_size = 0;
3938 group_count = 0;
3939 while (i < access_count)
3941 bool modification;
3942 tree a1_alias_type;
3943 access = (*access_vec)[i];
3944 modification = access->write;
3945 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
3946 return NULL;
3947 a1_alias_type = reference_alias_ptr_type (access->expr);
3949 /* Access is about to become group representative unless we find some
3950 nasty overlap which would preclude us from breaking this parameter
3951 apart. */
3953 j = i + 1;
3954 while (j < access_count)
3956 struct access *ac2 = (*access_vec)[j];
3957 if (ac2->offset != access->offset)
3959 /* All or nothing law for parameters. */
3960 if (access->offset + access->size > ac2->offset)
3961 return NULL;
3962 else
3963 break;
3965 else if (ac2->size != access->size)
3966 return NULL;
3968 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
3969 || (ac2->type != access->type
3970 && (TREE_ADDRESSABLE (ac2->type)
3971 || TREE_ADDRESSABLE (access->type)))
3972 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3973 return NULL;
3975 modification |= ac2->write;
3976 ac2->group_representative = access;
3977 ac2->next_sibling = access->next_sibling;
3978 access->next_sibling = ac2;
3979 j++;
3982 group_count++;
3983 access->grp_maybe_modified = modification;
3984 if (!modification)
3985 *ro_grp = true;
3986 *prev_acc_ptr = access;
3987 prev_acc_ptr = &access->next_grp;
3988 total_size += access->size;
3989 i = j;
3992 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3993 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3994 else
3995 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3996 if (total_size >= agg_size)
3997 return NULL;
3999 gcc_assert (group_count > 0);
4000 return res;
4003 /* Decide whether parameters with representative accesses given by REPR should
4004 be reduced into components. */
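/* A rough worked example (assuming the default value of the
   ipa-sra-ptr-growth-factor parameter, which is 2 unless overridden): for a
   parameter passed by reference on a target with 64-bit pointers, the
   combined size of the proposed scalar components must be smaller than the
   pointed-to aggregate and must not exceed 2 * 64 = 128 bits; when optimizing
   for size the limit drops to the size of the parameter itself.  Components
   that may be modified or are not certainly dereferenced are counted with the
   full parameter size.  */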
4006 static int
4007 decide_one_param_reduction (struct access *repr)
4009 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4010 bool by_ref;
4011 tree parm;
4013 parm = repr->base;
4014 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4015 gcc_assert (cur_parm_size > 0);
4017 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4019 by_ref = true;
4020 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4022 else
4024 by_ref = false;
4025 agg_size = cur_parm_size;
4028 if (dump_file)
4030 struct access *acc;
4031 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4032 print_generic_expr (dump_file, parm, 0);
4033 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4034 for (acc = repr; acc; acc = acc->next_grp)
4035 dump_access (dump_file, acc, true);
4038 total_size = 0;
4039 new_param_count = 0;
4041 for (; repr; repr = repr->next_grp)
4043 gcc_assert (parm == repr->base);
4045 /* Taking the address of a non-addressable field is verboten. */
4046 if (by_ref && repr->non_addressable)
4047 return 0;
4049 /* Do not decompose a non-BLKmode param in a way that would
4050 create BLKmode params. Especially for by-reference passing
4051 (thus, pointer-type param) this is hardly worthwhile. */
4052 if (DECL_MODE (parm) != BLKmode
4053 && TYPE_MODE (repr->type) == BLKmode)
4054 return 0;
4056 if (!by_ref || (!repr->grp_maybe_modified
4057 && !repr->grp_not_necessarilly_dereferenced))
4058 total_size += repr->size;
4059 else
4060 total_size += cur_parm_size;
4062 new_param_count++;
4065 gcc_assert (new_param_count > 0);
4067 if (optimize_function_for_size_p (cfun))
4068 parm_size_limit = cur_parm_size;
4069 else
4070 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4071 * cur_parm_size);
4073 if (total_size < agg_size
4074 && total_size <= parm_size_limit)
4076 if (dump_file)
4077 fprintf (dump_file, " ....will be split into %i components\n",
4078 new_param_count);
4079 return new_param_count;
4081 else
4082 return 0;
4085 /* The order of the following enums is important, we need to do extra work for
4086 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4087 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4088 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
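/* Note that splice_all_param_accesses below also relies on this order when it
   compares results with "<", so that the overall result only ever moves
   towards the later enumerators; reordering them would silently change those
   decisions.  */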
4090 /* Identify representatives of all accesses to all candidate parameters for
4091 IPA-SRA. Return result based on what representatives have been found. */
4093 static enum ipa_splicing_result
4094 splice_all_param_accesses (vec<access_p> &representatives)
4096 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4097 tree parm;
4098 struct access *repr;
4100 representatives.create (func_param_count);
4102 for (parm = DECL_ARGUMENTS (current_function_decl);
4103 parm;
4104 parm = DECL_CHAIN (parm))
4106 if (is_unused_scalar_param (parm))
4108 representatives.quick_push (&no_accesses_representant);
4109 if (result == NO_GOOD_ACCESS)
4110 result = UNUSED_PARAMS;
4112 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4113 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4114 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4116 repr = unmodified_by_ref_scalar_representative (parm);
4117 representatives.quick_push (repr);
4118 if (repr)
4119 result = UNMODIF_BY_REF_ACCESSES;
4121 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4123 bool ro_grp = false;
4124 repr = splice_param_accesses (parm, &ro_grp);
4125 representatives.quick_push (repr);
4127 if (repr && !no_accesses_p (repr))
4129 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4131 if (ro_grp)
4132 result = UNMODIF_BY_REF_ACCESSES;
4133 else if (result < MODIF_BY_REF_ACCESSES)
4134 result = MODIF_BY_REF_ACCESSES;
4136 else if (result < BY_VAL_ACCESSES)
4137 result = BY_VAL_ACCESSES;
4139 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4140 result = UNUSED_PARAMS;
4142 else
4143 representatives.quick_push (NULL);
4146 if (result == NO_GOOD_ACCESS)
4148 representatives.release ();
4149 return NO_GOOD_ACCESS;
4152 return result;
4155 /* Return the index of BASE in PARMS. Abort if it is not found. */
4157 static inline int
4158 get_param_index (tree base, vec<tree> parms)
4160 int i, len;
4162 len = parms.length ();
4163 for (i = 0; i < len; i++)
4164 if (parms[i] == base)
4165 return i;
4166 gcc_unreachable ();
4169 /* Convert the decisions made at the representative level into compact
4170 parameter adjustments. REPRESENTATIVES are pointers to the first
4171 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
4172 final number of adjustments. */
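/* For illustration (hypothetical case): a parameter split into two components
   produces two adjustments sharing the same base_index but carrying the
   offset and type of each representative; a parameter that is kept unchanged
   (NULL representative) gets a single adjustment with copy_param set; an
   unused parameter gets one with remove_param set.  */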
4174 static ipa_parm_adjustment_vec
4175 turn_representatives_into_adjustments (vec<access_p> representatives,
4176 int adjustments_count)
4178 vec<tree> parms;
4179 ipa_parm_adjustment_vec adjustments;
4180 tree parm;
4181 int i;
4183 gcc_assert (adjustments_count > 0);
4184 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4185 adjustments.create (adjustments_count);
4186 parm = DECL_ARGUMENTS (current_function_decl);
4187 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4189 struct access *repr = representatives[i];
4191 if (!repr || no_accesses_p (repr))
4193 struct ipa_parm_adjustment adj;
4195 memset (&adj, 0, sizeof (adj));
4196 adj.base_index = get_param_index (parm, parms);
4197 adj.base = parm;
4198 if (!repr)
4199 adj.copy_param = 1;
4200 else
4201 adj.remove_param = 1;
4202 adjustments.quick_push (adj);
4204 else
4206 struct ipa_parm_adjustment adj;
4207 int index = get_param_index (parm, parms);
4209 for (; repr; repr = repr->next_grp)
4211 memset (&adj, 0, sizeof (adj));
4212 gcc_assert (repr->base == parm);
4213 adj.base_index = index;
4214 adj.base = repr->base;
4215 adj.type = repr->type;
4216 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4217 adj.offset = repr->offset;
4218 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4219 && (repr->grp_maybe_modified
4220 || repr->grp_not_necessarilly_dereferenced));
4221 adjustments.quick_push (adj);
4225 parms.release ();
4226 return adjustments;
4229 /* Analyze the collected accesses and produce a plan for what to do with the
4230 parameters in the form of adjustments, an empty vector meaning nothing. */
4232 static ipa_parm_adjustment_vec
4233 analyze_all_param_acesses (void)
4235 enum ipa_splicing_result repr_state;
4236 bool proceed = false;
4237 int i, adjustments_count = 0;
4238 vec<access_p> representatives;
4239 ipa_parm_adjustment_vec adjustments;
4241 repr_state = splice_all_param_accesses (representatives);
4242 if (repr_state == NO_GOOD_ACCESS)
4243 return ipa_parm_adjustment_vec();
4245 /* If there are any parameters passed by reference which are not modified
4246 directly, we need to check whether they can be modified indirectly. */
4247 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4249 analyze_caller_dereference_legality (representatives);
4250 analyze_modified_params (representatives);
4253 for (i = 0; i < func_param_count; i++)
4255 struct access *repr = representatives[i];
4257 if (repr && !no_accesses_p (repr))
4259 if (repr->grp_scalar_ptr)
4261 adjustments_count++;
4262 if (repr->grp_not_necessarilly_dereferenced
4263 || repr->grp_maybe_modified)
4264 representatives[i] = NULL;
4265 else
4267 proceed = true;
4268 sra_stats.scalar_by_ref_to_by_val++;
4271 else
4273 int new_components = decide_one_param_reduction (repr);
4275 if (new_components == 0)
4277 representatives[i] = NULL;
4278 adjustments_count++;
4280 else
4282 adjustments_count += new_components;
4283 sra_stats.aggregate_params_reduced++;
4284 sra_stats.param_reductions_created += new_components;
4285 proceed = true;
4289 else
4291 if (no_accesses_p (repr))
4293 proceed = true;
4294 sra_stats.deleted_unused_parameters++;
4296 adjustments_count++;
4300 if (!proceed && dump_file)
4301 fprintf (dump_file, "NOT proceeding to change params.\n");
4303 if (proceed)
4304 adjustments = turn_representatives_into_adjustments (representatives,
4305 adjustments_count);
4306 else
4307 adjustments = ipa_parm_adjustment_vec();
4309 representatives.release ();
4310 return adjustments;
4313 /* If a parameter replacement identified by ADJ does not yet exist in the form
4314 of a declaration, create it and record it, otherwise return the previously
4315 created one. */
4317 static tree
4318 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4320 tree repl;
4321 if (!adj->new_ssa_base)
4323 char *pretty_name = make_fancy_name (adj->base);
4325 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4326 DECL_NAME (repl) = get_identifier (pretty_name);
4327 obstack_free (&name_obstack, pretty_name);
4329 adj->new_ssa_base = repl;
4331 else
4332 repl = adj->new_ssa_base;
4333 return repl;
4336 /* Find the first adjustment for a particular parameter BASE in a vector of
4337 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4338 adjustment. */
4340 static struct ipa_parm_adjustment *
4341 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4343 int i, len;
4345 len = adjustments.length ();
4346 for (i = 0; i < len; i++)
4348 struct ipa_parm_adjustment *adj;
4350 adj = &adjustments[i];
4351 if (!adj->copy_param && adj->base == base)
4352 return adj;
4355 return NULL;
4358 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4359 removed because its value is not used, replace the SSA_NAME, together with
4360 all of its uses, with one relating to a newly created VAR_DECL, and return
4361 true. ADJUSTMENTS is the vector of adjustments. */
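/* For example (hypothetical SSA names): if parameter p is being removed but a
   PHI node, assignment or call still defines an SSA name of it, such as p_2,
   that name and all of its uses are rebased onto a fresh "ISR" temporary
   obtained from get_replaced_param_substitute below.  */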
4363 static bool
4364 replace_removed_params_ssa_names (gimple stmt,
4365 ipa_parm_adjustment_vec adjustments)
4367 struct ipa_parm_adjustment *adj;
4368 tree lhs, decl, repl, name;
4370 if (gimple_code (stmt) == GIMPLE_PHI)
4371 lhs = gimple_phi_result (stmt);
4372 else if (is_gimple_assign (stmt))
4373 lhs = gimple_assign_lhs (stmt);
4374 else if (is_gimple_call (stmt))
4375 lhs = gimple_call_lhs (stmt);
4376 else
4377 gcc_unreachable ();
4379 if (TREE_CODE (lhs) != SSA_NAME)
4380 return false;
4382 decl = SSA_NAME_VAR (lhs);
4383 if (decl == NULL_TREE
4384 || TREE_CODE (decl) != PARM_DECL)
4385 return false;
4387 adj = get_adjustment_for_base (adjustments, decl);
4388 if (!adj)
4389 return false;
4391 repl = get_replaced_param_substitute (adj);
4392 name = make_ssa_name (repl, stmt);
4394 if (dump_file)
4396 fprintf (dump_file, "replacing an SSA name of a removed param ");
4397 print_generic_expr (dump_file, lhs, 0);
4398 fprintf (dump_file, " with ");
4399 print_generic_expr (dump_file, name, 0);
4400 fprintf (dump_file, "\n");
4403 if (is_gimple_assign (stmt))
4404 gimple_assign_set_lhs (stmt, name);
4405 else if (is_gimple_call (stmt))
4406 gimple_call_set_lhs (stmt, name);
4407 else
4408 gimple_phi_set_result (stmt, name);
4410 replace_uses_by (lhs, name);
4411 release_ssa_name (lhs);
4412 return true;
4415 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4416 so. ADJUSTMENTS is the vector of adjustments. CONVERT
4417 specifies whether the function should care about type incompatibility between
4418 the current and new expressions. If it is false, the function will leave
4419 incompatibility issues to the caller. Return true iff the expression
4420 was modified. */
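/* As an illustration (hypothetical names): when a by-reference parameter has
   been reduced, a use such as *p or p->fld whose offset matches an adjustment
   is rewritten to the replacement decl recorded in the adjustment's reduction
   field, wrapped in a MEM_REF if the new parameter is itself still passed by
   reference, and in a VIEW_CONVERT_EXPR when CONVERT is true and the types do
   not match.  */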
4422 static bool
4423 sra_ipa_modify_expr (tree *expr, bool convert,
4424 ipa_parm_adjustment_vec adjustments)
4426 int i, len;
4427 struct ipa_parm_adjustment *adj, *cand = NULL;
4428 HOST_WIDE_INT offset, size, max_size;
4429 tree base, src;
4431 len = adjustments.length ();
4433 if (TREE_CODE (*expr) == BIT_FIELD_REF
4434 || TREE_CODE (*expr) == IMAGPART_EXPR
4435 || TREE_CODE (*expr) == REALPART_EXPR)
4437 expr = &TREE_OPERAND (*expr, 0);
4438 convert = true;
4441 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4442 if (!base || size == -1 || max_size == -1)
4443 return false;
4445 if (TREE_CODE (base) == MEM_REF)
4447 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4448 base = TREE_OPERAND (base, 0);
4451 base = get_ssa_base_param (base);
4452 if (!base || TREE_CODE (base) != PARM_DECL)
4453 return false;
4455 for (i = 0; i < len; i++)
4457 adj = &adjustments[i];
4459 if (adj->base == base &&
4460 (adj->offset == offset || adj->remove_param))
4462 cand = adj;
4463 break;
4466 if (!cand || cand->copy_param || cand->remove_param)
4467 return false;
4469 if (cand->by_ref)
4470 src = build_simple_mem_ref (cand->reduction);
4471 else
4472 src = cand->reduction;
4474 if (dump_file && (dump_flags & TDF_DETAILS))
4476 fprintf (dump_file, "About to replace expr ");
4477 print_generic_expr (dump_file, *expr, 0);
4478 fprintf (dump_file, " with ");
4479 print_generic_expr (dump_file, src, 0);
4480 fprintf (dump_file, "\n");
4483 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4485 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4486 *expr = vce;
4488 else
4489 *expr = src;
4490 return true;
4493 /* If the statement pointed to by STMT_PTR contains any expressions that need
4494 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
4495 potential type incompatibilities (GSI is used to accommodate conversion
4496 statements and must point to the statement). Return true iff the statement
4497 was modified. */
4499 static bool
4500 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4501 ipa_parm_adjustment_vec adjustments)
4503 gimple stmt = *stmt_ptr;
4504 tree *lhs_p, *rhs_p;
4505 bool any;
4507 if (!gimple_assign_single_p (stmt))
4508 return false;
4510 rhs_p = gimple_assign_rhs1_ptr (stmt);
4511 lhs_p = gimple_assign_lhs_ptr (stmt);
4513 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4514 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4515 if (any)
4517 tree new_rhs = NULL_TREE;
4519 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4521 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4523 /* V_C_Es of constructors can cause trouble (PR 42714). */
4524 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4525 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4526 else
4527 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4528 NULL);
4530 else
4531 new_rhs = fold_build1_loc (gimple_location (stmt),
4532 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4533 *rhs_p);
4535 else if (REFERENCE_CLASS_P (*rhs_p)
4536 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4537 && !is_gimple_reg (*lhs_p))
4538 /* This can happen when an assignment in between two single field
4539 structures is turned into an assignment in between two pointers to
4540 scalars (PR 42237). */
4541 new_rhs = *rhs_p;
4543 if (new_rhs)
4545 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4546 true, GSI_SAME_STMT);
4548 gimple_assign_set_rhs_from_tree (gsi, tmp);
4551 return true;
4554 return false;
4557 /* Traverse the function body and perform all modifications as described in
4558 ADJUSTMENTS. Return true iff the CFG has been changed. */
4560 static bool
4561 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4563 bool cfg_changed = false;
4564 basic_block bb;
4566 FOR_EACH_BB (bb)
4568 gimple_stmt_iterator gsi;
4570 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4571 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4573 gsi = gsi_start_bb (bb);
4574 while (!gsi_end_p (gsi))
4576 gimple stmt = gsi_stmt (gsi);
4577 bool modified = false;
4578 tree *t;
4579 unsigned i;
4581 switch (gimple_code (stmt))
4583 case GIMPLE_RETURN:
4584 t = gimple_return_retval_ptr (stmt);
4585 if (*t != NULL_TREE)
4586 modified |= sra_ipa_modify_expr (t, true, adjustments);
4587 break;
4589 case GIMPLE_ASSIGN:
4590 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4591 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4592 break;
4594 case GIMPLE_CALL:
4595 /* Operands must be processed before the lhs. */
4596 for (i = 0; i < gimple_call_num_args (stmt); i++)
4598 t = gimple_call_arg_ptr (stmt, i);
4599 modified |= sra_ipa_modify_expr (t, true, adjustments);
4602 if (gimple_call_lhs (stmt))
4604 t = gimple_call_lhs_ptr (stmt);
4605 modified |= sra_ipa_modify_expr (t, false, adjustments);
4606 modified |= replace_removed_params_ssa_names (stmt,
4607 adjustments);
4609 break;
4611 case GIMPLE_ASM:
4612 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4614 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4615 modified |= sra_ipa_modify_expr (t, true, adjustments);
4617 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4619 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4620 modified |= sra_ipa_modify_expr (t, false, adjustments);
4622 break;
4624 default:
4625 break;
4628 if (modified)
4630 update_stmt (stmt);
4631 if (maybe_clean_eh_stmt (stmt)
4632 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4633 cfg_changed = true;
4635 gsi_next (&gsi);
4639 return cfg_changed;
4642 /* Call gimple_debug_bind_reset_value on all debug statements describing
4643 gimple register parameters that are being removed or replaced. */
4645 static void
4646 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4648 int i, len;
4649 gimple_stmt_iterator *gsip = NULL, gsi;
4651 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4653 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4654 gsip = &gsi;
4656 len = adjustments.length ();
4657 for (i = 0; i < len; i++)
4659 struct ipa_parm_adjustment *adj;
4660 imm_use_iterator ui;
4661 gimple stmt, def_temp;
4662 tree name, vexpr, copy = NULL_TREE;
4663 use_operand_p use_p;
4665 adj = &adjustments[i];
4666 if (adj->copy_param || !is_gimple_reg (adj->base))
4667 continue;
4668 name = ssa_default_def (cfun, adj->base);
4669 vexpr = NULL;
4670 if (name)
4671 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4673 /* All other users must have been removed by
4674 ipa_sra_modify_function_body. */
4675 gcc_assert (is_gimple_debug (stmt));
4676 if (vexpr == NULL && gsip != NULL)
4678 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4679 vexpr = make_node (DEBUG_EXPR_DECL);
4680 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4681 NULL);
4682 DECL_ARTIFICIAL (vexpr) = 1;
4683 TREE_TYPE (vexpr) = TREE_TYPE (name);
4684 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4685 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4687 if (vexpr)
4689 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4690 SET_USE (use_p, vexpr);
4692 else
4693 gimple_debug_bind_reset_value (stmt);
4694 update_stmt (stmt);
4696 /* Create a VAR_DECL for debug info purposes. */
4697 if (!DECL_IGNORED_P (adj->base))
4699 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4700 VAR_DECL, DECL_NAME (adj->base),
4701 TREE_TYPE (adj->base));
4702 if (DECL_PT_UID_SET_P (adj->base))
4703 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4704 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4705 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4706 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4707 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4708 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4709 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4710 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4711 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4712 SET_DECL_RTL (copy, 0);
4713 TREE_USED (copy) = 1;
4714 DECL_CONTEXT (copy) = current_function_decl;
4715 add_local_decl (cfun, copy);
4716 DECL_CHAIN (copy) =
4717 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4718 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4720 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4722 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4723 if (vexpr)
4724 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4725 else
4726 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4727 NULL);
4728 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4733 /* Return false iff all callers have at least as many actual arguments as there
4734 are formal parameters in the current function. */
4736 static bool
4737 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4738 void *data ATTRIBUTE_UNUSED)
4740 struct cgraph_edge *cs;
4741 for (cs = node->callers; cs; cs = cs->next_caller)
4742 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4743 return true;
4745 return false;
4748 /* Convert all callers of NODE. */
4750 static bool
4751 convert_callers_for_node (struct cgraph_node *node,
4752 void *data)
4754 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4755 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4756 struct cgraph_edge *cs;
4758 for (cs = node->callers; cs; cs = cs->next_caller)
4760 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4762 if (dump_file)
4763 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4764 cs->caller->uid, cs->callee->uid,
4765 xstrdup (cgraph_node_name (cs->caller)),
4766 xstrdup (cgraph_node_name (cs->callee)));
4768 if (cs->call_stmt)
4769 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4771 pop_cfun ();
4774 for (cs = node->callers; cs; cs = cs->next_caller)
4775 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4776 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4777 compute_inline_parameters (cs->caller, true);
4778 BITMAP_FREE (recomputed_callers);
4780 return true;
4783 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4785 static void
4786 convert_callers (struct cgraph_node *node, tree old_decl,
4787 ipa_parm_adjustment_vec adjustments)
4789 basic_block this_block;
4791 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4792 &adjustments, false);
4794 if (!encountered_recursive_call)
4795 return;
4797 FOR_EACH_BB (this_block)
4799 gimple_stmt_iterator gsi;
4801 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4803 gimple stmt = gsi_stmt (gsi);
4804 tree call_fndecl;
4805 if (gimple_code (stmt) != GIMPLE_CALL)
4806 continue;
4807 call_fndecl = gimple_call_fndecl (stmt);
4808 if (call_fndecl == old_decl)
4810 if (dump_file)
4811 fprintf (dump_file, "Adjusting recursive call");
4812 gimple_call_set_fndecl (stmt, node->symbol.decl);
4813 ipa_modify_call_arguments (NULL, stmt, adjustments);
4818 return;
4821 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4822 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4824 static bool
4825 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4827 struct cgraph_node *new_node;
4828 bool cfg_changed;
4830 rebuild_cgraph_edges ();
4831 free_dominance_info (CDI_DOMINATORS);
4832 pop_cfun ();
4834 /* This must be done after rebuilding cgraph edges for node above.
4835 Otherwise any recursive calls to node that are recorded in
4836 redirect_callers will be corrupted. */
4837 vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4838 new_node = cgraph_function_versioning (node, redirect_callers,
4839 NULL,
4840 NULL, false, NULL, NULL, "isra");
4841 redirect_callers.release ();
4843 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4844 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4845 cfg_changed = ipa_sra_modify_function_body (adjustments);
4846 sra_ipa_reset_debug_stmts (adjustments);
4847 convert_callers (new_node, node->symbol.decl, adjustments);
4848 cgraph_make_node_local (new_node);
4850 return cfg_changed;
4853 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
4854 attributes, return true otherwise. NODE is the cgraph node of the current
4855 function. */
4857 static bool
4858 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4860 if (!cgraph_node_can_be_local_p (node))
4862 if (dump_file)
4863 fprintf (dump_file, "Function not local to this compilation unit.\n");
4864 return false;
4867 if (!node->local.can_change_signature)
4869 if (dump_file)
4870 fprintf (dump_file, "Function can not change signature.\n");
4871 return false;
4874 if (!tree_versionable_function_p (node->symbol.decl))
4876 if (dump_file)
4877 fprintf (dump_file, "Function is not versionable.\n");
4878 return false;
4881 if (!opt_for_fn (node->symbol.decl, optimize)
4882 || !opt_for_fn (node->symbol.decl, flag_ipa_sra))
4884 if (dump_file)
4885 fprintf (dump_file, "Function not optimized.\n");
4886 return false;
4889 if (DECL_VIRTUAL_P (current_function_decl))
4891 if (dump_file)
4892 fprintf (dump_file, "Function is a virtual method.\n");
4893 return false;
4896 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4897 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4899 if (dump_file)
4900 fprintf (dump_file, "Function too big to be made truly local.\n");
4901 return false;
4904 if (!node->callers)
4906 if (dump_file)
4907 fprintf (dump_file,
4908 "Function has no callers in this compilation unit.\n");
4909 return false;
4912 if (cfun->stdarg)
4914 if (dump_file)
4915 fprintf (dump_file, "Function uses stdarg. \n");
4916 return false;
4919 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4920 return false;
4922 return true;
4925 /* Perform early interprocedural SRA. */
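/* Overall flow of the pass: run the preliminary suitability checks, collect
   candidate parameters, scan the function body recording accesses and
   dereference distances, turn the analysis into a vector of parameter
   adjustments, and finally let modify_function create the "isra" clone and
   rewrite its body and all callers accordingly.  */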
4927 static unsigned int
4928 ipa_early_sra (void)
4930 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4931 ipa_parm_adjustment_vec adjustments;
4932 int ret = 0;
4934 if (!ipa_sra_preliminary_function_checks (node))
4935 return 0;
4937 sra_initialize ();
4938 sra_mode = SRA_MODE_EARLY_IPA;
4940 if (!find_param_candidates ())
4942 if (dump_file)
4943 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4944 goto simple_out;
4947 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4948 NULL, true))
4950 if (dump_file)
4951 fprintf (dump_file, "There are callers with insufficient number of "
4952 "arguments.\n");
4953 goto simple_out;
4956 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4957 func_param_count
4958 * last_basic_block_for_function (cfun));
4959 final_bbs = BITMAP_ALLOC (NULL);
4961 scan_function ();
4962 if (encountered_apply_args)
4964 if (dump_file)
4965 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4966 goto out;
4969 if (encountered_unchangable_recursive_call)
4971 if (dump_file)
4972 fprintf (dump_file, "Function calls itself with insufficient "
4973 "number of arguments.\n");
4974 goto out;
4977 adjustments = analyze_all_param_acesses ();
4978 if (!adjustments.exists ())
4979 goto out;
4980 if (dump_file)
4981 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4983 if (modify_function (node, adjustments))
4984 ret = TODO_update_ssa | TODO_cleanup_cfg;
4985 else
4986 ret = TODO_update_ssa;
4987 adjustments.release ();
4989 statistics_counter_event (cfun, "Unused parameters deleted",
4990 sra_stats.deleted_unused_parameters);
4991 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4992 sra_stats.scalar_by_ref_to_by_val);
4993 statistics_counter_event (cfun, "Aggregate parameters broken up",
4994 sra_stats.aggregate_params_reduced);
4995 statistics_counter_event (cfun, "Aggregate parameter components created",
4996 sra_stats.param_reductions_created);
4998 out:
4999 BITMAP_FREE (final_bbs);
5000 free (bb_dereferences);
5001 simple_out:
5002 sra_deinitialize ();
5003 return ret;
5006 /* Return true iff early IPA-SRA shall be performed. */
5007 static bool
5008 ipa_early_sra_gate (void)
5010 return flag_ipa_sra && !flag_dyn_ipa && dbg_cnt (eipa_sra);
5013 struct gimple_opt_pass pass_early_ipa_sra =
5016 GIMPLE_PASS,
5017 "eipa_sra", /* name */
5018 OPTGROUP_NONE, /* optinfo_flags */
5019 ipa_early_sra_gate, /* gate */
5020 ipa_early_sra, /* execute */
5021 NULL, /* sub */
5022 NULL, /* next */
5023 0, /* static_pass_number */
5024 TV_IPA_SRA, /* tv_id */
5025 0, /* properties_required */
5026 0, /* properties_provided */
5027 0, /* properties_destroyed */
5028 0, /* todo_flags_start */
5029 TODO_dump_symtab /* todo_flags_finish */