/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the
      access and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according
      to their offset and size and then scanned for partially overlapping
      accesses (i.e. those which overlap but one is not entirely within
      another).  Such an access disqualifies the whole aggregate from being
      scalarized.

      If there is no such inhibiting overlap, a representative access
      structure is chosen for every unique combination of offset and size.
      Afterwards, the pass builds a set of trees from these structures, in
      which children of an access are within their parent (in terms of offset
      and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
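
/* As a small hypothetical example of what the intraprocedural passes achieve,
   consider

     struct pair { int a; int b; };

     int
     sum (void)
     {
       struct pair p;
       p.a = 1;
       p.b = 2;
       return p.a + p.b;
     }

   Both field references are turned into uses of independent scalar variables,
   conceptually rewriting the body to

     int SR_a, SR_b;
     SR_a = 1;
     SR_b = 2;
     return SR_a + SR_b;

   after which 'p' is dead and the scalar optimizers can reduce the function
   to 'return 3;'.  (The SR_* names are made up for illustration; the actual
   replacements are created with the "SR" prefix by create_access_replacement
   below.)  */
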
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is a
   duty of the modifying functions to replace them appropriately.  */

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component
     reference.  If EXPR isn't a component reference just set `BASE = EXPR',
     `OFFSET = 0', `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned total_scalarization : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not
     be scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);
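
/* As a hypothetical illustration (assuming 32-bit int), reads of s.in.x,
   s.in and s.z in a variable

     struct S { struct { int x; int y; } in; int z; } s;

   would yield the representatives

     <0, 64>   s.in     root of the first access tree
       <0, 32> s.in.x   first_child of s.in, lies within it
     <64, 32>  s.z      root of the second tree, reached via next_grp

   where <offset, size> are in bits and, as required above, every child is
   "within" its parent.  */
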
/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;

/* A structure linking lhs and rhs accesses from an aggregate assignment.
   They are used to propagate subaccesses from rhs to lhs as long as they
   don't conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;

/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;

/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed
     an expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
             "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_write, access->total_scalarization,
             access->grp_read, access->grp_hint, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root
   in ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;
  else
    return *(VEC (access_p, heap) **) slot;
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses
   rooted in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked
   list in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}

/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
                      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not
   allow scalarization.  */

static bool
type_internals_preclude_sra_p (tree type)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld)
                || !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
                || !host_integerp (DECL_FIELD_OFFSET (fld), 1)
                || !host_integerp (DECL_SIZE (fld), 1)
                || (DECL_BIT_FIELD (fld) && AGGREGATE_TYPE_P (ft)))
              return true;

            if (AGGREGATE_TYPE_P (ft)
                && type_internals_preclude_sra_p (ft))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (AGGREGATE_TYPE_P (et))
        return type_internals_preclude_sra_p (et);
      else
        return false;

    default:
      return false;
    }
}
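
/* For example, assuming hypothetical definitions like

     struct vol   { volatile int v; int w; };
     struct plain { int v; int w; };

   the volatile member makes type_internals_preclude_sra_p return true for
   'struct vol', so no variable of that type ever becomes a candidate, while
   'struct plain' passes the check.  */
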
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);
  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
        pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}

/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  return access;
}

/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of
   fields.  It also returns false if any of these records contains a
   bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
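
/* For instance, assuming a hypothetical 'struct point { int x; int y; };',
   the type

     struct segment { struct point a; struct point b; };

   consists only of records whose fields are gimple register types and thus
   qualifies, whereas a hypothetical 'struct path { struct point p[4]; };'
   does not, because an array is neither a gimple register type nor a
   RECORD_TYPE.  */
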
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression
   for the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_low_cst (DECL_SIZE (fld), 1);
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
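
/* Continuing the hypothetical 'struct segment' example above (and assuming
   32-bit int), completely_scalarize_record creates four artificial accesses

     <0, 32> seg.a.x   <32, 32> seg.a.y   <64, 32> seg.b.x   <96, 32> seg.b.y

   each with total_scalarization set, so that the whole variable can later be
   replaced by four scalars.  */
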
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in
     other handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}

/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must
   be true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than
         an assign statement and thus cannot be removed even if we had a
         scalar replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}

/* Disqualify LHS and RHS for scalarization if STMT must end its basic block
   in modes in which it matters, return true iff they have been disqualified.
   RHS may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}

/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split
   apart.  Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
         fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}

/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
                void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}

/* Return true iff callsite CALL has at least as many actual arguments as
   there are formal parameters of the function currently processed by
   IPA-SRA.  */

static inline bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}

/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (stmt);
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (cgraph_get_node (dest)
                          == cgraph_get_node (current_function_decl))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_has_enough_arguments_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
                                             asm_visit_addr);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);

              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, false);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
                  ret |= build_access_from_expr (t, stmt, true);
                }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}

/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if
   the offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
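
/* For illustration, four hypothetical accesses with (offset, size) pairs

     (0, 32) (0, 64) (32, 32) (0, 32)

   are sorted by the function above into

     (0, 64) (0, 32) (0, 32) (32, 32)

   i.e. by increasing offset, the bigger access first on a tie, so that a
   representative covering a whole region always precedes its
   sub-accesses.  */
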

/* Append a name of the declaration to the name obstack.  A helper function
   for make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}

/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}

/* Create a human readable name for a replacement variable based on the access
   expression EXPR.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
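
/* For instance, for a hypothetical access expression 'rec.fld[3].x', the
   functions above produce the name "rec$fld$3$x", which is then used to name
   the scalar replacement in dumps and debug information.  */
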
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is
   used to insert new statements either before or below the current one as
   specified by INSERT_AFTER.  This function is not capable of handling
   bitfields.  */

tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
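
/* For example, given a hypothetical 'struct pair p' and a request for a
   32-bit integer at bit offset 32, the function above builds the equivalent
   of

     MEM[(int *)&p + 4B]

   i.e. a MEM_REF of the address of 'p' with a byte offset of 4.  No
   statements need to be inserted in this case because
   get_addr_base_and_unit_offset succeeds on a plain declaration.  */
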
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   component, the function will replicate the last COMPONENT_REF of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      tree t, exp_type;
      offset -= int_bit_position (TREE_OPERAND (model->expr, 1));
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi,
                                insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, model->type, t,
                              TREE_OPERAND (model->expr, 1), NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}

/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              pos = int_bit_position (fld);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !host_integerp (tr_size, 1))
                continue;
              size = tree_low_cst (tr_size, 1);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr,
                                                      TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !host_integerp (tr_size, 1))
            return false;
          el_size = tree_low_cst (tr_size, 1);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx, 0);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}

/* Return true iff TYPE is stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}

/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
        continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type)
          || needs_to_live_in_memory (var)
          || TREE_THIS_VOLATILE (var)
          || !COMPLETE_TYPE_P (type)
          || !host_integerp (TYPE_SIZE (type), 1)
          || tree_low_cst (TYPE_SIZE (type), 1) == 0
          || type_internals_preclude_sra_p (type)
          /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact
             but we also want to schedule it rather late.  Thus we ignore it
             in the early pass.  */
          || (sra_mode == SRA_MODE_EARLY_INTRA
              && is_va_list_type (type)))
        continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
      ret = true;
    }

  return ret;
}
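
/* For example, a local variable of the hypothetical 'struct pair' type from
   above becomes a candidate provided its address is never taken
   (needs_to_live_in_memory), it is not volatile and its size is a known
   non-zero constant; a variable-length array never does, because its
   TYPE_SIZE fails the host_integerp test.  */
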
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the
   first one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_reads = false;
      bool total_scalarization = access->total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = VEC_index (access_p, access_vec, j);
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            grp_write = true;
          else
            {
              if (grp_read)
                multiple_reads = true;
              else
                grp_read = true;
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_reads || total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}

/* Create a variable for the given ACCESS which determines the type, name and
   a few other properties.  Return the variable declaration and store it also
   to ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  get_var_ann (repl);
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
          || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
         as DECL_DEBUG_EXPR isn't considered when looking for still
         used SSA_NAMEs and thus they could be freed.  All debug info
         generation cares about is whether something is constant or variable
         and that get_ref_base_and_extent works properly on the
         expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
        switch (TREE_CODE (d))
          {
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
            if (TREE_OPERAND (d, 1)
                && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
              TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
            if (TREE_OPERAND (d, 3)
                && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
              TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
            /* FALLTHRU */
          case COMPONENT_REF:
            if (TREE_OPERAND (d, 2)
                && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
              TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
            break;
          default:
            break;
          }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
        TREE_NO_WARNING (repl) = 1;
      else
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
               (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}

/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses
   partially overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}

/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
        return false;
      root->next_grp = access;
    }
  return true;
}
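
/* Continuing the hypothetical example given after struct access above, the
   chain of representatives sorted as

     <0, 64> s.in  ->  <0, 32> s.in.x  ->  <64, 32> s.z

   is split by the two functions above into two trees: <0, 32> becomes the
   first_child of <0, 64> because it lies within it, while <64, 32> starts a
   new tree, the roots staying connected through next_grp.  */
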
/* Return true if EXPR contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
          && !host_integerp (array_ref_low_bound (expr), 0))
        return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}

enum mark_rw_status { SRA_MRRW_NOTHING, SRA_MRRW_DIRECT, SRA_MRRW_ASSIGN };

/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements
   when both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also
   set all sorts of access flags appropriately along the way, notably always
   set grp_read and grp_assignment_read according to MARK_READ and grp_write
   when MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to individually (once or more times)
   |
   |    Parent written to in an assignment statement
   |    |
   |    |    Access read individually _once_
   |    |    |
   |    |    |    Parent read in an assignment statement
   |    |    |    |
   |    |    |    |    Scalarize    Comment
-----------------------------------------------------------------------------
   0    0    0    0                 No access for the scalar
   0    0    0    1                 No access for the scalar
   0    0    1    0    No           Single read - won't help
   0    0    1    1    No           The same case
   0    1    0    0                 No access for the scalar
   0    1    0    1                 No access for the scalar
   0    1    1    0    Yes          s = *g; return s.i;
   0    1    1    1    Yes          The same case as above
   1    0    0    0    No           Won't help
   1    0    0    1    Yes          s.i = 1; *g = s;
   1    0    1    0    Yes          s.i = 5; g = s.i;
   1    0    1    1    Yes          The same case as above
   1    1    0    0    No           Won't help.
   1    1    0    1    Yes          s.i = 1; *g = s;
   1    1    1    0    Yes          s = *g; return s.i;
   1    1    1    1    Yes          Any of the above yeses  */
1873 static bool
1874 analyze_access_subtree (struct access *root, bool allow_replacements,
1875 enum mark_rw_status mark_read,
1876 enum mark_rw_status mark_write)
1878 struct access *child;
1879 HOST_WIDE_INT limit = root->offset + root->size;
1880 HOST_WIDE_INT covered_to = root->offset;
1881 bool scalar = is_gimple_reg_type (root->type);
1882 bool hole = false, sth_created = false;
1883 bool direct_read = root->grp_read;
1884 bool direct_write = root->grp_write;
1886 if (root->grp_assignment_read)
1887 mark_read = SRA_MRRW_ASSIGN;
1888 else if (mark_read == SRA_MRRW_ASSIGN)
1890 root->grp_read = 1;
1891 root->grp_assignment_read = 1;
1893 else if (mark_read == SRA_MRRW_DIRECT)
1894 root->grp_read = 1;
1895 else if (root->grp_read)
1896 mark_read = SRA_MRRW_DIRECT;
1898 if (root->grp_assignment_write)
1899 mark_write = SRA_MRRW_ASSIGN;
1900 else if (mark_write == SRA_MRRW_ASSIGN)
1902 root->grp_write = 1;
1903 root->grp_assignment_write = 1;
1905 else if (mark_write == SRA_MRRW_DIRECT)
1906 root->grp_write = 1;
1907 else if (root->grp_write)
1908 mark_write = SRA_MRRW_DIRECT;
1910 if (root->grp_unscalarizable_region)
1911 allow_replacements = false;
1913 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1914 allow_replacements = false;
1916 for (child = root->first_child; child; child = child->next_sibling)
1918 if (!hole && child->offset < covered_to)
1919 hole = true;
1920 else
1921 covered_to += child->size;
1923 sth_created |= analyze_access_subtree (child,
1924 allow_replacements && !scalar,
1925 mark_read, mark_write);
1927 root->grp_unscalarized_data |= child->grp_unscalarized_data;
1928 hole |= !child->grp_covered;
1931 if (allow_replacements && scalar && !root->first_child
1932 && (root->grp_hint
1933 || ((direct_write || root->grp_assignment_write)
1934 && (direct_read || root->grp_assignment_read))))
1936 if (dump_file && (dump_flags & TDF_DETAILS))
1938 fprintf (dump_file, "Marking ");
1939 print_generic_expr (dump_file, root->base, 0);
1940 fprintf (dump_file, " offset: %u, size: %u: ",
1941 (unsigned) root->offset, (unsigned) root->size);
1942 fprintf (dump_file, " to be replaced.\n");
1945 root->grp_to_be_replaced = 1;
1946 sth_created = true;
1947 hole = false;
1949 else if (covered_to < limit)
1950 hole = true;
1952 if (sth_created && !hole)
1954 root->grp_covered = 1;
1955 return true;
1957 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1958 root->grp_unscalarized_data = 1; /* not covered and written to */
1959 if (sth_created)
1960 return true;
1961 return false;
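/* To make the table above concrete (a hypothetical example): with

     struct S { int i; float f; };
     struct S s, *g;

   the row "1 0 0 1" is code such as

     s.i = 1;     /* access written to individually */
     *g = s;      /* parent read in an assignment statement */

   where replacing s.i with a scalar lets the store happen in a register
   and leaves only the aggregate copy to touch memory, whereas row
   "1 0 0 0" (the value is written but never read back in any way) gains
   nothing.  */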
1964 /* Analyze all access trees linked by next_grp by means of
1965 analyze_access_subtree. */
1966 static bool
1967 analyze_access_trees (struct access *access)
1969 bool ret = false;
1971 while (access)
1973 if (analyze_access_subtree (access, true,
1974 SRA_MRRW_NOTHING, SRA_MRRW_NOTHING))
1975 ret = true;
1976 access = access->next_grp;
1979 return ret;
1982 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
1983 SIZE would conflict with an already existing one. If exactly such a child
1984 already exists in LACC, store a pointer to it in EXACT_MATCH. */
1986 static bool
1987 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1988 HOST_WIDE_INT size, struct access **exact_match)
1990 struct access *child;
1992 for (child = lacc->first_child; child; child = child->next_sibling)
1994 if (child->offset == norm_offset && child->size == size)
1996 *exact_match = child;
1997 return true;
2000 if (child->offset < norm_offset + size
2001 && child->offset + child->size > norm_offset)
2002 return true;
2005 return false;
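/* For illustration with hypothetical bit offsets: if LACC already has
   children at (0, 32) and (32, 32), a candidate at (32, 32) is an exact
   match returned via *EXACT_MATCH, a candidate at (16, 32) straddles
   both children and therefore conflicts, and a candidate at (64, 32)
   neither conflicts nor matches, so false is returned for it.  */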
2008 /* Create a new child access of PARENT, with all properties just like MODEL
2009 except for its offset and with its grp_write true and grp_read false.
2010 Return the new access or NULL if it cannot be created. Note that this access
2011 is created long after all splicing and sorting, it's not located in any
2012 access vector and is automatically a representative of its group. */
2014 static struct access *
2015 create_artificial_child_access (struct access *parent, struct access *model,
2016 HOST_WIDE_INT new_offset)
2018 struct access *access;
2019 struct access **child;
2020 tree expr = parent->base;
2022 gcc_assert (!model->grp_unscalarizable_region);
2024 access = (struct access *) pool_alloc (access_pool);
2025 memset (access, 0, sizeof (struct access));
2026 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2027 model->type))
2029 access->grp_no_warning = true;
2030 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2031 new_offset, model, NULL, false);
2034 access->base = parent->base;
2035 access->expr = expr;
2036 access->offset = new_offset;
2037 access->size = model->size;
2038 access->type = model->type;
2039 access->grp_write = true;
2040 access->grp_read = false;
2042 child = &parent->first_child;
2043 while (*child && (*child)->offset < new_offset)
2044 child = &(*child)->next_sibling;
2046 access->next_sibling = *child;
2047 *child = access;
2049 return access;
2053 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2054 true if any new subaccess was created. Additionally, if RACC is a scalar
2055 access but LACC is not, change the type of the latter, if possible. */
2057 static bool
2058 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2060 struct access *rchild;
2061 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2062 bool ret = false;
2064 if (is_gimple_reg_type (lacc->type)
2065 || lacc->grp_unscalarizable_region
2066 || racc->grp_unscalarizable_region)
2067 return false;
2069 if (!lacc->first_child && !racc->first_child
2070 && is_gimple_reg_type (racc->type))
2072 tree t = lacc->base;
2074 lacc->type = racc->type;
2075 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2076 racc->type))
2077 lacc->expr = t;
2078 else
2080 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2081 lacc->base, lacc->offset,
2082 racc, NULL, false);
2083 lacc->grp_no_warning = true;
2085 return false;
2088 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2090 struct access *new_acc = NULL;
2091 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2093 if (rchild->grp_unscalarizable_region)
2094 continue;
2096 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2097 &new_acc))
2099 if (new_acc)
2101 rchild->grp_hint = 1;
2102 new_acc->grp_hint |= new_acc->grp_read;
2103 if (rchild->first_child)
2104 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2106 continue;
2109 rchild->grp_hint = 1;
2110 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2111 if (new_acc)
2113 ret = true;
2114 if (racc->first_child)
2115 propagate_subaccesses_across_link (new_acc, rchild);
2119 return ret;
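/* A sketch of what this propagation buys (hypothetical code):

     struct S { int i; int j; } l, r;
     l = r;
     ... use (r.i) ...

   The read of r.i gives r's tree a child at offset 0; propagating it
   across the assignment link creates an artificial child at the same
   relative offset in l's tree, so a later read of l.i can be satisfied
   from a scalar replacement instead of keeping l in memory.  */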
2122 /* Propagate all subaccesses across assignment links. */
2124 static void
2125 propagate_all_subaccesses (void)
2127 while (work_queue_head)
2129 struct access *racc = pop_access_from_work_queue ();
2130 struct assign_link *link;
2132 gcc_assert (racc->first_link);
2134 for (link = racc->first_link; link; link = link->next)
2136 struct access *lacc = link->lacc;
2138 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2139 continue;
2140 lacc = lacc->group_representative;
2141 if (propagate_subaccesses_across_link (lacc, racc)
2142 && lacc->first_link)
2143 add_access_to_work_queue (lacc);
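/* The work queue makes the propagation transitive.  In a hypothetical
   chain

     a = b;  b = c;  ... use (c.i) ...

   propagating c.i into b's tree modifies b, which is itself the RHS of
   another link, so b is re-queued and the subaccess travels on into a's
   tree until a fixed point is reached.  */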
2148 /* Go through all accesses collected throughout the (intraprocedural) analysis
2149 stage, exclude overlapping ones, identify representatives and build trees
2150 out of them, making decisions about scalarization on the way. Return true
2151 iff there are any to-be-scalarized variables after this stage. */
2153 static bool
2154 analyze_all_variable_accesses (void)
2156 int res = 0;
2157 bitmap tmp = BITMAP_ALLOC (NULL);
2158 bitmap_iterator bi;
2159 unsigned i, max_total_scalarization_size;
2161 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2162 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2164 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2165 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2166 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2168 tree var = referenced_var (i);
2170 if (TREE_CODE (var) == VAR_DECL
2171 && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2172 <= max_total_scalarization_size)
2173 && type_consists_of_records_p (TREE_TYPE (var)))
2175 completely_scalarize_record (var, var, 0, var);
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2178 fprintf (dump_file, "Will attempt to totally scalarize ");
2179 print_generic_expr (dump_file, var, 0);
2180 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2185 bitmap_copy (tmp, candidate_bitmap);
2186 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2188 tree var = referenced_var (i);
2189 struct access *access;
2191 access = sort_and_splice_var_accesses (var);
2192 if (!access || !build_access_trees (access))
2193 disqualify_candidate (var,
2194 "No or inhibitingly overlapping accesses.");
2197 propagate_all_subaccesses ();
2199 bitmap_copy (tmp, candidate_bitmap);
2200 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2202 tree var = referenced_var (i);
2203 struct access *access = get_first_repr_for_decl (var);
2205 if (analyze_access_trees (access))
2207 res++;
2208 if (dump_file && (dump_flags & TDF_DETAILS))
2210 fprintf (dump_file, "\nAccess trees for ");
2211 print_generic_expr (dump_file, var, 0);
2212 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2213 dump_access_tree (dump_file, access);
2214 fprintf (dump_file, "\n");
2217 else
2218 disqualify_candidate (var, "No scalar replacements to be created.");
2221 BITMAP_FREE (tmp);
2223 if (res)
2225 statistics_counter_event (cfun, "Scalarized aggregates", res);
2226 return true;
2228 else
2229 return false;
2232 /* Generate statements copying scalar replacements of accesses within a subtree
2233 into or out of AGG. ACCESS, all its children, siblings and their children
2234 are to be processed. AGG is an aggregate type expression (can be a
2235 declaration but does not have to be, it can for example also be a MEM_REF or
2236 a series of handled components). TOP_OFFSET is the offset of the processed
2237 subtree which has to be subtracted from offsets of individual accesses to
2238 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2239 replacements in the interval <start_offset, start_offset + chunk_size>,
2240 otherwise copy all. GSI is a statement iterator used to place the new
2241 statements. WRITE should be true when the statements should write from AGG
2242 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2243 statements will be added after the current statement in GSI; otherwise they
2244 will be added before it. */
2246 static void
2247 generate_subtree_copies (struct access *access, tree agg,
2248 HOST_WIDE_INT top_offset,
2249 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2250 gimple_stmt_iterator *gsi, bool write,
2251 bool insert_after, location_t loc)
2255 if (chunk_size && access->offset >= start_offset + chunk_size)
2256 return;
2258 if (access->grp_to_be_replaced
2259 && (chunk_size == 0
2260 || access->offset + access->size > start_offset))
2262 tree expr, repl = get_access_replacement (access);
2263 gimple stmt;
2265 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2266 access, gsi, insert_after);
2268 if (write)
2270 if (access->grp_partial_lhs)
2271 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2272 !insert_after,
2273 insert_after ? GSI_NEW_STMT
2274 : GSI_SAME_STMT);
2275 stmt = gimple_build_assign (repl, expr);
2277 else
2279 TREE_NO_WARNING (repl) = 1;
2280 if (access->grp_partial_lhs)
2281 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2282 !insert_after,
2283 insert_after ? GSI_NEW_STMT
2284 : GSI_SAME_STMT);
2285 stmt = gimple_build_assign (expr, repl);
2287 gimple_set_location (stmt, loc);
2289 if (insert_after)
2290 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2291 else
2292 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2293 update_stmt (stmt);
2294 sra_stats.subtree_copies++;
2297 if (access->first_child)
2298 generate_subtree_copies (access->first_child, agg, top_offset,
2299 start_offset, chunk_size, gsi,
2300 write, insert_after, loc);
2302 access = access->next_sibling;
2304 while (access);
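/* Roughly, for an aggregate A whose components at fields i and f have
   replacements (the names A$i and A$f are hypothetical pretty names),
   a call with WRITE true emits

     A$i = A.i;
     A$f = A.f;

   and a call with WRITE false emits the mirror image

     A.i = A$i;
     A.f = A$f;

   at the position selected by GSI and INSERT_AFTER.  */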
2307 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2308 root of the subtree to be processed. GSI is the statement iterator used
2309 for inserting statements which are added after the current statement if
2310 INSERT_AFTER is true or before it otherwise. */
2312 static void
2313 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2314 bool insert_after, location_t loc)
2317 struct access *child;
2319 if (access->grp_to_be_replaced)
2321 gimple stmt;
2323 stmt = gimple_build_assign (get_access_replacement (access),
2324 build_zero_cst (access->type));
2325 if (insert_after)
2326 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2327 else
2328 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2329 update_stmt (stmt);
2330 gimple_set_location (stmt, loc);
2333 for (child = access->first_child; child; child = child->next_sibling)
2334 init_subtree_with_zero (child, gsi, insert_after, loc);
2337 /* Search for an access representative for the given expression EXPR and
2338 return it or NULL if it cannot be found. */
2340 static struct access *
2341 get_access_for_expr (tree expr)
2343 HOST_WIDE_INT offset, size, max_size;
2344 tree base;
2346 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2347 a different size than the size of its argument and we need the latter
2348 one. */
2349 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2350 expr = TREE_OPERAND (expr, 0);
2352 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2353 if (max_size == -1 || !DECL_P (base))
2354 return NULL;
2356 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2357 return NULL;
2359 return get_var_base_offset_size_access (base, offset, max_size);
2362 /* Replace the expression EXPR with a scalar replacement if there is one and
2363 generate other statements to do type conversion or subtree copying if
2364 necessary. GSI is used to place newly created statements, WRITE is true if
2365 the expression is being written to (it is on a LHS of a statement or output
2366 in an assembly statement). */
2368 static bool
2369 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2371 location_t loc;
2372 struct access *access;
2373 tree type, bfr;
2375 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2377 bfr = *expr;
2378 expr = &TREE_OPERAND (*expr, 0);
2380 else
2381 bfr = NULL_TREE;
2383 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2384 expr = &TREE_OPERAND (*expr, 0);
2385 access = get_access_for_expr (*expr);
2386 if (!access)
2387 return false;
2388 type = TREE_TYPE (*expr);
2390 loc = gimple_location (gsi_stmt (*gsi));
2391 if (access->grp_to_be_replaced)
2393 tree repl = get_access_replacement (access);
2394 /* If we replace a non-register typed access simply use the original
2395 access expression to extract the scalar component afterwards.
2396 This happens if scalarizing a function return value or parameter
2397 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2398 gcc.c-torture/compile/20011217-1.c.
2400 We also want to use this when accessing a complex or vector which can
2401 be accessed as a different type too, potentially creating a need for
2402 type conversion (see PR42196) and when scalarized unions are involved
2403 in assembler statements (see PR42398). */
2404 if (!useless_type_conversion_p (type, access->type))
2406 tree ref;
2408 ref = build_ref_for_model (loc, access->base, access->offset, access,
2409 NULL, false);
2411 if (write)
2413 gimple stmt;
2415 if (access->grp_partial_lhs)
2416 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2417 false, GSI_NEW_STMT);
2418 stmt = gimple_build_assign (repl, ref);
2419 gimple_set_location (stmt, loc);
2420 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2422 else
2424 gimple stmt;
2426 if (access->grp_partial_lhs)
2427 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2428 true, GSI_SAME_STMT);
2429 stmt = gimple_build_assign (ref, repl);
2430 gimple_set_location (stmt, loc);
2431 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2434 else
2435 *expr = repl;
2436 sra_stats.exprs++;
2439 if (access->first_child)
2441 HOST_WIDE_INT start_offset, chunk_size;
2442 if (bfr
2443 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2444 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2446 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2447 start_offset = access->offset
2448 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2450 else
2451 start_offset = chunk_size = 0;
2453 generate_subtree_copies (access->first_child, access->base, 0,
2454 start_offset, chunk_size, gsi, write, write,
2455 loc);
2457 return true;
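/* For example (hypothetical), with a replacement s$i for s.i this turns

     x = s.i;   into   x = s$i;

   while in the non-register-type and type-mismatch situations described
   above it keeps the original reference and adds an extra copy between
   it and the replacement before or after the statement.  */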
2460 /* Where scalar replacements of the RHS have been written to when a replacement
2461 of an LHS of an assignment cannot be directly loaded from a replacement of
2462 the RHS. */
2463 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2464 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2465 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2467 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2468 base aggregate if there are unscalarized data or directly to LHS of the
2469 statement that is pointed to by GSI otherwise. */
2471 static enum unscalarized_data_handling
2472 handle_unscalarized_data_in_subtree (struct access *top_racc,
2473 gimple_stmt_iterator *gsi)
2475 if (top_racc->grp_unscalarized_data)
2477 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2478 gsi, false, false,
2479 gimple_location (gsi_stmt (*gsi)));
2480 return SRA_UDH_RIGHT;
2482 else
2484 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2485 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2486 0, 0, gsi, false, false,
2487 gimple_location (gsi_stmt (*gsi)));
2488 return SRA_UDH_LEFT;
2493 /* Try to generate statements to load all sub-replacements in an access subtree
2494 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2495 If that is not possible, refresh the TOP_RACC base aggregate and load the
2496 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2497 copied. NEW_GSI is stmt iterator used for statement insertions after the
2498 original assignment, OLD_GSI is used to insert statements before the
2499 assignment. *REFRESHED keeps the information whether we have needed to
2500 refresh replacements of the LHS and from which side of the assignment this
2501 takes place. */
2503 static void
2504 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2505 HOST_WIDE_INT left_offset,
2506 gimple_stmt_iterator *old_gsi,
2507 gimple_stmt_iterator *new_gsi,
2508 enum unscalarized_data_handling *refreshed)
2510 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2511 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2513 if (lacc->grp_to_be_replaced)
2515 struct access *racc;
2516 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2517 gimple stmt;
2518 tree rhs;
2520 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2521 if (racc && racc->grp_to_be_replaced)
2523 rhs = get_access_replacement (racc);
2524 if (!useless_type_conversion_p (lacc->type, racc->type))
2525 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2527 else
2529 /* No suitable access on the right hand side, need to load from
2530 the aggregate. See if we have to update it first... */
2531 if (*refreshed == SRA_UDH_NONE)
2532 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2533 old_gsi);
2535 if (*refreshed == SRA_UDH_LEFT)
2536 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2537 new_gsi, true);
2538 else
2539 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2540 new_gsi, true);
2543 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2544 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2545 gimple_set_location (stmt, loc);
2546 update_stmt (stmt);
2547 sra_stats.subreplacements++;
2549 else if (*refreshed == SRA_UDH_NONE
2550 && lacc->grp_read && !lacc->grp_covered)
2551 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2552 old_gsi);
2554 if (lacc->first_child)
2555 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2556 old_gsi, new_gsi, refreshed);
2560 /* Result code for SRA assignment modification. */
2561 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2562 SRA_AM_MODIFIED, /* stmt changed but not
2563 removed */
2564 SRA_AM_REMOVED }; /* stmt eliminated */
2566 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2567 to the assignment and GSI is the statement iterator pointing at it. Returns
2568 the same values as sra_modify_assign. */
2570 static enum assignment_mod_result
2571 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2573 tree lhs = gimple_assign_lhs (*stmt);
2574 struct access *acc;
2575 location_t loc;
2577 acc = get_access_for_expr (lhs);
2578 if (!acc)
2579 return SRA_AM_NONE;
2581 loc = gimple_location (*stmt);
2582 if (VEC_length (constructor_elt,
2583 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2585 /* I have never seen this code path trigger but if it can happen the
2586 following should handle it gracefully. */
2587 if (access_has_children_p (acc))
2588 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2589 true, true, loc);
2590 return SRA_AM_MODIFIED;
2593 if (acc->grp_covered)
2595 init_subtree_with_zero (acc, gsi, false, loc);
2596 unlink_stmt_vdef (*stmt);
2597 gsi_remove (gsi, true);
2598 return SRA_AM_REMOVED;
2600 else
2602 init_subtree_with_zero (acc, gsi, true, loc);
2603 return SRA_AM_MODIFIED;
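/* Roughly (a hypothetical example), an all-zero constructor assignment
   such as the gimplified form of

     s = (struct S) { 0 };

   becomes, when the LHS access tree is fully covered by replacements,

     s$i = 0;
     s$f = 0.0;

   and the original statement is removed (SRA_AM_REMOVED); otherwise the
   zeroing statements are added and the original is kept.  */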
2607 /* Create and return a new suitable default definition SSA_NAME for RACC which
2608 is an access describing an uninitialized part of an aggregate that is being
2609 loaded. */
2611 static tree
2612 get_repl_default_def_ssa_name (struct access *racc)
2614 tree repl, decl;
2616 decl = get_unrenamed_access_replacement (racc);
2618 repl = gimple_default_def (cfun, decl);
2619 if (!repl)
2621 repl = make_ssa_name (decl, gimple_build_nop ());
2622 set_default_def (decl, repl);
2625 return repl;
2628 /* Return true if REF has a COMPONENT_REF with a bit-field declaration
2629 somewhere in it. */
2631 static inline bool
2632 contains_bitfld_comp_ref_p (const_tree ref)
2634 while (handled_component_p (ref))
2636 if (TREE_CODE (ref) == COMPONENT_REF
2637 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2638 return true;
2639 ref = TREE_OPERAND (ref, 0);
2642 return false;
2645 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2646 bit-field declaration somewhere in it. */
2648 static inline bool
2649 contains_vce_or_bfcref_p (const_tree ref)
2651 while (handled_component_p (ref))
2653 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2654 || (TREE_CODE (ref) == COMPONENT_REF
2655 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2656 return true;
2657 ref = TREE_OPERAND (ref, 0);
2660 return false;
2663 /* Examine both sides of the assignment statement pointed to by STMT, replace
2664 them with a scalar replacement if there is one and generate copying of
2665 replacements if scalarized aggregates have been used in the assignment. GSI
2666 is used to hold generated statements for type conversions and subtree
2667 copying. */
2669 static enum assignment_mod_result
2670 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2672 struct access *lacc, *racc;
2673 tree lhs, rhs;
2674 bool modify_this_stmt = false;
2675 bool force_gimple_rhs = false;
2676 location_t loc;
2677 gimple_stmt_iterator orig_gsi = *gsi;
2679 if (!gimple_assign_single_p (*stmt))
2680 return SRA_AM_NONE;
2681 lhs = gimple_assign_lhs (*stmt);
2682 rhs = gimple_assign_rhs1 (*stmt);
2684 if (TREE_CODE (rhs) == CONSTRUCTOR)
2685 return sra_modify_constructor_assign (stmt, gsi);
2687 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2688 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2689 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2691 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2692 gsi, false);
2693 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2694 gsi, true);
2695 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2698 lacc = get_access_for_expr (lhs);
2699 racc = get_access_for_expr (rhs);
2700 if (!lacc && !racc)
2701 return SRA_AM_NONE;
2703 loc = gimple_location (*stmt);
2704 if (lacc && lacc->grp_to_be_replaced)
2706 lhs = get_access_replacement (lacc);
2707 gimple_assign_set_lhs (*stmt, lhs);
2708 modify_this_stmt = true;
2709 if (lacc->grp_partial_lhs)
2710 force_gimple_rhs = true;
2711 sra_stats.exprs++;
2714 if (racc && racc->grp_to_be_replaced)
2716 rhs = get_access_replacement (racc);
2717 modify_this_stmt = true;
2718 if (racc->grp_partial_lhs)
2719 force_gimple_rhs = true;
2720 sra_stats.exprs++;
2723 if (modify_this_stmt)
2725 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2727 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2728 ??? This should move to fold_stmt which we simply should
2729 call after building a VIEW_CONVERT_EXPR here. */
2730 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2731 && !contains_bitfld_comp_ref_p (lhs)
2732 && !access_has_children_p (lacc))
2734 lhs = build_ref_for_offset (loc, lhs, 0, TREE_TYPE (rhs),
2735 gsi, false);
2736 gimple_assign_set_lhs (*stmt, lhs);
2738 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2739 && !contains_vce_or_bfcref_p (rhs)
2740 && !access_has_children_p (racc))
2741 rhs = build_ref_for_offset (loc, rhs, 0, TREE_TYPE (lhs),
2742 gsi, false);
2744 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2746 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2747 rhs);
2748 if (is_gimple_reg_type (TREE_TYPE (lhs))
2749 && TREE_CODE (lhs) != SSA_NAME)
2750 force_gimple_rhs = true;
2755 /* From this point on, the function deals with assignments in between
2756 aggregates when at least one has scalar reductions of some of its
2757 components. There are three possible scenarios: 1) both the LHS and the RHS
2758 have to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
2760 In the first case, we would like to load the LHS components from RHS
2761 components whenever possible. If that is not possible, we would like to
2762 read it directly from the RHS (after updating it by storing in it its own
2763 components). If there are some necessary unscalarized data in the LHS,
2764 those will be loaded by the original assignment too. If neither of these
2765 cases happen, the original statement can be removed. Most of this is done
2766 by load_assign_lhs_subreplacements.
2768 In the second case, we would like to store all RHS scalarized components
2769 directly into LHS and if they cover the aggregate completely, remove the
2770 statement too. In the third case, we want the LHS components to be loaded
2771 directly from the RHS (DSE will remove the original statement if it
2772 becomes redundant).
2774 This is a bit complex but manageable when types match and when unions do
2775 not cause confusion in a way that we cannot really load a component of LHS
2776 from the RHS or vice versa (the access representing this level can have
2777 subaccesses that are accessible only through a different union field at a
2778 higher level - different from the one used in the examined expression).
2779 Unions are fun.
2781 Therefore, I specially handle a fourth case, happening when there is a
2782 specific type cast or it is impossible to locate a scalarized subaccess on
2783 the other side of the expression. If that happens, I simply "refresh" the
2784 RHS by storing its scalarized components back into it, leave the original
2785 statement there to do the copying, and then load the scalar replacements of the LHS.
2786 This is what the first branch does. */
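/* Concretely (a hypothetical example): for

     struct S { int i; int j; } a, b;
     a = b;

   with both sides scalarized, the above boils down to emitting

     a$i = b$i;
     a$j = b$j;

   and deleting the aggregate copy; if only the RHS is scalarized its
   replacements are stored straight into a, and if only the LHS is, its
   replacements are loaded from b while DSE cleans up the copy.  */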
2788 if (gimple_has_volatile_ops (*stmt)
2789 || contains_vce_or_bfcref_p (rhs)
2790 || contains_vce_or_bfcref_p (lhs))
2792 if (access_has_children_p (racc))
2793 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2794 gsi, false, false, loc);
2795 if (access_has_children_p (lacc))
2796 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2797 gsi, true, true, loc);
2798 sra_stats.separate_lhs_rhs_handling++;
2800 else
2802 if (access_has_children_p (lacc) && access_has_children_p (racc))
2804 gimple_stmt_iterator orig_gsi = *gsi;
2805 enum unscalarized_data_handling refreshed;
2807 if (lacc->grp_read && !lacc->grp_covered)
2808 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2809 else
2810 refreshed = SRA_UDH_NONE;
2812 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2813 &orig_gsi, gsi, &refreshed);
2814 if (refreshed != SRA_UDH_RIGHT)
2816 gsi_next (gsi);
2817 unlink_stmt_vdef (*stmt);
2818 gsi_remove (&orig_gsi, true);
2819 sra_stats.deleted++;
2820 return SRA_AM_REMOVED;
2823 else
2825 if (racc)
2827 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2829 if (dump_file)
2831 fprintf (dump_file, "Removing load: ");
2832 print_gimple_stmt (dump_file, *stmt, 0, 0);
2835 if (TREE_CODE (lhs) == SSA_NAME)
2837 rhs = get_repl_default_def_ssa_name (racc);
2838 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2839 TREE_TYPE (rhs)))
2840 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2841 TREE_TYPE (lhs), rhs);
2843 else
2845 if (racc->first_child)
2846 generate_subtree_copies (racc->first_child, lhs,
2847 racc->offset, 0, 0, gsi,
2848 false, false, loc);
2850 gcc_assert (*stmt == gsi_stmt (*gsi));
2851 unlink_stmt_vdef (*stmt);
2852 gsi_remove (gsi, true);
2853 sra_stats.deleted++;
2854 return SRA_AM_REMOVED;
2857 else if (racc->first_child)
2858 generate_subtree_copies (racc->first_child, lhs, racc->offset,
2859 0, 0, gsi, false, true, loc);
2861 if (access_has_children_p (lacc))
2862 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2863 0, 0, gsi, true, true, loc);
2867 /* This gimplification must be done after generate_subtree_copies, lest we
2868 insert the subtree copies in the middle of the gimplified sequence. */
2869 if (force_gimple_rhs)
2870 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2871 true, GSI_SAME_STMT);
2872 if (gimple_assign_rhs1 (*stmt) != rhs)
2874 modify_this_stmt = true;
2875 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2876 gcc_assert (*stmt == gsi_stmt (orig_gsi));
2879 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2882 /* Traverse the function body and carry out all modifications as decided in
2883 analyze_all_variable_accesses. Return true iff the CFG has been
2884 changed. */
2886 static bool
2887 sra_modify_function_body (void)
2889 bool cfg_changed = false;
2890 basic_block bb;
2892 FOR_EACH_BB (bb)
2894 gimple_stmt_iterator gsi = gsi_start_bb (bb);
2895 while (!gsi_end_p (gsi))
2897 gimple stmt = gsi_stmt (gsi);
2898 enum assignment_mod_result assign_result;
2899 bool modified = false, deleted = false;
2900 tree *t;
2901 unsigned i;
2903 switch (gimple_code (stmt))
2905 case GIMPLE_RETURN:
2906 t = gimple_return_retval_ptr (stmt);
2907 if (*t != NULL_TREE)
2908 modified |= sra_modify_expr (t, &gsi, false);
2909 break;
2911 case GIMPLE_ASSIGN:
2912 assign_result = sra_modify_assign (&stmt, &gsi);
2913 modified |= assign_result == SRA_AM_MODIFIED;
2914 deleted = assign_result == SRA_AM_REMOVED;
2915 break;
2917 case GIMPLE_CALL:
2918 /* Operands must be processed before the lhs. */
2919 for (i = 0; i < gimple_call_num_args (stmt); i++)
2921 t = gimple_call_arg_ptr (stmt, i);
2922 modified |= sra_modify_expr (t, &gsi, false);
2925 if (gimple_call_lhs (stmt))
2927 t = gimple_call_lhs_ptr (stmt);
2928 modified |= sra_modify_expr (t, &gsi, true);
2930 break;
2932 case GIMPLE_ASM:
2933 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
2935 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
2936 modified |= sra_modify_expr (t, &gsi, false);
2938 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
2940 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
2941 modified |= sra_modify_expr (t, &gsi, true);
2943 break;
2945 default:
2946 break;
2949 if (modified)
2951 update_stmt (stmt);
2952 if (maybe_clean_eh_stmt (stmt)
2953 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2954 cfg_changed = true;
2956 if (!deleted)
2957 gsi_next (&gsi);
2961 return cfg_changed;
2964 /* Generate statements initializing scalar replacements of parts of function
2965 parameters. */
2967 static void
2968 initialize_parameter_reductions (void)
2970 gimple_stmt_iterator gsi;
2971 gimple_seq seq = NULL;
2972 tree parm;
2974 for (parm = DECL_ARGUMENTS (current_function_decl);
2975 parm;
2976 parm = DECL_CHAIN (parm))
2978 VEC (access_p, heap) *access_vec;
2979 struct access *access;
2981 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2982 continue;
2983 access_vec = get_base_access_vector (parm);
2984 if (!access_vec)
2985 continue;
2987 if (!seq)
2989 seq = gimple_seq_alloc ();
2990 gsi = gsi_start (seq);
2993 for (access = VEC_index (access_p, access_vec, 0);
2994 access;
2995 access = access->next_grp)
2996 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
2997 EXPR_LOCATION (parm));
3000 if (seq)
3001 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
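/* E.g. (hypothetical) for a function taking a struct S parameter p
   whose fields i and f were scalarized, this emits on the single edge
   leaving the entry block

     p$i = p.i;
     p$f = p.f;

   so the replacements start out holding the caller-supplied values.  */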
3004 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3005 it reveals there are components of some aggregates to be scalarized, it runs
3006 the required transformations. */
3007 static unsigned int
3008 perform_intra_sra (void)
3010 int ret = 0;
3011 sra_initialize ();
3013 if (!find_var_candidates ())
3014 goto out;
3016 if (!scan_function ())
3017 goto out;
3019 if (!analyze_all_variable_accesses ())
3020 goto out;
3022 if (sra_modify_function_body ())
3023 ret = TODO_update_ssa | TODO_cleanup_cfg;
3024 else
3025 ret = TODO_update_ssa;
3026 initialize_parameter_reductions ();
3028 statistics_counter_event (cfun, "Scalar replacements created",
3029 sra_stats.replacements);
3030 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3031 statistics_counter_event (cfun, "Subtree copy stmts",
3032 sra_stats.subtree_copies);
3033 statistics_counter_event (cfun, "Subreplacement stmts",
3034 sra_stats.subreplacements);
3035 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3036 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3037 sra_stats.separate_lhs_rhs_handling);
3039 out:
3040 sra_deinitialize ();
3041 return ret;
3044 /* Perform early intraprocedural SRA. */
3045 static unsigned int
3046 early_intra_sra (void)
3048 sra_mode = SRA_MODE_EARLY_INTRA;
3049 return perform_intra_sra ();
3052 /* Perform "late" intraprocedural SRA. */
3053 static unsigned int
3054 late_intra_sra (void)
3056 sra_mode = SRA_MODE_INTRA;
3057 return perform_intra_sra ();
3061 static bool
3062 gate_intra_sra (void)
3064 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3068 struct gimple_opt_pass pass_sra_early =
3071 GIMPLE_PASS,
3072 "esra", /* name */
3073 gate_intra_sra, /* gate */
3074 early_intra_sra, /* execute */
3075 NULL, /* sub */
3076 NULL, /* next */
3077 0, /* static_pass_number */
3078 TV_TREE_SRA, /* tv_id */
3079 PROP_cfg | PROP_ssa, /* properties_required */
3080 0, /* properties_provided */
3081 0, /* properties_destroyed */
3082 0, /* todo_flags_start */
3083 TODO_dump_func
3084 | TODO_update_ssa
3085 | TODO_ggc_collect
3086 | TODO_verify_ssa /* todo_flags_finish */
3090 struct gimple_opt_pass pass_sra =
3093 GIMPLE_PASS,
3094 "sra", /* name */
3095 gate_intra_sra, /* gate */
3096 late_intra_sra, /* execute */
3097 NULL, /* sub */
3098 NULL, /* next */
3099 0, /* static_pass_number */
3100 TV_TREE_SRA, /* tv_id */
3101 PROP_cfg | PROP_ssa, /* properties_required */
3102 0, /* properties_provided */
3103 0, /* properties_destroyed */
3104 TODO_update_address_taken, /* todo_flags_start */
3105 TODO_dump_func
3106 | TODO_update_ssa
3107 | TODO_ggc_collect
3108 | TODO_verify_ssa /* todo_flags_finish */
3113 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3114 parameter. */
3116 static bool
3117 is_unused_scalar_param (tree parm)
3119 tree name;
3120 return (is_gimple_reg (parm)
3121 && (!(name = gimple_default_def (cfun, parm))
3122 || has_zero_uses (name)));
3125 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3126 examine whether there are any direct or otherwise infeasible ones. If so,
3127 return true, otherwise return false. PARM must be a gimple register with a
3128 non-NULL default definition. */
3130 static bool
3131 ptr_parm_has_direct_uses (tree parm)
3133 imm_use_iterator ui;
3134 gimple stmt;
3135 tree name = gimple_default_def (cfun, parm);
3136 bool ret = false;
3138 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3140 int uses_ok = 0;
3141 use_operand_p use_p;
3143 if (is_gimple_debug (stmt))
3144 continue;
3146 /* Valid uses include dereferences on the lhs and the rhs. */
3147 if (gimple_has_lhs (stmt))
3149 tree lhs = gimple_get_lhs (stmt);
3150 while (handled_component_p (lhs))
3151 lhs = TREE_OPERAND (lhs, 0);
3152 if (TREE_CODE (lhs) == MEM_REF
3153 && TREE_OPERAND (lhs, 0) == name
3154 && integer_zerop (TREE_OPERAND (lhs, 1))
3155 && types_compatible_p (TREE_TYPE (lhs),
3156 TREE_TYPE (TREE_TYPE (name))))
3157 uses_ok++;
3159 if (gimple_assign_single_p (stmt))
3161 tree rhs = gimple_assign_rhs1 (stmt);
3162 while (handled_component_p (rhs))
3163 rhs = TREE_OPERAND (rhs, 0);
3164 if (TREE_CODE (rhs) == MEM_REF
3165 && TREE_OPERAND (rhs, 0) == name
3166 && integer_zerop (TREE_OPERAND (rhs, 1))
3167 && types_compatible_p (TREE_TYPE (rhs),
3168 TREE_TYPE (TREE_TYPE (name))))
3169 uses_ok++;
3171 else if (is_gimple_call (stmt))
3173 unsigned i;
3174 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3176 tree arg = gimple_call_arg (stmt, i);
3177 while (handled_component_p (arg))
3178 arg = TREE_OPERAND (arg, 0);
3179 if (TREE_CODE (arg) == MEM_REF
3180 && TREE_OPERAND (arg, 0) == name
3181 && integer_zerop (TREE_OPERAND (arg, 1))
3182 && types_compatible_p (TREE_TYPE (arg),
3183 TREE_TYPE (TREE_TYPE (name))))
3184 uses_ok++;
3188 /* If the number of valid uses does not match the number of
3189 uses in this stmt there is an unhandled use. */
3190 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3191 --uses_ok;
3193 if (uses_ok != 0)
3194 ret = true;
3196 if (ret)
3197 BREAK_FROM_IMM_USE_STMT (ui);
3200 return ret;
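/* Two hypothetical examples of the above classification:

     int f (int *p) { return *p; }

   has only the dereference *p as a use of p's default definition, which
   is counted as ok, so the function returns false and p stays an
   IPA-SRA candidate, whereas

     int *g (int *p) { return p; }

   uses the pointer value itself, that use is not counted as ok, and the
   function returns true.  */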
3203 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3204 them in candidate_bitmap. Note that these do not necessarily include
3205 parameters which are unused and thus can be removed. Return true iff any
3206 such candidate has been found. */
3208 static bool
3209 find_param_candidates (void)
3211 tree parm;
3212 int count = 0;
3213 bool ret = false;
3215 for (parm = DECL_ARGUMENTS (current_function_decl);
3216 parm;
3217 parm = DECL_CHAIN (parm))
3219 tree type = TREE_TYPE (parm);
3221 count++;
3223 if (TREE_THIS_VOLATILE (parm)
3224 || TREE_ADDRESSABLE (parm)
3225 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3226 continue;
3228 if (is_unused_scalar_param (parm))
3230 ret = true;
3231 continue;
3234 if (POINTER_TYPE_P (type))
3236 type = TREE_TYPE (type);
3238 if (TREE_CODE (type) == FUNCTION_TYPE
3239 || TYPE_VOLATILE (type)
3240 || (TREE_CODE (type) == ARRAY_TYPE
3241 && TYPE_NONALIASED_COMPONENT (type))
3242 || !is_gimple_reg (parm)
3243 || is_va_list_type (type)
3244 || ptr_parm_has_direct_uses (parm))
3245 continue;
3247 else if (!AGGREGATE_TYPE_P (type))
3248 continue;
3250 if (!COMPLETE_TYPE_P (type)
3251 || !host_integerp (TYPE_SIZE (type), 1)
3252 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3253 || (AGGREGATE_TYPE_P (type)
3254 && type_internals_preclude_sra_p (type)))
3255 continue;
3257 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3258 ret = true;
3259 if (dump_file && (dump_flags & TDF_DETAILS))
3261 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3262 print_generic_expr (dump_file, parm, 0);
3263 fprintf (dump_file, "\n");
3267 func_param_count = count;
3268 return ret;
3271 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3272 maybe_modified. */
3274 static bool
3275 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3276 void *data)
3278 struct access *repr = (struct access *) data;
3280 repr->grp_maybe_modified = 1;
3281 return true;
3284 /* Analyze what representatives (in linked lists accessible from
3285 REPRESENTATIVES) can be modified by side effects of statements in the
3286 current function. */
3288 static void
3289 analyze_modified_params (VEC (access_p, heap) *representatives)
3291 int i;
3293 for (i = 0; i < func_param_count; i++)
3295 struct access *repr;
3297 for (repr = VEC_index (access_p, representatives, i);
3298 repr;
3299 repr = repr->next_grp)
3301 struct access *access;
3302 bitmap visited;
3303 ao_ref ar;
3305 if (no_accesses_p (repr))
3306 continue;
3307 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3308 || repr->grp_maybe_modified)
3309 continue;
3311 ao_ref_init (&ar, repr->expr);
3312 visited = BITMAP_ALLOC (NULL);
3313 for (access = repr; access; access = access->next_sibling)
3315 /* All accesses are read ones, otherwise grp_maybe_modified would
3316 be trivially set. */
3317 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3318 mark_maybe_modified, repr, &visited);
3319 if (repr->grp_maybe_modified)
3320 break;
3322 BITMAP_FREE (visited);
3327 /* Propagate distances in bb_dereferences in the opposite direction to the
3328 control flow edges, in each step storing the maximum of the current value
3329 and the minimum of all successors. These steps are repeated until the table
3330 stabilizes. Note that BBs which might terminate the function (according to
3331 the final_bbs bitmap) are never updated in this way.
3333 static void
3334 propagate_dereference_distances (void)
3336 VEC (basic_block, heap) *queue;
3337 basic_block bb;
3339 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3340 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3341 FOR_EACH_BB (bb)
3343 VEC_quick_push (basic_block, queue, bb);
3344 bb->aux = bb;
3347 while (!VEC_empty (basic_block, queue))
3349 edge_iterator ei;
3350 edge e;
3351 bool change = false;
3352 int i;
3354 bb = VEC_pop (basic_block, queue);
3355 bb->aux = NULL;
3357 if (bitmap_bit_p (final_bbs, bb->index))
3358 continue;
3360 for (i = 0; i < func_param_count; i++)
3362 int idx = bb->index * func_param_count + i;
3363 bool first = true;
3364 HOST_WIDE_INT inh = 0;
3366 FOR_EACH_EDGE (e, ei, bb->succs)
3368 int succ_idx = e->dest->index * func_param_count + i;
3370 if (e->src == EXIT_BLOCK_PTR)
3371 continue;
3373 if (first)
3375 first = false;
3376 inh = bb_dereferences [succ_idx];
3378 else if (bb_dereferences [succ_idx] < inh)
3379 inh = bb_dereferences [succ_idx];
3382 if (!first && bb_dereferences[idx] < inh)
3384 bb_dereferences[idx] = inh;
3385 change = true;
3389 if (change && !bitmap_bit_p (final_bbs, bb->index))
3390 FOR_EACH_EDGE (e, ei, bb->preds)
3392 if (e->src->aux)
3393 continue;
3395 e->src->aux = e->src;
3396 VEC_quick_push (basic_block, queue, e->src);
3400 VEC_free (basic_block, heap, queue);
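/* One propagation step in numbers (hypothetical, distances in bits): if
   a block's successors guarantee dereference distances of 64 and 32 for
   a parameter, the block may inherit min (64, 32) = 32 and its entry is
   raised to the maximum of its current value and 32; its predecessors
   are re-queued only when that actually changed something.  */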
3403 /* Dump a dereferences TABLE with heading STR to file F. */
3405 static void
3406 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3408 basic_block bb;
3410 fprintf (f, "%s", str);
3411 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3413 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3414 if (bb != EXIT_BLOCK_PTR)
3416 int i;
3417 for (i = 0; i < func_param_count; i++)
3419 int idx = bb->index * func_param_count + i;
3420 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3423 fprintf (f, "\n");
3425 fprintf (f, "\n");
3428 /* Determine what (parts of) parameters passed by reference that are not
3429 assigned to are not certainly dereferenced in this function and thus the
3430 dereferencing cannot be safely moved to the caller without potentially
3431 introducing a segfault. Mark such REPRESENTATIVES as
3432 grp_not_necessarilly_dereferenced.
3434 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3435 part, is calculated for each pointer parameter rather than a simple boolean,
3436 to handle cases when only a fraction of the whole
3437 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3438 an example).
3440 The maximum dereference distances for each pointer parameter and BB are
3441 already stored in bb_dereferences. This routine simply propagates these
3442 values upwards by propagate_dereference_distances and then compares the
3443 distances of individual parameters in the ENTRY BB to the equivalent
3444 distances of each representative of a (fraction of a) parameter. */
3446 static void
3447 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3449 int i;
3451 if (dump_file && (dump_flags & TDF_DETAILS))
3452 dump_dereferences_table (dump_file,
3453 "Dereference table before propagation:\n",
3454 bb_dereferences);
3456 propagate_dereference_distances ();
3458 if (dump_file && (dump_flags & TDF_DETAILS))
3459 dump_dereferences_table (dump_file,
3460 "Dereference table after propagation:\n",
3461 bb_dereferences);
3463 for (i = 0; i < func_param_count; i++)
3465 struct access *repr = VEC_index (access_p, representatives, i);
3466 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3468 if (!repr || no_accesses_p (repr))
3469 continue;
3473 if ((repr->offset + repr->size) > bb_dereferences[idx])
3474 repr->grp_not_necessarilly_dereferenced = 1;
3475 repr = repr->next_grp;
3477 while (repr);
3481 /* Return the representative access for the parameter declaration PARM if it is
3482 a scalar passed by reference which is not written to and the pointer value
3483 is not used directly. Thus, if it is legal to dereference it in the caller
3484 and we can rule out modifications through aliases, such a parameter should be
3485 turned into one passed by value. Return NULL otherwise. */
3487 static struct access *
3488 unmodified_by_ref_scalar_representative (tree parm)
3490 int i, access_count;
3491 struct access *repr;
3492 VEC (access_p, heap) *access_vec;
3494 access_vec = get_base_access_vector (parm);
3495 gcc_assert (access_vec);
3496 repr = VEC_index (access_p, access_vec, 0);
3497 if (repr->write)
3498 return NULL;
3499 repr->group_representative = repr;
3501 access_count = VEC_length (access_p, access_vec);
3502 for (i = 1; i < access_count; i++)
3504 struct access *access = VEC_index (access_p, access_vec, i);
3505 if (access->write)
3506 return NULL;
3507 access->group_representative = repr;
3508 access->next_sibling = repr->next_sibling;
3509 repr->next_sibling = access;
3512 repr->grp_read = 1;
3513 repr->grp_scalar_ptr = 1;
3514 return repr;
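/* This representative shape is what ultimately lets IPA-SRA turn a
   hypothetical

     int f (const int *p) { return *p + 1; }

   into a function taking the int by value, once analyze_modified_params
   and analyze_caller_dereference_legality confirm that nothing can
   modify *p behind our back and the dereference is always safe.  */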
3517 /* Return true iff this access precludes IPA-SRA of the parameter it is
3518 associated with. */
3520 static bool
3521 access_precludes_ipa_sra_p (struct access *access)
3523 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3524 is an incompatible assignment in a call statement (and possibly even in asm
3525 statements). This can be relaxed by using a new temporary but only for
3526 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3527 intraprocedural SRA we deal with this by keeping the old aggregate around,
3528 something we cannot do in IPA-SRA.) */
3529 if (access->write
3530 && (is_gimple_call (access->stmt)
3531 || gimple_code (access->stmt) == GIMPLE_ASM))
3532 return true;
3534 return false;
3538 /* Sort collected accesses for parameter PARM, identify representatives for
3539 each accessed region and link them together. Return NULL if there are
3540 different but overlapping accesses, the special pointer value meaning there
3541 are no accesses for this parameter if that is the case, and the first
3542 representative otherwise. Set *RO_GRP if there is a group of accesses
3543 with only read (i.e. no write) accesses. */
3545 static struct access *
3546 splice_param_accesses (tree parm, bool *ro_grp)
3548 int i, j, access_count, group_count;
3549 int agg_size, total_size = 0;
3550 struct access *access, *res, **prev_acc_ptr = &res;
3551 VEC (access_p, heap) *access_vec;
3553 access_vec = get_base_access_vector (parm);
3554 if (!access_vec)
3555 return &no_accesses_representant;
3556 access_count = VEC_length (access_p, access_vec);
3558 VEC_qsort (access_p, access_vec, compare_access_positions);
3560 i = 0;
3561 total_size = 0;
3562 group_count = 0;
3563 while (i < access_count)
3565 bool modification;
3566 tree a1_alias_type;
3567 access = VEC_index (access_p, access_vec, i);
3568 modification = access->write;
3569 if (access_precludes_ipa_sra_p (access))
3570 return NULL;
3571 a1_alias_type = reference_alias_ptr_type (access->expr);
3573 /* Access is about to become group representative unless we find some
3574 nasty overlap which would preclude us from breaking this parameter
3575 apart. */
3577 j = i + 1;
3578 while (j < access_count)
3580 struct access *ac2 = VEC_index (access_p, access_vec, j);
3581 if (ac2->offset != access->offset)
3583 /* All or nothing law for parameters. */
3584 if (access->offset + access->size > ac2->offset)
3585 return NULL;
3586 else
3587 break;
3589 else if (ac2->size != access->size)
3590 return NULL;
3592 if (access_precludes_ipa_sra_p (ac2)
3593 || (ac2->type != access->type
3594 && (TREE_ADDRESSABLE (ac2->type)
3595 || TREE_ADDRESSABLE (access->type)))
3596 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3597 return NULL;
3599 modification |= ac2->write;
3600 ac2->group_representative = access;
3601 ac2->next_sibling = access->next_sibling;
3602 access->next_sibling = ac2;
3603 j++;
3606 group_count++;
3607 access->grp_maybe_modified = modification;
3608 if (!modification)
3609 *ro_grp = true;
3610 *prev_acc_ptr = access;
3611 prev_acc_ptr = &access->next_grp;
3612 total_size += access->size;
3613 i = j;
3616 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3617 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3618 else
3619 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3620 if (total_size >= agg_size)
3621 return NULL;
3623 gcc_assert (group_count > 0);
3624 return res;
3627 /* Decide whether parameters with representative accesses given by REPR should
3628 be reduced into components. */
3630 static int
3631 decide_one_param_reduction (struct access *repr)
3633 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3634 bool by_ref;
3635 tree parm;
3637 parm = repr->base;
3638 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3639 gcc_assert (cur_parm_size > 0);
3641 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3643 by_ref = true;
3644 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3646 else
3648 by_ref = false;
3649 agg_size = cur_parm_size;
3652 if (dump_file)
3654 struct access *acc;
3655 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3656 print_generic_expr (dump_file, parm, 0);
3657 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3658 for (acc = repr; acc; acc = acc->next_grp)
3659 dump_access (dump_file, acc, true);
3662 total_size = 0;
3663 new_param_count = 0;
3665 for (; repr; repr = repr->next_grp)
3667 gcc_assert (parm == repr->base);
3668 new_param_count++;
3670 if (!by_ref || (!repr->grp_maybe_modified
3671 && !repr->grp_not_necessarilly_dereferenced))
3672 total_size += repr->size;
3673 else
3674 total_size += cur_parm_size;
3677 gcc_assert (new_param_count > 0);
3679 if (optimize_function_for_size_p (cfun))
3680 parm_size_limit = cur_parm_size;
3681 else
3682 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3683 * cur_parm_size);
3685 if (total_size < agg_size
3686 && total_size <= parm_size_limit)
3688 if (dump_file)
3689 fprintf (dump_file, " ....will be split into %i components\n",
3690 new_param_count);
3691 return new_param_count;
3693 else
3694 return 0;
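/* A worked example with hypothetical numbers: a 128-bit struct passed
   by reference from which only two unmodified 32-bit fields are read
   gives total_size = 64, which is smaller than agg_size = 128 and,
   when optimizing for speed, also within PARAM_IPA_SRA_PTR_GROWTH_FACTOR
   times the pointer size, so the routine returns 2 and the parameter is
   split into two scalar components.  */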
3697 /* The order of the following enums is important; we need to do extra work for
3698 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3699 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3700 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3702 /* Identify representatives of all accesses to all candidate parameters for
3703 IPA-SRA. Return result based on what representatives have been found. */
3705 static enum ipa_splicing_result
3706 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3708 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3709 tree parm;
3710 struct access *repr;
3712 *representatives = VEC_alloc (access_p, heap, func_param_count);
3714 for (parm = DECL_ARGUMENTS (current_function_decl);
3715 parm;
3716 parm = DECL_CHAIN (parm))
3718 if (is_unused_scalar_param (parm))
3720 VEC_quick_push (access_p, *representatives,
3721 &no_accesses_representant);
3722 if (result == NO_GOOD_ACCESS)
3723 result = UNUSED_PARAMS;
3725 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3726 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3727 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3729 repr = unmodified_by_ref_scalar_representative (parm);
3730 VEC_quick_push (access_p, *representatives, repr);
3731 if (repr)
3732 result = UNMODIF_BY_REF_ACCESSES;
3734 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3736 bool ro_grp = false;
3737 repr = splice_param_accesses (parm, &ro_grp);
3738 VEC_quick_push (access_p, *representatives, repr);
3740 if (repr && !no_accesses_p (repr))
3742 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3744 if (ro_grp)
3745 result = UNMODIF_BY_REF_ACCESSES;
3746 else if (result < MODIF_BY_REF_ACCESSES)
3747 result = MODIF_BY_REF_ACCESSES;
3749 else if (result < BY_VAL_ACCESSES)
3750 result = BY_VAL_ACCESSES;
3752 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3753 result = UNUSED_PARAMS;
3755 else
3756 VEC_quick_push (access_p, *representatives, NULL);
3759 if (result == NO_GOOD_ACCESS)
3761 VEC_free (access_p, heap, *representatives);
3762 *representatives = NULL;
3763 return NO_GOOD_ACCESS;
3766 return result;
3769 /* Return the index of BASE in PARMS. Abort if it is not found. */
3771 static inline int
3772 get_param_index (tree base, VEC(tree, heap) *parms)
3774 int i, len;
3776 len = VEC_length (tree, parms);
3777 for (i = 0; i < len; i++)
3778 if (VEC_index (tree, parms, i) == base)
3779 return i;
3780 gcc_unreachable ();
3783 /* Convert the decisions made at the representative level into compact
3784 parameter adjustments. REPRESENTATIVES are pointers to first
3785 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the expected
3786 final number of adjustments. */

static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
                                       int adjustments_count)
{
  VEC (tree, heap) *parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (!repr || no_accesses_p (repr))
        {
          struct ipa_parm_adjustment *adj;

          adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
          memset (adj, 0, sizeof (*adj));
          adj->base_index = get_param_index (parm, parms);
          adj->base = parm;
          if (!repr)
            adj->copy_param = 1;
          else
            adj->remove_param = 1;
        }
      else
        {
          struct ipa_parm_adjustment *adj;
          int index = get_param_index (parm, parms);

          for (; repr; repr = repr->next_grp)
            {
              adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
              memset (adj, 0, sizeof (*adj));
              gcc_assert (repr->base == parm);
              adj->base_index = index;
              adj->base = repr->base;
              adj->type = repr->type;
              adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
              adj->offset = repr->offset;
              adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
                             && (repr->grp_maybe_modified
                                 || repr->grp_not_necessarilly_dereferenced));
            }
        }
    }
  VEC_free (tree, heap, parms);
  return adjustments;
}

/* Analyze the collected accesses and produce a plan for what to do with the
   parameters, in the form of adjustments, NULL meaning nothing.  */
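
/* A sketch of the possible outcomes for a single parameter, assuming its
   representative survives the analyses invoked below: a read-only scalar
   passed by reference is converted to a by-value scalar, an aggregate is
   broken up into as many components as decide_one_param_reduction allows,
   and a parameter with no real accesses is deleted outright.  */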

static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  VEC (access_p, heap) *representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (&representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return NULL;

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = VEC_index (access_p, representatives, i);

      if (repr && !no_accesses_p (repr))
        {
          if (repr->grp_scalar_ptr)
            {
              adjustments_count++;
              if (repr->grp_not_necessarilly_dereferenced
                  || repr->grp_maybe_modified)
                VEC_replace (access_p, representatives, i, NULL);
              else
                {
                  proceed = true;
                  sra_stats.scalar_by_ref_to_by_val++;
                }
            }
          else
            {
              int new_components = decide_one_param_reduction (repr);

              if (new_components == 0)
                {
                  VEC_replace (access_p, representatives, i, NULL);
                  adjustments_count++;
                }
              else
                {
                  adjustments_count += new_components;
                  sra_stats.aggregate_params_reduced++;
                  sra_stats.param_reductions_created += new_components;
                  proceed = true;
                }
            }
        }
      else
        {
          if (no_accesses_p (repr))
            {
              proceed = true;
              sra_stats.deleted_unused_parameters++;
            }
          adjustments_count++;
        }
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
                                                         adjustments_count);
  else
    adjustments = NULL;

  VEC_free (access_p, heap, representatives);
  return adjustments;
}

/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */
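
/* For example (a hypothetical sketch): for a removed parameter "len", a
   temporary register is created with the "ISR" prefix and then given a
   DECL_NAME derived from "len" by make_fancy_name, so the substitute
   remains recognizable in dumps rather than appearing as an anonymous
   temporary.  */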

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      get_var_ann (repl);
      add_referenced_var (repl);
      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}

/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (!adj->copy_param && adj->base == base)
        return adj;
    }

  return NULL;
}

/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a newly created VAR_DECL, together with all of its uses, and
   return true.  ADJUSTMENTS is a pointer to an adjustments vector.  */
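
/* A hypothetical example of the rewrite: if parameter P is being removed,
   a definition such as

     p_3 = PHI <p_1(D), p_2>

   gets its result, and all uses of p_3, replaced with an SSA name based on
   the substitute VAR_DECL returned by get_replaced_param_substitute, so
   that no SSA name of the removed PARM_DECL survives.  */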

static bool
replace_removed_params_ssa_names (gimple stmt,
                                  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  decl = SSA_NAME_VAR (lhs);
  if (TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (stmt, name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}

/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and the new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */
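
/* To illustrate (a hypothetical example): assume the access to parameter P
   at offset 0 has been reduced to a new scalar parameter named ISRA.4 by
   ipa_modify_formal_parameters.  A use such as

     tmp_2 = p_1(D)->f;

   is then rewritten to

     tmp_2 = ISRA.4;

   (or to a dereference of ISRA.4 when the adjustment is by_ref), with a
   VIEW_CONVERT_EXPR wrapped around the replacement when CONVERT is set and
   the types do not match.  */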

static bool
sra_ipa_modify_expr (tree *expr, bool convert,
                     ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  struct ipa_parm_adjustment *adj, *cand = NULL;
  HOST_WIDE_INT offset, size, max_size;
  tree base, src;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);

  if (TREE_CODE (*expr) == BIT_FIELD_REF
      || TREE_CODE (*expr) == IMAGPART_EXPR
      || TREE_CODE (*expr) == REALPART_EXPR)
    {
      expr = &TREE_OPERAND (*expr, 0);
      convert = true;
    }

  base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return false;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return false;

  for (i = 0; i < len; i++)
    {
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base == base
          && (adj->offset == offset || adj->remove_param))
        {
          cand = adj;
          break;
        }
    }
  if (!cand || cand->copy_param || cand->remove_param)
    return false;

  if (cand->by_ref)
    src = build_simple_mem_ref (cand->reduction);
  else
    src = cand->reduction;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}

/* If the statement pointed to by STMT_PTR contains any expressions that need
   to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
   any potential type incompatibilities (GSI is used to accommodate conversion
   statements and must point to the statement).  Return true iff the statement
   was modified.  */
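
/* A hypothetical illustration of the fix-ups below: if only the RHS of

     lhs = rhs;

   is replaced and its new type is not compatible with the LHS, the new RHS
   is wrapped in a VIEW_CONVERT_EXPR which is forced into a separate
   statement in front of the assignment (via GSI), keeping the resulting
   GIMPLE type-correct; CONSTRUCTORs are special-cased instead because
   V_C_Es of constructors can cause trouble (PR 42714).  */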

static bool
sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
                       ipa_parm_adjustment_vec adjustments)
{
  gimple stmt = *stmt_ptr;
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = sra_ipa_modify_expr (rhs_p, false, adjustments);
  any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
        {
          if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
            {
              /* V_C_Es of constructors can cause trouble (PR 42714).  */
              if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
                *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
              else
                *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
            }
          else
            new_rhs = fold_build1_loc (gimple_location (stmt),
                                       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
                                       *rhs_p);
        }
      else if (REFERENCE_CLASS_P (*rhs_p)
               && is_gimple_reg_type (TREE_TYPE (*lhs_p))
               && !is_gimple_reg (*lhs_p))
        /* This can happen when an assignment in between two single field
           structures is turned into an assignment in between two pointers to
           scalars (PR 42237).  */
        new_rhs = *rhs_p;

      if (new_rhs)
        {
          tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
                                               true, GSI_SAME_STMT);

          gimple_assign_set_rhs_from_tree (gsi, tmp);
        }

      return true;
    }

  return false;
}

/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */

static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool modified = false;
          tree *t;
          unsigned i;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval_ptr (stmt);
              if (*t != NULL_TREE)
                modified |= sra_ipa_modify_expr (t, true, adjustments);
              break;

            case GIMPLE_ASSIGN:
              modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
              modified |= replace_removed_params_ssa_names (stmt, adjustments);
              break;

            case GIMPLE_CALL:
              /* Operands must be processed before the lhs.  */
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                {
                  t = gimple_call_arg_ptr (stmt, i);
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }

              if (gimple_call_lhs (stmt))
                {
                  t = gimple_call_lhs_ptr (stmt);
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                  modified |= replace_removed_params_ssa_names (stmt,
                                                                adjustments);
                }
              break;

            case GIMPLE_ASM:
              for (i = 0; i < gimple_asm_ninputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, true, adjustments);
                }
              for (i = 0; i < gimple_asm_noutputs (stmt); i++)
                {
                  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
                  modified |= sra_ipa_modify_expr (t, false, adjustments);
                }
              break;

            default:
              break;
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */
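
/* The effect, sketched on a hypothetical removed parameter "len": a debug
   bind such as

     # DEBUG len => len_1(D)

   has its value reset, so the debugger reports the variable as optimized
   away instead of referencing a default definition that no longer
   exists.  */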

static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      tree name;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      if (adj->copy_param || !is_gimple_reg (adj->base))
        continue;
      name = gimple_default_def (cfun, adj->base);
      if (!name)
        continue;
      FOR_EACH_IMM_USE_STMT (stmt, ui, name)
        {
          /* All other users must have been removed by
             ipa_sra_modify_function_body.  */
          gcc_assert (is_gimple_debug (stmt));
          gimple_debug_bind_reset_value (stmt);
          update_stmt (stmt);
        }
    }
}

/* Return true iff all callers have at least as many actual arguments as there
   are formal parameters in the current function.  */

static bool
all_callers_have_enough_arguments_p (struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!callsite_has_enough_arguments_p (cs->call_stmt))
      return false;

  return true;
}

/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
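
/* Note on the recursive-call fix-up below: the body of the new NODE may
   still contain calls to OLD_DECL, the pre-cloning version of the function.
   For a hypothetical recursive foo, a call

     foo (s);

   inside the clone is redirected to the clone itself and its argument list
   is rewritten according to ADJUSTMENTS.  */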

static void
convert_callers (struct cgraph_node *node, tree old_decl,
                 ipa_parm_adjustment_vec adjustments)
{
  tree old_cur_fndecl = current_function_decl;
  struct cgraph_edge *cs;
  basic_block this_block;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      current_function_decl = cs->caller->decl;
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
                 cgraph_node_name (cs->caller),
                 cgraph_node_name (cs->callee));

      ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
      compute_inline_parameters (cs->caller);
  BITMAP_FREE (recomputed_callers);

  current_function_decl = old_cur_fndecl;

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree call_fndecl;
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          call_fndecl = gimple_call_fndecl (stmt);
          if (call_fndecl == old_decl)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting recursive call\n");
              gimple_call_set_fndecl (stmt, node->decl);
              ipa_modify_call_arguments (NULL, stmt, adjustments);
            }
        }
    }

  return;
}

/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */
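
/* The overall shape of the transformation, sketched: the original node is
   versioned with the "isra" suffix, so a function foo typically becomes a
   local clone named along the lines of foo.isra.0, its formal parameters
   are rewritten by ipa_modify_formal_parameters (with the "ISRA" prefix for
   newly synthesized parameters), and all recorded callers are redirected to
   the clone.  */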

static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  struct cgraph_edge *cs;
  bool cfg_changed;
  VEC (cgraph_edge_p, heap) *redirect_callers;
  int node_callers;

  node_callers = 0;
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    node_callers++;
  redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
  for (cs = node->callers; cs != NULL; cs = cs->next_caller)
    VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

  rebuild_cgraph_edges ();
  pop_cfun ();
  current_function_decl = NULL_TREE;

  new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
                                         NULL, NULL, "isra");
  current_function_decl = new_node->decl;
  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));

  ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  cgraph_make_node_local (new_node);
  return cfg_changed;
}

/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */

static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
        fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      && node->global.size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
        fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (!node->callers)
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
        fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  return true;
}

/* Perform early interprocedural SRA.  */
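
/* A classic example of what the whole pass achieves (hypothetical):

     static int foo (struct S *p) { return p->x; }

   becomes, after early IPA-SRA, roughly

     static int foo.isra.0 (int x) { return x; }

   with every caller rewritten to load p->x itself and pass the value.  */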

static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
        fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (!all_callers_have_enough_arguments_p (node))
    {
      if (dump_file)
        fprintf (dump_file, "There are callers with insufficient number of "
                 "arguments.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
                              func_param_count
                              * last_basic_block_for_function (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
        fprintf (dump_file, "Function calls itself with insufficient "
                 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments)
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  VEC_free (ipa_parm_adjustment_t, heap, adjustments);

  statistics_counter_event (cfun, "Unused parameters deleted",
                            sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
                            sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
                            sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
                            sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}

/* Return true if early IPA-SRA shall be performed.  */
static bool
ipa_early_sra_gate (void)
{
  return flag_ipa_sra && dbg_cnt (eipa_sra);
}

struct gimple_opt_pass pass_early_ipa_sra =
{
 {
  GIMPLE_PASS,
  "eipa_sra",                           /* name */
  ipa_early_sra_gate,                   /* gate */
  ipa_early_sra,                        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IPA_SRA,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_dump_cgraph     /* todo_flags_finish */
 }
};