1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
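/* As a hypothetical illustration of the whole process (an example sketch,
   assuming 32-bit int), given

     struct pair { int x; int y; };

     int
     sum (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   SRA would create accesses <0, 32> and <32, 32> for P, find no partial
   overlaps, and replace the aggregate with two independent scalars,
   conceptually:

     int
     sum (void)
     {
       int p$x = 1;
       int p$y = 2;
       return p$x + p$y;
     }

   making P dead and exposing p$x and p$y to the scalar optimizers.  The
   "$" names follow the scheme of make_fancy_name below.  */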
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "target.h"
88 #include "flags.h"
89 #include "dbgcnt.h"
90 #include "tree-inline.h"
91 #include "gimple-pretty-print.h"
92 #include "ipa-inline.h"
94 /* Enumeration of all aggregate reductions we can do. */
95 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
96 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 SRA_MODE_INTRA }; /* late intraprocedural SRA */
99 /* Global variable describing which aggregate reduction we are performing at
100 the moment. */
101 static enum sra_mode sra_mode;
103 struct assign_link;
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106 part). It can also represent a group of accesses that refer to exactly the
107 same fragment of an aggregate (i.e. those that have exactly the same offset
108 and size). Such representatives for a single aggregate, once determined,
109 are linked in a linked list and have the group fields set.
111 Moreover, when doing intraprocedural SRA, a tree is built from those
112 representatives (by means of first_child and next_sibling pointers), in
113 which all items in a subtree are "within" the root, i.e. their offset is
114 greater or equal to offset of the root and offset+size is smaller or equal
115 to offset+size of the root. Children of an access are sorted by offset.
117 Note that accesses to parts of vector and complex number types are always
118 represented by an access to the whole complex number or vector. It is the
119 duty of the modifying functions to replace them appropriately. */
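/* A hypothetical example of the access tree described above: for

     struct outer { struct inner { int a; int b; } in; int c; } v;

   (assuming 32-bit int) the group representatives could form

     <offset 0, size 96>            the whole of V
       <offset 0, size 64>            v.in
         <offset 0, size 32>            v.in.a
         <offset 32, size 32>           v.in.b
       <offset 64, size 32>           v.c

   where every child lies within its parent and children are sorted by
   offset; offsets and sizes are in bits, as produced by
   get_ref_base_and_extent.  */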
121 struct access
123 /* Values returned by `get_ref_base_and_extent' for each component reference.
124 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
125 `SIZE = TYPE_SIZE (TREE_TYPE (expr))'. */
126 HOST_WIDE_INT offset;
127 HOST_WIDE_INT size;
128 tree base;
130 /* Expression. It is context dependent so do not use it to create new
131 expressions to access the original aggregate. See PR 42154 for a
132 testcase. */
133 tree expr;
134 /* Type. */
135 tree type;
137 /* The statement this access belongs to. */
138 gimple stmt;
140 /* Next group representative for this aggregate. */
141 struct access *next_grp;
143 /* Pointer to the group representative. Pointer to itself if the struct is
144 the representative. */
145 struct access *group_representative;
147 /* If this access has any children (in terms of the definition above), this
148 points to the first one. */
149 struct access *first_child;
151 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152 described above. In IPA-SRA this is a pointer to the next access
153 belonging to the same group (having the same representative). */
154 struct access *next_sibling;
156 /* Pointers to the first and last element in the linked list of assign
157 links. */
158 struct assign_link *first_link, *last_link;
160 /* Pointer to the next access in the work queue. */
161 struct access *next_queued;
163 /* Replacement variable for this access "region." Never to be accessed
164 directly, always only by means of get_access_replacement() and only
165 when grp_to_be_replaced flag is set. */
166 tree replacement_decl;
168 /* Is this particular access a write access? */
169 unsigned write : 1;
171 /* Is this access an access to a non-addressable field? */
172 unsigned non_addressable : 1;
174 /* Is this access currently in the work queue? */
175 unsigned grp_queued : 1;
177 /* Does this group contain a write access? This flag is propagated down the
178 access tree. */
179 unsigned grp_write : 1;
181 /* Does this group contain a read access? This flag is propagated down the
182 access tree. */
183 unsigned grp_read : 1;
185 /* Does this group contain a read access that comes from an assignment
186 statement? This flag is propagated down the access tree. */
187 unsigned grp_assignment_read : 1;
189 /* Does this group contain a write access that comes from an assignment
190 statement? This flag is propagated down the access tree. */
191 unsigned grp_assignment_write : 1;
193 /* Does this group contain a read access through a scalar type? This flag is
194 not propagated in the access tree in any direction. */
195 unsigned grp_scalar_read : 1;
197 /* Does this group contain a write access through a scalar type? This flag
198 is not propagated in the access tree in any direction. */
199 unsigned grp_scalar_write : 1;
201 /* Is this access an artificial one created to scalarize some record
202 entirely? */
203 unsigned grp_total_scalarization : 1;
205 /* Other passes of the analysis use this bit to make function
206 analyze_access_subtree create scalar replacements for this group if
207 possible. */
208 unsigned grp_hint : 1;
210 /* Is the subtree rooted in this access fully covered by scalar
211 replacements? */
212 unsigned grp_covered : 1;
214 /* If set to true, this access and all below it in an access tree must not be
215 scalarized. */
216 unsigned grp_unscalarizable_region : 1;
218 /* Whether data have been written to parts of the aggregate covered by this
219 access which is not to be scalarized. This flag is propagated up in the
220 access tree. */
221 unsigned grp_unscalarized_data : 1;
223 /* Does this access and/or group contain a write access through a
224 BIT_FIELD_REF? */
225 unsigned grp_partial_lhs : 1;
227 /* Set when a scalar replacement should be created for this variable. The
228 decision and the creation happen at different places because create_tmp_var
229 cannot be called from within FOR_EACH_REFERENCED_VAR. */
230 unsigned grp_to_be_replaced : 1;
232 /* Should TREE_NO_WARNING of a replacement be set? */
233 unsigned grp_no_warning : 1;
235 /* Is it possible that the group refers to data which might be (directly or
236 otherwise) modified? */
237 unsigned grp_maybe_modified : 1;
239 /* Set when this is a representative of a pointer to scalar (i.e. by
240 reference) parameter which we consider for turning into a plain scalar
241 (i.e. a by value parameter). */
242 unsigned grp_scalar_ptr : 1;
244 /* Set when we discover that this pointer is not safe to dereference in the
245 caller. */
246 unsigned grp_not_necessarilly_dereferenced : 1;
249 typedef struct access *access_p;
251 DEF_VEC_P (access_p);
252 DEF_VEC_ALLOC_P (access_p, heap);
254 /* Alloc pool for allocating access structures. */
255 static alloc_pool access_pool;
257 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
258 are used to propagate subaccesses from rhs to lhs as long as they don't
259 conflict with what is already there. */
260 struct assign_link
262 struct access *lacc, *racc;
263 struct assign_link *next;
266 /* Alloc pool for allocating assign link structures. */
267 static alloc_pool link_pool;
269 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
270 static struct pointer_map_t *base_access_vec;
272 /* Bitmap of candidates. */
273 static bitmap candidate_bitmap;
275 /* Bitmap of candidates which we should try to entirely scalarize away and
276 those which cannot be (because they are, and need to be, used as a whole). */
277 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
279 /* Obstack for creation of fancy names. */
280 static struct obstack name_obstack;
282 /* Head of a linked list of accesses that need to have their subaccesses
283 propagated to their assignment counterparts. */
284 static struct access *work_queue_head;
286 /* Number of parameters of the analyzed function when doing early IPA SRA. */
287 static int func_param_count;
289 /* scan_function sets the following to true if it encounters a call to
290 __builtin_apply_args. */
291 static bool encountered_apply_args;
293 /* Set by scan_function when it finds a recursive call. */
294 static bool encountered_recursive_call;
296 /* Set by scan_function when it finds a recursive call with fewer actual
297 arguments than formal parameters. */
298 static bool encountered_unchangable_recursive_call;
300 /* This is a table in which for each basic block and parameter there is a
301 distance (offset + size) in that parameter which is dereferenced and
302 accessed in that BB. */
303 static HOST_WIDE_INT *bb_dereferences;
304 /* Bitmap of BBs that can cause the function to "stop" progressing by
305 returning, throwing externally, looping infinitely or calling a function
306 which might abort, etc. */
307 static bitmap final_bbs;
309 /* Representative of no accesses at all. */
310 static struct access no_accesses_representant;
312 /* Predicate to test the special value. */
314 static inline bool
315 no_accesses_p (struct access *access)
317 return access == &no_accesses_representant;
320 /* Statistics gathered during the pass about the transformations performed. */
324 static struct
326 /* Number of processed aggregates is readily available in
327 analyze_all_variable_accesses and so is not stored here. */
329 /* Number of created scalar replacements. */
330 int replacements;
332 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
333 expression. */
334 int exprs;
336 /* Number of statements created by generate_subtree_copies. */
337 int subtree_copies;
339 /* Number of statements created by load_assign_lhs_subreplacements. */
340 int subreplacements;
342 /* Number of times sra_modify_assign has deleted a statement. */
343 int deleted;
345 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
346 RHS separately due to type conversions or nonexistent matching
347 references. */
348 int separate_lhs_rhs_handling;
350 /* Number of parameters that were removed because they were unused. */
351 int deleted_unused_parameters;
353 /* Number of scalars passed as parameters by reference that have been
354 converted to be passed by value. */
355 int scalar_by_ref_to_by_val;
357 /* Number of aggregate parameters that were replaced by one or more of their
358 components. */
359 int aggregate_params_reduced;
361 /* Number of components created when splitting aggregate parameters. */
362 int param_reductions_created;
363 } sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
representative fields are dumped, otherwise those which only describe the
individual access are. */
365 static void
366 dump_access (FILE *f, struct access *access, bool grp)
368 fprintf (f, "access { ");
369 fprintf (f, "base = (%d)'", DECL_UID (access->base));
370 print_generic_expr (f, access->base, 0);
371 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
372 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
373 fprintf (f, ", expr = ");
374 print_generic_expr (f, access->expr, 0);
375 fprintf (f, ", type = ");
376 print_generic_expr (f, access->type, 0);
377 if (grp)
378 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
379 "grp_assignment_write = %d, grp_scalar_read = %d, "
380 "grp_scalar_write = %d, grp_total_scalarization = %d, "
381 "grp_hint = %d, grp_covered = %d, "
382 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
383 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
384 "grp_maybe_modified = %d, "
385 "grp_not_necessarilly_dereferenced = %d\n",
386 access->grp_read, access->grp_write, access->grp_assignment_read,
387 access->grp_assignment_write, access->grp_scalar_read,
388 access->grp_scalar_write, access->grp_total_scalarization,
389 access->grp_hint, access->grp_covered,
390 access->grp_unscalarizable_region, access->grp_unscalarized_data,
391 access->grp_partial_lhs, access->grp_to_be_replaced,
392 access->grp_maybe_modified,
393 access->grp_not_necessarilly_dereferenced);
394 else
395 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
396 "grp_partial_lhs = %d\n",
397 access->write, access->grp_total_scalarization,
398 access->grp_partial_lhs);
401 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
403 static void
404 dump_access_tree_1 (FILE *f, struct access *access, int level)
408 int i;
410 for (i = 0; i < level; i++)
411 fputs ("* ", f);
413 dump_access (f, access, true);
415 if (access->first_child)
416 dump_access_tree_1 (f, access->first_child, level + 1);
418 access = access->next_sibling;
420 while (access);
423 /* Dump all access trees for a variable, given the pointer to the first root in
424 ACCESS. */
426 static void
427 dump_access_tree (FILE *f, struct access *access)
429 for (; access; access = access->next_grp)
430 dump_access_tree_1 (f, access, 0);
433 /* Return true iff ACC is non-NULL and has subaccesses. */
435 static inline bool
436 access_has_children_p (struct access *acc)
438 return acc && acc->first_child;
441 /* Return true iff ACC is (partly) covered by at least one replacement. */
443 static bool
444 access_has_replacements_p (struct access *acc)
446 struct access *child;
447 if (acc->grp_to_be_replaced)
448 return true;
449 for (child = acc->first_child; child; child = child->next_sibling)
450 if (access_has_replacements_p (child))
451 return true;
452 return false;
455 /* Return a vector of pointers to accesses for the variable given in BASE or
456 NULL if there is none. */
458 static VEC (access_p, heap) *
459 get_base_access_vector (tree base)
461 void **slot;
463 slot = pointer_map_contains (base_access_vec, base);
464 if (!slot)
465 return NULL;
466 else
467 return *(VEC (access_p, heap) **) slot;
470 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
471 in ACCESS. Return NULL if it cannot be found. */
473 static struct access *
474 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
475 HOST_WIDE_INT size)
477 while (access && (access->offset != offset || access->size != size))
479 struct access *child = access->first_child;
481 while (child && (child->offset + child->size <= offset))
482 child = child->next_sibling;
483 access = child;
486 return access;
489 /* Return the first group representative for DECL or NULL if none exists. */
491 static struct access *
492 get_first_repr_for_decl (tree base)
494 VEC (access_p, heap) *access_vec;
496 access_vec = get_base_access_vector (base);
497 if (!access_vec)
498 return NULL;
500 return VEC_index (access_p, access_vec, 0);
503 /* Find an access representative for the variable BASE and given OFFSET and
504 SIZE. Requires that access trees have already been built. Return NULL if
505 it cannot be found. */
507 static struct access *
508 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
509 HOST_WIDE_INT size)
511 struct access *access;
513 access = get_first_repr_for_decl (base);
514 while (access && (access->offset + access->size <= offset))
515 access = access->next_grp;
516 if (!access)
517 return NULL;
519 return find_access_in_subtree (access, offset, size);
522 /* Add LINK to the linked list of assign links of RACC. */
523 static void
524 add_link_to_rhs (struct access *racc, struct assign_link *link)
526 gcc_assert (link->racc == racc);
528 if (!racc->first_link)
530 gcc_assert (!racc->last_link);
531 racc->first_link = link;
533 else
534 racc->last_link->next = link;
536 racc->last_link = link;
537 link->next = NULL;
540 /* Move all link structures in their linked list in OLD_RACC to the linked list
541 in NEW_RACC. */
542 static void
543 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
545 if (!old_racc->first_link)
547 gcc_assert (!old_racc->last_link);
548 return;
551 if (new_racc->first_link)
553 gcc_assert (!new_racc->last_link->next);
554 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
556 new_racc->last_link->next = old_racc->first_link;
557 new_racc->last_link = old_racc->last_link;
559 else
561 gcc_assert (!new_racc->last_link);
563 new_racc->first_link = old_racc->first_link;
564 new_racc->last_link = old_racc->last_link;
566 old_racc->first_link = old_racc->last_link = NULL;
569 /* Add ACCESS to the work queue (which is actually a stack). */
571 static void
572 add_access_to_work_queue (struct access *access)
574 if (!access->grp_queued)
576 gcc_assert (!access->next_queued);
577 access->next_queued = work_queue_head;
578 access->grp_queued = 1;
579 work_queue_head = access;
583 /* Pop an access from the work queue, and return it, assuming there is one. */
585 static struct access *
586 pop_access_from_work_queue (void)
588 struct access *access = work_queue_head;
590 work_queue_head = access->next_queued;
591 access->next_queued = NULL;
592 access->grp_queued = 0;
593 return access;
597 /* Allocate necessary structures. */
599 static void
600 sra_initialize (void)
602 candidate_bitmap = BITMAP_ALLOC (NULL);
603 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
604 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
605 gcc_obstack_init (&name_obstack);
606 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
607 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
608 base_access_vec = pointer_map_create ();
609 memset (&sra_stats, 0, sizeof (sra_stats));
610 encountered_apply_args = false;
611 encountered_recursive_call = false;
612 encountered_unchangable_recursive_call = false;
615 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
617 static bool
618 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
619 void *data ATTRIBUTE_UNUSED)
621 VEC (access_p, heap) *access_vec;
622 access_vec = (VEC (access_p, heap) *) *value;
623 VEC_free (access_p, heap, access_vec);
625 return true;
628 /* Deallocate all general structures. */
630 static void
631 sra_deinitialize (void)
633 BITMAP_FREE (candidate_bitmap);
634 BITMAP_FREE (should_scalarize_away_bitmap);
635 BITMAP_FREE (cannot_scalarize_away_bitmap);
636 free_alloc_pool (access_pool);
637 free_alloc_pool (link_pool);
638 obstack_free (&name_obstack, NULL);
640 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
641 pointer_map_destroy (base_access_vec);
644 /* Remove DECL from candidates for SRA and write REASON to the dump file if
645 there is one. */
646 static void
647 disqualify_candidate (tree decl, const char *reason)
649 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
651 if (dump_file && (dump_flags & TDF_DETAILS))
653 fprintf (dump_file, "! Disqualifying ");
654 print_generic_expr (dump_file, decl, 0);
655 fprintf (dump_file, " - %s\n", reason);
659 /* Return true iff the type contains a field or an element which does not allow
660 scalarization. */
662 static bool
663 type_internals_preclude_sra_p (tree type, const char **msg)
665 tree fld;
666 tree et;
668 switch (TREE_CODE (type))
670 case RECORD_TYPE:
671 case UNION_TYPE:
672 case QUAL_UNION_TYPE:
673 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
674 if (TREE_CODE (fld) == FIELD_DECL)
676 tree ft = TREE_TYPE (fld);
678 if (TREE_THIS_VOLATILE (fld))
680 *msg = "volatile structure field";
681 return true;
683 if (!DECL_FIELD_OFFSET (fld))
685 *msg = "no structure field offset";
686 return true;
688 if (!DECL_SIZE (fld))
690 *msg = "zero structure field size";
691 return true;
693 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
695 *msg = "structure field offset not fixed";
696 return true;
698 if (!host_integerp (DECL_SIZE (fld), 1))
700 *msg = "structure field size not fixed";
701 return true;
703 if (AGGREGATE_TYPE_P (ft)
704 && int_bit_position (fld) % BITS_PER_UNIT != 0)
706 *msg = "structure field is bit field";
707 return true;
710 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
711 return true;
714 return false;
716 case ARRAY_TYPE:
717 et = TREE_TYPE (type);
719 if (TYPE_VOLATILE (et))
721 *msg = "element type is volatile";
722 return true;
725 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
726 return true;
728 return false;
730 default:
731 return false;
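/* Hypothetical examples of types the predicate above rejects:

     struct s1 { volatile int a; };      "volatile structure field"
     struct s2 { int n; int tail[]; };   flexible array member: "tail"
                                         likely has no DECL_SIZE

   while a plain  struct s3 { int a; char b[8]; }  passes all checks.  */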
735 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
736 base variable if it is. Return T if it is not an SSA_NAME. */
738 static tree
739 get_ssa_base_param (tree t)
741 if (TREE_CODE (t) == SSA_NAME)
743 if (SSA_NAME_IS_DEFAULT_DEF (t))
744 return SSA_NAME_VAR (t);
745 else
746 return NULL_TREE;
748 return t;
751 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
752 belongs to, unless the BB has already been marked as potentially
753 final. */
755 static void
756 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
758 basic_block bb = gimple_bb (stmt);
759 int idx, parm_index = 0;
760 tree parm;
762 if (bitmap_bit_p (final_bbs, bb->index))
763 return;
765 for (parm = DECL_ARGUMENTS (current_function_decl);
766 parm && parm != base;
767 parm = DECL_CHAIN (parm))
768 parm_index++;
770 gcc_assert (parm_index < func_param_count);
772 idx = bb->index * func_param_count + parm_index;
773 if (bb_dereferences[idx] < dist)
774 bb_dereferences[idx] = dist;
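/* A sketch of the indexing used above, assuming a function with two
   parameters (func_param_count == 2): the dereference distance of
   parameter 1 within basic block 5 lives at

     bb_dereferences[5 * 2 + 1]

   i.e. bb_dereferences is a row-major matrix with one row per basic
   block and one column per parameter.  */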
777 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
778 the three fields. Also add it to the vector of accesses corresponding to
779 the base. Finally, return the new access. */
781 static struct access *
782 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
784 VEC (access_p, heap) *vec;
785 struct access *access;
786 void **slot;
788 access = (struct access *) pool_alloc (access_pool);
789 memset (access, 0, sizeof (struct access));
790 access->base = base;
791 access->offset = offset;
792 access->size = size;
794 slot = pointer_map_contains (base_access_vec, base);
795 if (slot)
796 vec = (VEC (access_p, heap) *) *slot;
797 else
798 vec = VEC_alloc (access_p, heap, 32);
800 VEC_safe_push (access_p, heap, vec, access);
802 *((struct VEC (access_p,heap) **)
803 pointer_map_insert (base_access_vec, base)) = vec;
805 return access;
808 /* Create and insert an access for EXPR. Return the created access, or NULL if it is
809 not possible. */
811 static struct access *
812 create_access (tree expr, gimple stmt, bool write)
814 struct access *access;
815 HOST_WIDE_INT offset, size, max_size;
816 tree base = expr;
817 bool ptr, unscalarizable_region = false;
819 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
821 if (sra_mode == SRA_MODE_EARLY_IPA
822 && TREE_CODE (base) == MEM_REF)
824 base = get_ssa_base_param (TREE_OPERAND (base, 0));
825 if (!base)
826 return NULL;
827 ptr = true;
829 else
830 ptr = false;
832 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
833 return NULL;
835 if (sra_mode == SRA_MODE_EARLY_IPA)
837 if (size < 0 || size != max_size)
839 disqualify_candidate (base, "Encountered a variable sized access.");
840 return NULL;
842 if (TREE_CODE (expr) == COMPONENT_REF
843 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
845 disqualify_candidate (base, "Encountered a bit-field access.");
846 return NULL;
848 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
850 if (ptr)
851 mark_parm_dereference (base, offset + size, stmt);
853 else
855 if (size != max_size)
857 size = max_size;
858 unscalarizable_region = true;
860 if (size < 0)
862 disqualify_candidate (base, "Encountered an unconstrained access.");
863 return NULL;
867 access = create_access_1 (base, offset, size);
868 access->expr = expr;
869 access->type = TREE_TYPE (expr);
870 access->write = write;
871 access->grp_unscalarizable_region = unscalarizable_region;
872 access->stmt = stmt;
874 if (TREE_CODE (expr) == COMPONENT_REF
875 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
876 access->non_addressable = 1;
878 return access;
882 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
883 register types or (recursively) records with only these two kinds of fields.
884 It also returns false if any of these records contains a bit-field. */
886 static bool
887 type_consists_of_records_p (tree type)
889 tree fld;
891 if (TREE_CODE (type) != RECORD_TYPE)
892 return false;
894 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
895 if (TREE_CODE (fld) == FIELD_DECL)
897 tree ft = TREE_TYPE (fld);
899 if (DECL_BIT_FIELD (fld))
900 return false;
902 if (!is_gimple_reg_type (ft)
903 && !type_consists_of_records_p (ft))
904 return false;
907 return true;
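/* Illustrative examples for the predicate above:

     struct ok  { int a; struct { float x; float y; } p; };  qualifies
     struct bad { int a : 5; unsigned b : 27; };             bit-fields
     union u    { int a; float f; };                         not a RECORD_TYPE

   only "struct ok" satisfies type_consists_of_records_p.  */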
910 /* Create total_scalarization accesses for all scalar type fields in DECL, which
911 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
912 must be the top-most VAR_DECL representing the variable, OFFSET must be the
913 offset of DECL within BASE. REF must be the memory reference expression for
914 the given decl. */
916 static void
917 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
918 tree ref)
920 tree fld, decl_type = TREE_TYPE (decl);
922 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
923 if (TREE_CODE (fld) == FIELD_DECL)
925 HOST_WIDE_INT pos = offset + int_bit_position (fld);
926 tree ft = TREE_TYPE (fld);
927 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
928 NULL_TREE);
930 if (is_gimple_reg_type (ft))
932 struct access *access;
933 HOST_WIDE_INT size;
935 size = tree_low_cst (DECL_SIZE (fld), 1);
936 access = create_access_1 (base, pos, size);
937 access->expr = nref;
938 access->type = ft;
939 access->grp_total_scalarization = 1;
940 /* Accesses for intraprocedural SRA can have their stmt NULL. */
942 else
943 completely_scalarize_record (base, fld, pos, nref);
947 /* Create total_scalarization accesses for all scalar type fields in VAR and
948 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
949 type_consists_of_records_p. */
951 static void
952 completely_scalarize_var (tree var)
954 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
955 struct access *access;
957 access = create_access_1 (var, 0, size);
958 access->expr = var;
959 access->type = TREE_TYPE (var);
960 access->grp_total_scalarization = 1;
962 completely_scalarize_record (var, var, 0, var);
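/* For example (a sketch, assuming 32-bit int): for

     struct pt { int x; int y; } var;

   this creates an access <0, 64> for VAR as a whole plus accesses
   <0, 32> and <32, 32> for var.x and var.y, all of them with
   grp_total_scalarization set.  */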
965 /* Search the given tree for a declaration by skipping handled components and
966 exclude it from the candidates. */
968 static void
969 disqualify_base_of_expr (tree t, const char *reason)
971 t = get_base_address (t);
972 if (sra_mode == SRA_MODE_EARLY_IPA
973 && TREE_CODE (t) == MEM_REF)
974 t = get_ssa_base_param (TREE_OPERAND (t, 0));
976 if (t && DECL_P (t))
977 disqualify_candidate (t, reason);
980 /* Scan expression EXPR and create access structures for all accesses to
981 candidates for scalarization. Return the created access or NULL if none is
982 created. */
984 static struct access *
985 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
987 struct access *ret = NULL;
988 bool partial_ref;
990 if (TREE_CODE (expr) == BIT_FIELD_REF
991 || TREE_CODE (expr) == IMAGPART_EXPR
992 || TREE_CODE (expr) == REALPART_EXPR)
994 expr = TREE_OPERAND (expr, 0);
995 partial_ref = true;
997 else
998 partial_ref = false;
1000 /* We need to dive through V_C_Es in order to get the size of their parameter
1001 and not the result type. Ada produces such statements. We are also
1002 capable of handling the topmost V_C_E but not any of those buried in other
1003 handled components. */
1004 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1005 expr = TREE_OPERAND (expr, 0);
1007 if (contains_view_convert_expr_p (expr))
1009 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1010 "component.");
1011 return NULL;
1014 switch (TREE_CODE (expr))
1016 case MEM_REF:
1017 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1018 && sra_mode != SRA_MODE_EARLY_IPA)
1019 return NULL;
1020 /* fall through */
1021 case VAR_DECL:
1022 case PARM_DECL:
1023 case RESULT_DECL:
1024 case COMPONENT_REF:
1025 case ARRAY_REF:
1026 case ARRAY_RANGE_REF:
1027 ret = create_access (expr, stmt, write);
1028 break;
1030 default:
1031 break;
1034 if (write && partial_ref && ret)
1035 ret->grp_partial_lhs = 1;
1037 return ret;
1040 /* Scan expression EXPR and create access structures for all accesses to
1041 candidates for scalarization. Return true if any access has been inserted.
1042 STMT must be the statement from which the expression is taken, WRITE must be
1043 true if the expression is a store and false otherwise. */
1045 static bool
1046 build_access_from_expr (tree expr, gimple stmt, bool write)
1048 struct access *access;
1050 access = build_access_from_expr_1 (expr, stmt, write);
1051 if (access)
1053 /* This means the aggregate is accessed as a whole in a way other than an
1054 assign statement and thus cannot be removed even if we had a scalar
1055 replacement for everything. */
1056 if (cannot_scalarize_away_bitmap)
1057 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1058 return true;
1060 return false;
1063 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1064 modes in which it matters; return true iff they have been disqualified. RHS
1065 may be NULL, in that case ignore it. If we scalarize an aggregate in
1066 intra-SRA we may need to add statements after each statement. This is not
1067 possible if a statement unconditionally has to end the basic block. */
1068 static bool
1069 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1071 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1072 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1074 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1075 if (rhs)
1076 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1077 return true;
1079 return false;
1082 /* Scan expressions occurring in STMT, create access structures for all accesses
1083 to candidates for scalarization and remove those candidates which occur in
1084 statements or expressions that prevent them from being split apart. Return
1085 true if any access has been inserted. */
1087 static bool
1088 build_accesses_from_assign (gimple stmt)
1090 tree lhs, rhs;
1091 struct access *lacc, *racc;
1093 if (!gimple_assign_single_p (stmt)
1094 /* Scope clobbers don't influence scalarization. */
1095 || gimple_clobber_p (stmt))
1096 return false;
1098 lhs = gimple_assign_lhs (stmt);
1099 rhs = gimple_assign_rhs1 (stmt);
1101 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1102 return false;
1104 racc = build_access_from_expr_1 (rhs, stmt, false);
1105 lacc = build_access_from_expr_1 (lhs, stmt, true);
1107 if (lacc)
1108 lacc->grp_assignment_write = 1;
1110 if (racc)
1112 racc->grp_assignment_read = 1;
1113 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1114 && !is_gimple_reg_type (racc->type))
1115 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1118 if (lacc && racc
1119 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1120 && !lacc->grp_unscalarizable_region
1121 && !racc->grp_unscalarizable_region
1122 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1123 && lacc->size == racc->size
1124 && useless_type_conversion_p (lacc->type, racc->type))
1126 struct assign_link *link;
1128 link = (struct assign_link *) pool_alloc (link_pool);
1129 memset (link, 0, sizeof (struct assign_link));
1131 link->lacc = lacc;
1132 link->racc = racc;
1134 add_link_to_rhs (racc, link);
1137 return lacc || racc;
1140 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1141 GIMPLE_ASM operands with memory constraints which cannot be scalarized.
1143 static bool
1144 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1145 void *data ATTRIBUTE_UNUSED)
1147 op = get_base_address (op);
1148 if (op
1149 && DECL_P (op))
1150 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1152 return false;
1155 /* Return true iff callsite CALL has at least as many actual arguments as there
1156 are formal parameters of the function currently processed by IPA-SRA. */
1158 static inline bool
1159 callsite_has_enough_arguments_p (gimple call)
1161 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1164 /* Scan the function and look for interesting expressions and create access
1165 structures for them. Return true iff any access is created. */
1167 static bool
1168 scan_function (void)
1170 basic_block bb;
1171 bool ret = false;
1173 FOR_EACH_BB (bb)
1175 gimple_stmt_iterator gsi;
1176 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1178 gimple stmt = gsi_stmt (gsi);
1179 tree t;
1180 unsigned i;
1182 if (final_bbs && stmt_can_throw_external (stmt))
1183 bitmap_set_bit (final_bbs, bb->index);
1184 switch (gimple_code (stmt))
1186 case GIMPLE_RETURN:
1187 t = gimple_return_retval (stmt);
1188 if (t != NULL_TREE)
1189 ret |= build_access_from_expr (t, stmt, false);
1190 if (final_bbs)
1191 bitmap_set_bit (final_bbs, bb->index);
1192 break;
1194 case GIMPLE_ASSIGN:
1195 ret |= build_accesses_from_assign (stmt);
1196 break;
1198 case GIMPLE_CALL:
1199 for (i = 0; i < gimple_call_num_args (stmt); i++)
1200 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1201 stmt, false);
1203 if (sra_mode == SRA_MODE_EARLY_IPA)
1205 tree dest = gimple_call_fndecl (stmt);
1206 int flags = gimple_call_flags (stmt);
1208 if (dest)
1210 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1211 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1212 encountered_apply_args = true;
1213 if (cgraph_get_node (dest)
1214 == cgraph_get_node (current_function_decl))
1216 encountered_recursive_call = true;
1217 if (!callsite_has_enough_arguments_p (stmt))
1218 encountered_unchangable_recursive_call = true;
1222 if (final_bbs
1223 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1224 bitmap_set_bit (final_bbs, bb->index);
1227 t = gimple_call_lhs (stmt);
1228 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1229 ret |= build_access_from_expr (t, stmt, true);
1230 break;
1232 case GIMPLE_ASM:
1233 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1234 asm_visit_addr);
1235 if (final_bbs)
1236 bitmap_set_bit (final_bbs, bb->index);
1238 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1240 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1241 ret |= build_access_from_expr (t, stmt, false);
1243 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1245 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1246 ret |= build_access_from_expr (t, stmt, true);
1248 break;
1250 default:
1251 break;
1256 return ret;
1259 /* Helper of qsort. The array contains pointers to accesses. An access is
1260 considered smaller than another if it has a smaller offset or if the
1261 offsets are the same but its size is bigger. */
1263 static int
1264 compare_access_positions (const void *a, const void *b)
1266 const access_p *fp1 = (const access_p *) a;
1267 const access_p *fp2 = (const access_p *) b;
1268 const access_p f1 = *fp1;
1269 const access_p f2 = *fp2;
1271 if (f1->offset != f2->offset)
1272 return f1->offset < f2->offset ? -1 : 1;
1274 if (f1->size == f2->size)
1276 if (f1->type == f2->type)
1277 return 0;
1278 /* Put any non-aggregate type before any aggregate type. */
1279 else if (!is_gimple_reg_type (f1->type)
1280 && is_gimple_reg_type (f2->type))
1281 return 1;
1282 else if (is_gimple_reg_type (f1->type)
1283 && !is_gimple_reg_type (f2->type))
1284 return -1;
1285 /* Put any complex or vector type before any other scalar type. */
1286 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1287 && TREE_CODE (f1->type) != VECTOR_TYPE
1288 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1289 || TREE_CODE (f2->type) == VECTOR_TYPE))
1290 return 1;
1291 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1292 || TREE_CODE (f1->type) == VECTOR_TYPE)
1293 && TREE_CODE (f2->type) != COMPLEX_TYPE
1294 && TREE_CODE (f2->type) != VECTOR_TYPE)
1295 return -1;
1296 /* Put the integral type with the bigger precision first. */
1297 else if (INTEGRAL_TYPE_P (f1->type)
1298 && INTEGRAL_TYPE_P (f2->type))
1299 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1300 /* Put any integral type with non-full precision last. */
1301 else if (INTEGRAL_TYPE_P (f1->type)
1302 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1303 != TYPE_PRECISION (f1->type)))
1304 return 1;
1305 else if (INTEGRAL_TYPE_P (f2->type)
1306 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1307 != TYPE_PRECISION (f2->type)))
1308 return -1;
1309 /* Stabilize the sort. */
1310 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1313 /* We want the bigger accesses first, thus the opposite operator in the next
1314 line: */
1315 return f1->size > f2->size ? -1 : 1;
1319 /* Append the name of the declaration to the name obstack. A helper function for
1320 make_fancy_name. */
1322 static void
1323 make_fancy_decl_name (tree decl)
1325 char buffer[32];
1327 tree name = DECL_NAME (decl);
1328 if (name)
1329 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1330 IDENTIFIER_LENGTH (name));
1331 else
1333 sprintf (buffer, "D%u", DECL_UID (decl));
1334 obstack_grow (&name_obstack, buffer, strlen (buffer));
1338 /* Helper for make_fancy_name. */
1340 static void
1341 make_fancy_name_1 (tree expr)
1343 char buffer[32];
1344 tree index;
1346 if (DECL_P (expr))
1348 make_fancy_decl_name (expr);
1349 return;
1352 switch (TREE_CODE (expr))
1354 case COMPONENT_REF:
1355 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1356 obstack_1grow (&name_obstack, '$');
1357 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1358 break;
1360 case ARRAY_REF:
1361 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1362 obstack_1grow (&name_obstack, '$');
1363 /* Arrays with only one element may not have a constant as their
1364 index. */
1365 index = TREE_OPERAND (expr, 1);
1366 if (TREE_CODE (index) != INTEGER_CST)
1367 break;
1368 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1369 obstack_grow (&name_obstack, buffer, strlen (buffer));
1370 break;
1372 case ADDR_EXPR:
1373 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1374 break;
1376 case MEM_REF:
1377 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1378 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1380 obstack_1grow (&name_obstack, '$');
1381 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1382 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1383 obstack_grow (&name_obstack, buffer, strlen (buffer));
1385 break;
1387 case BIT_FIELD_REF:
1388 case REALPART_EXPR:
1389 case IMAGPART_EXPR:
1390 gcc_unreachable (); /* we treat these as scalars. */
1391 break;
1392 default:
1393 break;
1397 /* Create a human readable name for a replacement variable based on EXPR. */
1399 static char *
1400 make_fancy_name (tree expr)
1402 make_fancy_name_1 (expr);
1403 obstack_1grow (&name_obstack, '\0');
1404 return XOBFINISH (&name_obstack, char *);
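/* Hypothetical examples of the names built above: for the expression
   a.data[3].len the obstack receives "a$data$3$len", and for a MEM_REF
   of P at byte offset 8 it receives "p$8".  create_access_replacement
   below uses the result as the DECL_NAME of the new scalar.  */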
1407 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1408 EXP_TYPE at the given OFFSET. If BASE is something for which
1409 get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
1410 to insert new statements either before or below the current one as specified
1411 by INSERT_AFTER. This function is not capable of handling bitfields. */
1413 tree
1414 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1415 tree exp_type, gimple_stmt_iterator *gsi,
1416 bool insert_after)
1418 tree prev_base = base;
1419 tree off;
1420 HOST_WIDE_INT base_offset;
1421 unsigned HOST_WIDE_INT misalign;
1422 unsigned int align;
1424 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1426 base = get_addr_base_and_unit_offset (base, &base_offset);
1428 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1429 offset such as array[var_index]. */
1430 if (!base)
1432 gimple stmt;
1433 tree tmp, addr;
1435 gcc_checking_assert (gsi);
1436 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1437 add_referenced_var (tmp);
1438 tmp = make_ssa_name (tmp, NULL);
1439 addr = build_fold_addr_expr (unshare_expr (prev_base));
1440 STRIP_USELESS_TYPE_CONVERSION (addr);
1441 stmt = gimple_build_assign (tmp, addr);
1442 gimple_set_location (stmt, loc);
1443 SSA_NAME_DEF_STMT (tmp) = stmt;
1444 if (insert_after)
1445 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1446 else
1447 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1448 update_stmt (stmt);
1450 off = build_int_cst (reference_alias_ptr_type (prev_base),
1451 offset / BITS_PER_UNIT);
1452 base = tmp;
1454 else if (TREE_CODE (base) == MEM_REF)
1456 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1457 base_offset + offset / BITS_PER_UNIT);
1458 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1459 base = unshare_expr (TREE_OPERAND (base, 0));
1461 else
1463 off = build_int_cst (reference_alias_ptr_type (base),
1464 base_offset + offset / BITS_PER_UNIT);
1465 base = build_fold_addr_expr (unshare_expr (base));
1468 /* If prev_base were always an originally performed access
1469 we could extract more optimistic alignment information
1470 by looking at the access mode. That would constrain the
1471 alignment of base + base_offset which we would need to
1472 adjust according to offset. */
1473 if (!get_pointer_alignment_1 (base, &align, &misalign))
1475 gcc_assert (misalign == 0);
1476 if (TREE_CODE (prev_base) == MEM_REF
1477 || TREE_CODE (prev_base) == TARGET_MEM_REF)
1478 align = TYPE_ALIGN (TREE_TYPE (prev_base));
1480 misalign += (double_int_sext (tree_to_double_int (off),
1481 TYPE_PRECISION (TREE_TYPE (off))).low
1482 * BITS_PER_UNIT);
1483 misalign = misalign & (align - 1);
1484 if (misalign != 0)
1485 align = (misalign & -misalign);
1486 if (align < TYPE_ALIGN (exp_type))
1487 exp_type = build_aligned_type (exp_type, align);
1489 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
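/* A sketch of the common case, assuming BASE is a declaration S of some
   struct type and OFFSET is 64: get_addr_base_and_unit_offset succeeds
   and the function builds, conceptually,

     MEM_REF <EXP_TYPE> [&s, 8]

   i.e. a MEM_REF of EXP_TYPE at byte offset 8 from the address of S,
   with EXP_TYPE possibly rebuilt with a lower alignment when the
   computed misalignment requires it.  */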
1492 /* Construct a memory reference to a part of an aggregate BASE at the given
1493 OFFSET and of the same type as MODEL. In case this is a reference to a
1494 bit-field, the function will replicate the last component_ref of model's
1495 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1496 build_ref_for_offset. */
1498 static tree
1499 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1500 struct access *model, gimple_stmt_iterator *gsi,
1501 bool insert_after)
1503 if (TREE_CODE (model->expr) == COMPONENT_REF
1504 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1506 /* This access represents a bit-field. */
1507 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1509 offset -= int_bit_position (fld);
1510 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1511 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1512 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1513 NULL_TREE);
1515 else
1516 return build_ref_for_offset (loc, base, offset, model->type,
1517 gsi, insert_after);
1520 /* Construct a memory reference consisting of component_refs and array_refs to
1521 a part of an aggregate *RES (which is of type TYPE). The requested part
1522 should have type EXP_TYPE at the given OFFSET. This function might not
1523 succeed; it returns true when it does, and only then *RES points to something
1524 meaningful. This function should be used only to build expressions that we
1525 might need to present to the user (e.g. in warnings). In all other situations,
1526 build_ref_for_model or build_ref_for_offset should be used instead. */
1528 static bool
1529 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1530 tree exp_type)
1532 while (1)
1534 tree fld;
1535 tree tr_size, index, minidx;
1536 HOST_WIDE_INT el_size;
1538 if (offset == 0 && exp_type
1539 && types_compatible_p (exp_type, type))
1540 return true;
1542 switch (TREE_CODE (type))
1544 case UNION_TYPE:
1545 case QUAL_UNION_TYPE:
1546 case RECORD_TYPE:
1547 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1549 HOST_WIDE_INT pos, size;
1550 tree tr_pos, expr, *expr_ptr;
1552 if (TREE_CODE (fld) != FIELD_DECL)
1553 continue;
1555 tr_pos = bit_position (fld);
1556 if (!tr_pos || !host_integerp (tr_pos, 1))
1557 continue;
1558 pos = TREE_INT_CST_LOW (tr_pos);
1559 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1560 tr_size = DECL_SIZE (fld);
1561 if (!tr_size || !host_integerp (tr_size, 1))
1562 continue;
1563 size = TREE_INT_CST_LOW (tr_size);
1564 if (size == 0)
1566 if (pos != offset)
1567 continue;
1569 else if (pos > offset || (pos + size) <= offset)
1570 continue;
1572 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1573 NULL_TREE);
1574 expr_ptr = &expr;
1575 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1576 offset - pos, exp_type))
1578 *res = expr;
1579 return true;
1582 return false;
1584 case ARRAY_TYPE:
1585 tr_size = TYPE_SIZE (TREE_TYPE (type));
1586 if (!tr_size || !host_integerp (tr_size, 1))
1587 return false;
1588 el_size = tree_low_cst (tr_size, 1);
1590 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1591 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1592 return false;
1593 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1594 if (!integer_zerop (minidx))
1595 index = int_const_binop (PLUS_EXPR, index, minidx);
1596 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1597 NULL_TREE, NULL_TREE);
1598 offset = offset % el_size;
1599 type = TREE_TYPE (type);
1600 break;
1602 default:
1603 if (offset != 0)
1604 return false;
1606 if (exp_type)
1607 return false;
1608 else
1609 return true;
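/* Illustrative uses of the function above, assuming 32-bit int and TYPE

     struct { int a[4]; struct { short s; } t; }

   OFFSET 64 with EXP_TYPE int turns *RES into (*RES).a[2]; OFFSET 128
   with EXP_TYPE short turns it into (*RES).t.s; an OFFSET landing in
   padding, or an incompatible EXP_TYPE, makes the function return
   false.  */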
1614 /* Return true iff TYPE is the stdarg va_list type. */
1616 static inline bool
1617 is_va_list_type (tree type)
1619 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1622 /* Print a message to the dump file saying why a variable was rejected. */
1624 static void
1625 reject (tree var, const char *msg)
1627 if (dump_file && (dump_flags & TDF_DETAILS))
1629 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1630 print_generic_expr (dump_file, var, 0);
1631 fprintf (dump_file, "\n");
1635 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1636 those variables whose type is suitable for scalarization. */
1638 static bool
1639 find_var_candidates (void)
1641 tree var, type;
1642 referenced_var_iterator rvi;
1643 bool ret = false;
1644 const char *msg;
1646 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1648 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1649 continue;
1650 type = TREE_TYPE (var);
1652 if (!AGGREGATE_TYPE_P (type))
1654 reject (var, "not aggregate");
1655 continue;
1657 if (needs_to_live_in_memory (var))
1659 reject (var, "needs to live in memory");
1660 continue;
1662 if (TREE_THIS_VOLATILE (var))
1664 reject (var, "is volatile");
1665 continue;
1667 if (!COMPLETE_TYPE_P (type))
1669 reject (var, "has incomplete type");
1670 continue;
1672 if (!host_integerp (TYPE_SIZE (type), 1))
1674 reject (var, "type size not fixed");
1675 continue;
1677 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1679 reject (var, "type size is zero");
1680 continue;
1682 if (type_internals_preclude_sra_p (type, &msg))
1684 reject (var, msg);
1685 continue;
1687 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1688 we also want to schedule it rather late. Thus we ignore it in
1689 the early pass. */
1690 (sra_mode == SRA_MODE_EARLY_INTRA
1691 && is_va_list_type (type)))
1693 reject (var, "is va_list");
1694 continue;
1697 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1699 if (dump_file && (dump_flags & TDF_DETAILS))
1701 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1702 print_generic_expr (dump_file, var, 0);
1703 fprintf (dump_file, "\n");
1705 ret = true;
1708 return ret;
1711 /* Sort all accesses for the given variable, check for partial overlaps and
1712 return NULL if there are any. If there are none, pick a representative for
1713 each combination of offset and size and create a linked list out of them.
1714 Return the pointer to the first representative and make sure it is the first
1715 one in the vector of accesses. */
1717 static struct access *
1718 sort_and_splice_var_accesses (tree var)
1720 int i, j, access_count;
1721 struct access *res, **prev_acc_ptr = &res;
1722 VEC (access_p, heap) *access_vec;
1723 bool first = true;
1724 HOST_WIDE_INT low = -1, high = 0;
1726 access_vec = get_base_access_vector (var);
1727 if (!access_vec)
1728 return NULL;
1729 access_count = VEC_length (access_p, access_vec);
1731 /* Sort by <OFFSET, SIZE>. */
1732 VEC_qsort (access_p, access_vec, compare_access_positions);
1734 i = 0;
1735 while (i < access_count)
1737 struct access *access = VEC_index (access_p, access_vec, i);
1738 bool grp_write = access->write;
1739 bool grp_read = !access->write;
1740 bool grp_scalar_write = access->write
1741 && is_gimple_reg_type (access->type);
1742 bool grp_scalar_read = !access->write
1743 && is_gimple_reg_type (access->type);
1744 bool grp_assignment_read = access->grp_assignment_read;
1745 bool grp_assignment_write = access->grp_assignment_write;
1746 bool multiple_scalar_reads = false;
1747 bool total_scalarization = access->grp_total_scalarization;
1748 bool grp_partial_lhs = access->grp_partial_lhs;
1749 bool first_scalar = is_gimple_reg_type (access->type);
1750 bool unscalarizable_region = access->grp_unscalarizable_region;
1752 if (first || access->offset >= high)
1754 first = false;
1755 low = access->offset;
1756 high = access->offset + access->size;
1758 else if (access->offset > low && access->offset + access->size > high)
1759 return NULL;
1760 else
1761 gcc_assert (access->offset >= low
1762 && access->offset + access->size <= high);
1764 j = i + 1;
1765 while (j < access_count)
1767 struct access *ac2 = VEC_index (access_p, access_vec, j);
1768 if (ac2->offset != access->offset || ac2->size != access->size)
1769 break;
1770 if (ac2->write)
1772 grp_write = true;
1773 grp_scalar_write = (grp_scalar_write
1774 || is_gimple_reg_type (ac2->type));
1776 else
1778 grp_read = true;
1779 if (is_gimple_reg_type (ac2->type))
1781 if (grp_scalar_read)
1782 multiple_scalar_reads = true;
1783 else
1784 grp_scalar_read = true;
1787 grp_assignment_read |= ac2->grp_assignment_read;
1788 grp_assignment_write |= ac2->grp_assignment_write;
1789 grp_partial_lhs |= ac2->grp_partial_lhs;
1790 unscalarizable_region |= ac2->grp_unscalarizable_region;
1791 total_scalarization |= ac2->grp_total_scalarization;
1792 relink_to_new_repr (access, ac2);
1794 /* If there are both aggregate-type and scalar-type accesses with
1795 this combination of size and offset, the comparison function
1796 should have put the scalars first. */
1797 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1798 ac2->group_representative = access;
1799 j++;
1802 i = j;
1804 access->group_representative = access;
1805 access->grp_write = grp_write;
1806 access->grp_read = grp_read;
1807 access->grp_scalar_read = grp_scalar_read;
1808 access->grp_scalar_write = grp_scalar_write;
1809 access->grp_assignment_read = grp_assignment_read;
1810 access->grp_assignment_write = grp_assignment_write;
1811 access->grp_hint = multiple_scalar_reads || total_scalarization;
1812 access->grp_total_scalarization = total_scalarization;
1813 access->grp_partial_lhs = grp_partial_lhs;
1814 access->grp_unscalarizable_region = unscalarizable_region;
1815 if (access->first_link)
1816 add_access_to_work_queue (access);
1818 *prev_acc_ptr = access;
1819 prev_acc_ptr = &access->next_grp;
1822 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1823 return res;
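/* An example of the overlap check above: accesses <0, 64> and <32, 64>
   to the same variable overlap only partially, because the second starts
   inside the first (offset > low) yet ends beyond it (offset + size >
   high), so the function returns NULL and the variable stays
   unscalarized.  Accesses <0, 64> and <32, 32>, by contrast, nest
   cleanly and are kept.  */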
1826 /* Create a variable for the given ACCESS which determines the type, name and a
1827 few other properties. Return the variable declaration and store it also to
1828 ACCESS->replacement. */
1830 static tree
1831 create_access_replacement (struct access *access)
1833 tree repl;
1835 repl = create_tmp_var (access->type, "SR");
1836 add_referenced_var (repl);
1838 if (TREE_CODE (access->type) == COMPLEX_TYPE
1839 || TREE_CODE (access->type) == VECTOR_TYPE)
1841 if (!access->grp_partial_lhs)
1842 DECL_GIMPLE_REG_P (repl) = 1;
1844 else if (access->grp_partial_lhs
1845 && is_gimple_reg_type (access->type))
1846 TREE_ADDRESSABLE (repl) = 1;
1848 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1849 DECL_ARTIFICIAL (repl) = 1;
1850 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1852 if (DECL_NAME (access->base)
1853 && !DECL_IGNORED_P (access->base)
1854 && !DECL_ARTIFICIAL (access->base))
1856 char *pretty_name = make_fancy_name (access->expr);
1857 tree debug_expr = unshare_expr (access->expr), d;
1859 DECL_NAME (repl) = get_identifier (pretty_name);
1860 obstack_free (&name_obstack, pretty_name);
1862 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1863 as DECL_DEBUG_EXPR isn't considered when looking for still
1864 used SSA_NAMEs and thus they could be freed. All debug info
1865 generation cares about is whether something is constant or variable
1866 and that get_ref_base_and_extent works properly on the
1867 expression. */
1868 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1869 switch (TREE_CODE (d))
1871 case ARRAY_REF:
1872 case ARRAY_RANGE_REF:
1873 if (TREE_OPERAND (d, 1)
1874 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1875 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1876 if (TREE_OPERAND (d, 3)
1877 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1878 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1879 /* FALLTHRU */
1880 case COMPONENT_REF:
1881 if (TREE_OPERAND (d, 2)
1882 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1883 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1884 break;
1885 default:
1886 break;
1888 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1889 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1890 if (access->grp_no_warning)
1891 TREE_NO_WARNING (repl) = 1;
1892 else
1893 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1895 else
1896 TREE_NO_WARNING (repl) = 1;
1898 if (dump_file)
1900 fprintf (dump_file, "Created a replacement for ");
1901 print_generic_expr (dump_file, access->base, 0);
1902 fprintf (dump_file, " offset: %u, size: %u: ",
1903 (unsigned) access->offset, (unsigned) access->size);
1904 print_generic_expr (dump_file, repl, 0);
1905 fprintf (dump_file, "\n");
1907 sra_stats.replacements++;
1909 return repl;
1912 /* Return the scalar replacement of ACCESS, creating it if it does not exist yet. */
1914 static inline tree
1915 get_access_replacement (struct access *access)
1917 if (!access->replacement_decl)
1918 access->replacement_decl = create_access_replacement (access);
1919 return access->replacement_decl;
1923 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1924 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1925 to by it is not "within" the root. Return false iff some accesses partially
1926 overlap. */
1928 static bool
1929 build_access_subtree (struct access **access)
1931 struct access *root = *access, *last_child = NULL;
1932 HOST_WIDE_INT limit = root->offset + root->size;
1934 *access = (*access)->next_grp;
1935 while (*access && (*access)->offset + (*access)->size <= limit)
1937 if (!last_child)
1938 root->first_child = *access;
1939 else
1940 last_child->next_sibling = *access;
1941 last_child = *access;
1943 if (!build_access_subtree (access))
1944 return false;
1947 if (*access && (*access)->offset < limit)
1948 return false;
1950 return true;
1953 /* Build a tree of access representatives, ACCESS is the pointer to the first
1954 one, others are linked in a list by the next_grp field. Return false iff
1955 some accesses partially overlap. */
1957 static bool
1958 build_access_trees (struct access *access)
1960 while (access)
1962 struct access *root = access;
1964 if (!build_access_subtree (&access))
1965 return false;
1966 root->next_grp = access;
1968 return true;
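/* Illustrative sketch (hypothetical declarations; sizes assume a typical
   LP64 target): for a candidate such as

     struct inner { int a; int b; };
     struct outer { struct inner in; long l; } o;

   the sorted representatives might be

     o       offset  0  size 128
     o.in    offset  0  size  64
     o.in.a  offset  0  size  32
     o.in.b  offset 32  size  32
     o.l     offset 64  size  64

   and build_access_trees links o.in.a and o.in.b as children of o.in,
   which together with o.l becomes a child of o.  */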
1971 /* Return true if EXPR contains some ARRAY_REFs into an array with a variable
1972 lower bound. */
1974 static bool
1975 expr_with_var_bounded_array_refs_p (tree expr)
1977 while (handled_component_p (expr))
1979 if (TREE_CODE (expr) == ARRAY_REF
1980 && !host_integerp (array_ref_low_bound (expr), 0))
1981 return true;
1982 expr = TREE_OPERAND (expr, 0);
1984 return false;
1987 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1988 they seem beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
1989 sorts of access flags appropriately along the way, notably propagating
1990 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
1991 grp_total_scalarization from PARENT when it is set.
1993 Creating a replacement for a scalar access is considered beneficial if its
1994 grp_hint is set (this means we are either attempting total scalarization or
1995 there is more than one direct read access) or according to the following
1996 table:
1998 Access written to through a scalar type (once or more times)
2000 |  Written to in an assignment statement
2002 |  |  Access read as scalar _once_
2003 |  |  |
2004 |  |  |  Read in an assignment statement
2005 |  |  |  |
2006 |  |  |  |  Scalarize  Comment
2007 -----------------------------------------------------------------------------
2008 0  0  0  0             No access for the scalar
2009 0  0  0  1             No access for the scalar
2010 0  0  1  0  No         Single read - won't help
2011 0  0  1  1  No         The same case
2012 0  1  0  0             No access for the scalar
2013 0  1  0  1             No access for the scalar
2014 0  1  1  0  Yes        s = *g; return s.i;
2015 0  1  1  1  Yes        The same case as above
2016 1  0  0  0  No         Won't help
2017 1  0  0  1  Yes        s.i = 1; *g = s;
2018 1  0  1  0  Yes        s.i = 5; g = s.i;
2019 1  0  1  1  Yes        The same case as above
2020 1  1  0  0  No         Won't help.
2021 1  1  0  1  Yes        s.i = 1; *g = s;
2022 1  1  1  0  Yes        s = *g; return s.i;
2023 1  1  1  1  Yes        Any of the above yeses */
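/* Illustrative sketch (hypothetical user code): the row "0 1 1 0 -> Yes"
   above corresponds to something like

     struct S { int i; int j; };

     int
     foo (struct S *g)
     {
       struct S s;
       s = *g;        <- written to in an assignment statement
       return s.i;    <- read as a scalar exactly once
     }

   where creating a scalar replacement for s.i allows the aggregate copy
   to be split up and s itself to eventually disappear.  */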
2025 static bool
2026 analyze_access_subtree (struct access *root, struct access *parent,
2027 bool allow_replacements)
2029 struct access *child;
2030 HOST_WIDE_INT limit = root->offset + root->size;
2031 HOST_WIDE_INT covered_to = root->offset;
2032 bool scalar = is_gimple_reg_type (root->type);
2033 bool hole = false, sth_created = false;
2035 if (parent)
2037 if (parent->grp_read)
2038 root->grp_read = 1;
2039 if (parent->grp_assignment_read)
2040 root->grp_assignment_read = 1;
2041 if (parent->grp_write)
2042 root->grp_write = 1;
2043 if (parent->grp_assignment_write)
2044 root->grp_assignment_write = 1;
2045 if (parent->grp_total_scalarization)
2046 root->grp_total_scalarization = 1;
2049 if (root->grp_unscalarizable_region)
2050 allow_replacements = false;
2052 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2053 allow_replacements = false;
2055 for (child = root->first_child; child; child = child->next_sibling)
2057 hole |= covered_to < child->offset;
2058 sth_created |= analyze_access_subtree (child, root,
2059 allow_replacements && !scalar);
2061 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2062 root->grp_total_scalarization &= child->grp_total_scalarization;
2063 if (child->grp_covered)
2064 covered_to += child->size;
2065 else
2066 hole = true;
2069 if (allow_replacements && scalar && !root->first_child
2070 && (root->grp_hint
2071 || ((root->grp_scalar_read || root->grp_assignment_read)
2072 && (root->grp_scalar_write || root->grp_assignment_write))))
2074 bool new_integer_type;
2075 /* Always create access replacements that cover the whole access.
2076 For integral types this means the precision has to match.
2077 Avoid assumptions based on the integral type kind, too. */
2078 if (INTEGRAL_TYPE_P (root->type)
2079 && (TREE_CODE (root->type) != INTEGER_TYPE
2080 || TYPE_PRECISION (root->type) != root->size)
2081 /* But leave bitfield accesses alone. */
2082 && (TREE_CODE (root->expr) != COMPONENT_REF
2083 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2085 tree rt = root->type;
2086 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2087 && (root->size % BITS_PER_UNIT) == 0);
2088 root->type = build_nonstandard_integer_type (root->size,
2089 TYPE_UNSIGNED (rt));
2090 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2091 root->base, root->offset,
2092 root->type, NULL, false);
2093 new_integer_type = true;
2095 else
2096 new_integer_type = false;
2098 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (dump_file, "Marking ");
2101 print_generic_expr (dump_file, root->base, 0);
2102 fprintf (dump_file, " offset: %u, size: %u ",
2103 (unsigned) root->offset, (unsigned) root->size);
2104 fprintf (dump_file, " to be replaced%s.\n",
2105 new_integer_type ? " with an integer": "");
2108 root->grp_to_be_replaced = 1;
2109 sth_created = true;
2110 hole = false;
2112 else
2114 if (covered_to < limit)
2115 hole = true;
2116 if (scalar)
2117 root->grp_total_scalarization = 0;
2120 if (sth_created
2121 && (!hole || root->grp_total_scalarization))
2123 root->grp_covered = 1;
2124 return true;
2126 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2127 root->grp_unscalarized_data = 1; /* not covered and written to */
2128 if (sth_created)
2129 return true;
2130 return false;
2133 /* Analyze all access trees linked by next_grp by the means of
2134 analyze_access_subtree. */
2135 static bool
2136 analyze_access_trees (struct access *access)
2138 bool ret = false;
2140 while (access)
2142 if (analyze_access_subtree (access, NULL, true))
2143 ret = true;
2144 access = access->next_grp;
2147 return ret;
2150 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2151 SIZE would conflict with an already existing one. If exactly such a child
2152 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2154 static bool
2155 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2156 HOST_WIDE_INT size, struct access **exact_match)
2158 struct access *child;
2160 for (child = lacc->first_child; child; child = child->next_sibling)
2162 if (child->offset == norm_offset && child->size == size)
2164 *exact_match = child;
2165 return true;
2168 if (child->offset < norm_offset + size
2169 && child->offset + child->size > norm_offset)
2170 return true;
2173 return false;
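/* Illustrative sketch with made-up numbers: if LACC already has a child at
   offset 0 with size 32, a potential new child at NORM_OFFSET 16 with size
   32 satisfies both

     child->offset (0) < norm_offset + size (48)
     child->offset + child->size (32) > norm_offset (16)

   and therefore conflicts, whereas one at offset 32 with size 32 starts
   exactly where the existing child ends and is not a conflict.  */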
2176 /* Create a new child access of PARENT, with all properties just like MODEL
2177 except for its offset and with its grp_write true and grp_read false.
2178 Return the new access or NULL if it cannot be created. Note that this access
2179 is created long after all splicing and sorting; it is not located in any
2180 access vector and is automatically a representative of its group. */
2182 static struct access *
2183 create_artificial_child_access (struct access *parent, struct access *model,
2184 HOST_WIDE_INT new_offset)
2186 struct access *access;
2187 struct access **child;
2188 tree expr = parent->base;
2190 gcc_assert (!model->grp_unscalarizable_region);
2192 access = (struct access *) pool_alloc (access_pool);
2193 memset (access, 0, sizeof (struct access));
2194 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2195 model->type))
2197 access->grp_no_warning = true;
2198 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2199 new_offset, model, NULL, false);
2202 access->base = parent->base;
2203 access->expr = expr;
2204 access->offset = new_offset;
2205 access->size = model->size;
2206 access->type = model->type;
2207 access->grp_write = true;
2208 access->grp_read = false;
2210 child = &parent->first_child;
2211 while (*child && (*child)->offset < new_offset)
2212 child = &(*child)->next_sibling;
2214 access->next_sibling = *child;
2215 *child = access;
2217 return access;
2221 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2222 true if any new subaccess was created. Additionally, if RACC is a scalar
2223 access but LACC is not, change the type of the latter, if possible. */
2225 static bool
2226 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2228 struct access *rchild;
2229 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2230 bool ret = false;
2232 if (is_gimple_reg_type (lacc->type)
2233 || lacc->grp_unscalarizable_region
2234 || racc->grp_unscalarizable_region)
2235 return false;
2237 if (is_gimple_reg_type (racc->type))
2239 if (!lacc->first_child && !racc->first_child)
2241 tree t = lacc->base;
2243 lacc->type = racc->type;
2244 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2245 lacc->offset, racc->type))
2246 lacc->expr = t;
2247 else
2249 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2250 lacc->base, lacc->offset,
2251 racc, NULL, false);
2252 lacc->grp_no_warning = true;
2255 return false;
2258 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2260 struct access *new_acc = NULL;
2261 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2263 if (rchild->grp_unscalarizable_region)
2264 continue;
2266 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2267 &new_acc))
2269 if (new_acc)
2271 rchild->grp_hint = 1;
2272 new_acc->grp_hint |= new_acc->grp_read;
2273 if (rchild->first_child)
2274 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2276 continue;
2279 rchild->grp_hint = 1;
2280 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2281 if (new_acc)
2283 ret = true;
2284 if (racc->first_child)
2285 propagate_subaccesses_across_link (new_acc, rchild);
2289 return ret;
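/* Illustrative sketch (hypothetical): given two candidate aggregates

     struct S a, b;
     ...
     a = b;        <- scanning created an assign_link from b to a
     ... = b.x;    <- b therefore has a child access for field x

   this function gives a an artificial child access at the offset of x, so
   that the assignment can later be rewritten as copies of the individual
   scalar replacements instead of one block copy.  */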
2292 /* Propagate all subaccesses across assignment links. */
2294 static void
2295 propagate_all_subaccesses (void)
2297 while (work_queue_head)
2299 struct access *racc = pop_access_from_work_queue ();
2300 struct assign_link *link;
2302 gcc_assert (racc->first_link);
2304 for (link = racc->first_link; link; link = link->next)
2306 struct access *lacc = link->lacc;
2308 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2309 continue;
2310 lacc = lacc->group_representative;
2311 if (propagate_subaccesses_across_link (lacc, racc)
2312 && lacc->first_link)
2313 add_access_to_work_queue (lacc);
2318 /* Go through all accesses collected throughout the (intraprocedural) analysis
2319 stage, exclude overlapping ones, identify representatives and build trees
2320 out of them, making decisions about scalarization on the way. Return true
2321 iff there are any to-be-scalarized variables after this stage. */
2323 static bool
2324 analyze_all_variable_accesses (void)
2326 int res = 0;
2327 bitmap tmp = BITMAP_ALLOC (NULL);
2328 bitmap_iterator bi;
2329 unsigned i, max_total_scalarization_size;
2331 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2332 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2334 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2335 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2336 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2338 tree var = referenced_var (i);
2340 if (TREE_CODE (var) == VAR_DECL
2341 && type_consists_of_records_p (TREE_TYPE (var)))
2343 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2344 <= max_total_scalarization_size)
2346 completely_scalarize_var (var);
2347 if (dump_file && (dump_flags & TDF_DETAILS))
2349 fprintf (dump_file, "Will attempt to totally scalarize ");
2350 print_generic_expr (dump_file, var, 0);
2351 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2354 else if (dump_file && (dump_flags & TDF_DETAILS))
2356 fprintf (dump_file, "Too big to totally scalarize: ");
2357 print_generic_expr (dump_file, var, 0);
2358 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2363 bitmap_copy (tmp, candidate_bitmap);
2364 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2366 tree var = referenced_var (i);
2367 struct access *access;
2369 access = sort_and_splice_var_accesses (var);
2370 if (!access || !build_access_trees (access))
2371 disqualify_candidate (var,
2372 "No or inhibitingly overlapping accesses.");
2375 propagate_all_subaccesses ();
2377 bitmap_copy (tmp, candidate_bitmap);
2378 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2380 tree var = referenced_var (i);
2381 struct access *access = get_first_repr_for_decl (var);
2383 if (analyze_access_trees (access))
2385 res++;
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2388 fprintf (dump_file, "\nAccess trees for ");
2389 print_generic_expr (dump_file, var, 0);
2390 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2391 dump_access_tree (dump_file, access);
2392 fprintf (dump_file, "\n");
2395 else
2396 disqualify_candidate (var, "No scalar replacements to be created.");
2399 BITMAP_FREE (tmp);
2401 if (res)
2403 statistics_counter_event (cfun, "Scalarized aggregates", res);
2404 return true;
2406 else
2407 return false;
2410 /* Generate statements copying scalar replacements of accesses within a subtree
2411 into or out of AGG. ACCESS, all its children, siblings and their children
2412 are to be processed. AGG is an aggregate type expression (it can be a
2413 declaration but does not have to be; it can for example also be a MEM_REF or
2414 a series of handled components). TOP_OFFSET is the offset of the processed
2415 subtree which has to be subtracted from offsets of individual accesses to
2416 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2417 replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2418 otherwise copy all. GSI is a statement iterator used to place the new
2419 statements. WRITE should be true when the statements should write from AGG
2420 to the replacements and false if vice versa. If INSERT_AFTER is true, new
2421 statements will be added after the current statement in GSI; otherwise they
2422 will be added before it. */
2424 static void
2425 generate_subtree_copies (struct access *access, tree agg,
2426 HOST_WIDE_INT top_offset,
2427 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2428 gimple_stmt_iterator *gsi, bool write,
2429 bool insert_after, location_t loc)
2433 if (chunk_size && access->offset >= start_offset + chunk_size)
2434 return;
2436 if (access->grp_to_be_replaced
2437 && (chunk_size == 0
2438 || access->offset + access->size > start_offset))
2440 tree expr, repl = get_access_replacement (access);
2441 gimple stmt;
2443 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2444 access, gsi, insert_after);
2446 if (write)
2448 if (access->grp_partial_lhs)
2449 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2450 !insert_after,
2451 insert_after ? GSI_NEW_STMT
2452 : GSI_SAME_STMT);
2453 stmt = gimple_build_assign (repl, expr);
2455 else
2457 TREE_NO_WARNING (repl) = 1;
2458 if (access->grp_partial_lhs)
2459 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2460 !insert_after,
2461 insert_after ? GSI_NEW_STMT
2462 : GSI_SAME_STMT);
2463 stmt = gimple_build_assign (expr, repl);
2465 gimple_set_location (stmt, loc);
2467 if (insert_after)
2468 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2469 else
2470 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2471 update_stmt (stmt);
2472 sra_stats.subtree_copies++;
2475 if (access->first_child)
2476 generate_subtree_copies (access->first_child, agg, top_offset,
2477 start_offset, chunk_size, gsi,
2478 write, insert_after, loc);
2480 access = access->next_sibling;
2482 while (access);
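/* Illustrative sketch of the generated code ("SR" is the prefix used by
   create_access_replacement; the exact names are only indicative): with
   WRITE false this emits one store per scalar replacement in the subtree,
   roughly

     g->i = SR.1;
     g->f = SR.2;

   and with WRITE true the mirror-image loads

     SR.1 = g->i;
     SR.2 = g->f;

   placed before or after the statement at GSI according to
   INSERT_AFTER.  */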
2485 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2486 the root of the subtree to be processed. GSI is the statement iterator used
2487 for inserting statements which are added after the current statement if
2488 INSERT_AFTER is true or before it otherwise. */
2490 static void
2491 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2492 bool insert_after, location_t loc)
2495 struct access *child;
2497 if (access->grp_to_be_replaced)
2499 gimple stmt;
2501 stmt = gimple_build_assign (get_access_replacement (access),
2502 build_zero_cst (access->type));
2503 if (insert_after)
2504 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2505 else
2506 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2507 update_stmt (stmt);
2508 gimple_set_location (stmt, loc);
2511 for (child = access->first_child; child; child = child->next_sibling)
2512 init_subtree_with_zero (child, gsi, insert_after, loc);
2515 /* Search for an access representative for the given expression EXPR and
2516 return it or NULL if it cannot be found. */
2518 static struct access *
2519 get_access_for_expr (tree expr)
2521 HOST_WIDE_INT offset, size, max_size;
2522 tree base;
2524 /* FIXME: This should not be necessary but Ada produces VIEW_CONVERT_EXPRs
2525 with a type of a different size than the size of their argument and we need
2526 the latter one. */
2527 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2528 expr = TREE_OPERAND (expr, 0);
2530 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2531 if (max_size == -1 || !DECL_P (base))
2532 return NULL;
2534 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2535 return NULL;
2537 return get_var_base_offset_size_access (base, offset, max_size);
2540 /* Replace the expression EXPR with a scalar replacement if there is one and
2541 generate other statements to do type conversion or subtree copying if
2542 necessary. GSI is used to place newly created statements; WRITE is true if
2543 the expression is being written to (it is on the LHS of a statement or an
2544 output in an assembly statement). */
2546 static bool
2547 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2549 location_t loc;
2550 struct access *access;
2551 tree type, bfr;
2553 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2555 bfr = *expr;
2556 expr = &TREE_OPERAND (*expr, 0);
2558 else
2559 bfr = NULL_TREE;
2561 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2562 expr = &TREE_OPERAND (*expr, 0);
2563 access = get_access_for_expr (*expr);
2564 if (!access)
2565 return false;
2566 type = TREE_TYPE (*expr);
2568 loc = gimple_location (gsi_stmt (*gsi));
2569 if (access->grp_to_be_replaced)
2571 tree repl = get_access_replacement (access);
2572 /* If we replace a non-register typed access simply use the original
2573 access expression to extract the scalar component afterwards.
2574 This happens if scalarizing a function return value or parameter
2575 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2576 gcc.c-torture/compile/20011217-1.c.
2578 We also want to use this when accessing a complex or vector which can
2579 be accessed as a different type too, potentially creating a need for
2580 type conversion (see PR42196) and when scalarized unions are involved
2581 in assembler statements (see PR42398). */
2582 if (!useless_type_conversion_p (type, access->type))
2584 tree ref;
2586 ref = build_ref_for_model (loc, access->base, access->offset, access,
2587 NULL, false);
2589 if (write)
2591 gimple stmt;
2593 if (access->grp_partial_lhs)
2594 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2595 false, GSI_NEW_STMT);
2596 stmt = gimple_build_assign (repl, ref);
2597 gimple_set_location (stmt, loc);
2598 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2600 else
2602 gimple stmt;
2604 if (access->grp_partial_lhs)
2605 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2606 true, GSI_SAME_STMT);
2607 stmt = gimple_build_assign (ref, repl);
2608 gimple_set_location (stmt, loc);
2609 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2612 else
2613 *expr = repl;
2614 sra_stats.exprs++;
2617 if (access->first_child)
2619 HOST_WIDE_INT start_offset, chunk_size;
2620 if (bfr
2621 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2622 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2624 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2625 start_offset = access->offset
2626 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2628 else
2629 start_offset = chunk_size = 0;
2631 generate_subtree_copies (access->first_child, access->base, 0,
2632 start_offset, chunk_size, gsi, write, write,
2633 loc);
2635 return true;
2638 /* Where scalar replacements of the RHS have been written to when a replacement
2639 of the LHS of an assignment cannot be directly loaded from a replacement of
2640 the RHS. */
2641 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2642 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2643 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2645 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2646 base aggregate if there are unscalarized data or directly to the LHS of the
2647 statement that is pointed to by GSI otherwise. */
2649 static enum unscalarized_data_handling
2650 handle_unscalarized_data_in_subtree (struct access *top_racc,
2651 gimple_stmt_iterator *gsi)
2653 if (top_racc->grp_unscalarized_data)
2655 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2656 gsi, false, false,
2657 gimple_location (gsi_stmt (*gsi)));
2658 return SRA_UDH_RIGHT;
2660 else
2662 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2663 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2664 0, 0, gsi, false, false,
2665 gimple_location (gsi_stmt (*gsi)));
2666 return SRA_UDH_LEFT;
2671 /* Try to generate statements to load all sub-replacements in an access subtree
2672 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2673 If that is not possible, refresh the TOP_RACC base aggregate and load the
2674 accesses from it. LEFT_OFFSET is the offset of the whole LHS subtree being
2675 copied. NEW_GSI is stmt iterator used for statement insertions after the
2676 original assignment, OLD_GSI is used to insert statements before the
2677 assignment. *REFRESHED keeps the information whether we have needed to
2678 refresh replacements of the LHS and from which side of the assignment this
2679 takes place. */
2681 static void
2682 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2683 HOST_WIDE_INT left_offset,
2684 gimple_stmt_iterator *old_gsi,
2685 gimple_stmt_iterator *new_gsi,
2686 enum unscalarized_data_handling *refreshed)
2688 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2689 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2691 if (lacc->grp_to_be_replaced)
2693 struct access *racc;
2694 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2695 gimple stmt;
2696 tree rhs;
2698 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2699 if (racc && racc->grp_to_be_replaced)
2701 rhs = get_access_replacement (racc);
2702 if (!useless_type_conversion_p (lacc->type, racc->type))
2703 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2705 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2706 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2707 true, GSI_SAME_STMT);
2709 else
2711 /* No suitable access on the right hand side, need to load from
2712 the aggregate. See if we have to update it first... */
2713 if (*refreshed == SRA_UDH_NONE)
2714 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2715 old_gsi);
2717 if (*refreshed == SRA_UDH_LEFT)
2718 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2719 new_gsi, true);
2720 else
2721 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2722 new_gsi, true);
2723 if (lacc->grp_partial_lhs)
2724 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2725 false, GSI_NEW_STMT);
2728 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2729 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2730 gimple_set_location (stmt, loc);
2731 update_stmt (stmt);
2732 sra_stats.subreplacements++;
2734 else if (*refreshed == SRA_UDH_NONE
2735 && lacc->grp_read && !lacc->grp_covered)
2736 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2737 old_gsi);
2739 if (lacc->first_child)
2740 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2741 old_gsi, new_gsi, refreshed);
2745 /* Result code for SRA assignment modification. */
2746 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2747 SRA_AM_MODIFIED, /* stmt changed but not
2748 removed */
2749 SRA_AM_REMOVED }; /* stmt eliminated */
2751 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2752 to the assignment and GSI is the statement iterator pointing at it. Returns
2753 the same values as sra_modify_assign. */
2755 static enum assignment_mod_result
2756 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2758 tree lhs = gimple_assign_lhs (*stmt);
2759 struct access *acc;
2760 location_t loc;
2762 acc = get_access_for_expr (lhs);
2763 if (!acc)
2764 return SRA_AM_NONE;
2766 if (gimple_clobber_p (*stmt))
2768 /* Remove clobbers of fully scalarized variables, otherwise
2769 do nothing. */
2770 if (acc->grp_covered)
2772 unlink_stmt_vdef (*stmt);
2773 gsi_remove (gsi, true);
2774 release_defs (*stmt);
2775 return SRA_AM_REMOVED;
2777 else
2778 return SRA_AM_NONE;
2781 loc = gimple_location (*stmt);
2782 if (VEC_length (constructor_elt,
2783 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2785 /* I have never seen this code path trigger but if it can happen the
2786 following should handle it gracefully. */
2787 if (access_has_children_p (acc))
2788 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2789 true, true, loc);
2790 return SRA_AM_MODIFIED;
2793 if (acc->grp_covered)
2795 init_subtree_with_zero (acc, gsi, false, loc);
2796 unlink_stmt_vdef (*stmt);
2797 gsi_remove (gsi, true);
2798 release_defs (*stmt);
2799 return SRA_AM_REMOVED;
2801 else
2803 init_subtree_with_zero (acc, gsi, true, loc);
2804 return SRA_AM_MODIFIED;
2808 /* Create and return a new suitable default definition SSA_NAME for RACC which
2809 is an access describing an uninitialized part of an aggregate that is being
2810 loaded. */
2812 static tree
2813 get_repl_default_def_ssa_name (struct access *racc)
2815 tree repl, decl;
2817 decl = get_access_replacement (racc);
2819 repl = gimple_default_def (cfun, decl);
2820 if (!repl)
2822 repl = make_ssa_name (decl, gimple_build_nop ());
2823 set_default_def (decl, repl);
2826 return repl;
2829 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2830 somewhere in it. */
2832 static inline bool
2833 contains_bitfld_comp_ref_p (const_tree ref)
2835 while (handled_component_p (ref))
2837 if (TREE_CODE (ref) == COMPONENT_REF
2838 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2839 return true;
2840 ref = TREE_OPERAND (ref, 0);
2843 return false;
2846 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2847 bit-field field declaration somewhere in it. */
2849 static inline bool
2850 contains_vce_or_bfcref_p (const_tree ref)
2852 while (handled_component_p (ref))
2854 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2855 || (TREE_CODE (ref) == COMPONENT_REF
2856 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2857 return true;
2858 ref = TREE_OPERAND (ref, 0);
2861 return false;
2864 /* Examine both sides of the assignment statement pointed to by STMT, replace
2865 them with a scalar replacement if there is one and generate copying of
2866 replacements if scalarized aggregates have been used in the assignment. GSI
2867 is used to hold generated statements for type conversions and subtree
2868 copying. */
2870 static enum assignment_mod_result
2871 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2873 struct access *lacc, *racc;
2874 tree lhs, rhs;
2875 bool modify_this_stmt = false;
2876 bool force_gimple_rhs = false;
2877 location_t loc;
2878 gimple_stmt_iterator orig_gsi = *gsi;
2880 if (!gimple_assign_single_p (*stmt))
2881 return SRA_AM_NONE;
2882 lhs = gimple_assign_lhs (*stmt);
2883 rhs = gimple_assign_rhs1 (*stmt);
2885 if (TREE_CODE (rhs) == CONSTRUCTOR)
2886 return sra_modify_constructor_assign (stmt, gsi);
2888 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2889 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2890 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2892 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2893 gsi, false);
2894 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2895 gsi, true);
2896 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2899 lacc = get_access_for_expr (lhs);
2900 racc = get_access_for_expr (rhs);
2901 if (!lacc && !racc)
2902 return SRA_AM_NONE;
2904 loc = gimple_location (*stmt);
2905 if (lacc && lacc->grp_to_be_replaced)
2907 lhs = get_access_replacement (lacc);
2908 gimple_assign_set_lhs (*stmt, lhs);
2909 modify_this_stmt = true;
2910 if (lacc->grp_partial_lhs)
2911 force_gimple_rhs = true;
2912 sra_stats.exprs++;
2915 if (racc && racc->grp_to_be_replaced)
2917 rhs = get_access_replacement (racc);
2918 modify_this_stmt = true;
2919 if (racc->grp_partial_lhs)
2920 force_gimple_rhs = true;
2921 sra_stats.exprs++;
2923 else if (racc
2924 && !racc->grp_unscalarized_data
2925 && TREE_CODE (lhs) == SSA_NAME
2926 && !access_has_replacements_p (racc))
2928 rhs = get_repl_default_def_ssa_name (racc);
2929 modify_this_stmt = true;
2930 sra_stats.exprs++;
2933 if (modify_this_stmt)
2935 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2937 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2938 ??? This should move to fold_stmt which we simply should
2939 call after building a VIEW_CONVERT_EXPR here. */
2940 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2941 && !contains_bitfld_comp_ref_p (lhs)
2942 && !access_has_children_p (lacc))
2944 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2945 gimple_assign_set_lhs (*stmt, lhs);
2947 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2948 && !contains_vce_or_bfcref_p (rhs)
2949 && !access_has_children_p (racc))
2950 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2952 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2954 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2955 rhs);
2956 if (is_gimple_reg_type (TREE_TYPE (lhs))
2957 && TREE_CODE (lhs) != SSA_NAME)
2958 force_gimple_rhs = true;
2963 /* From this point on, the function deals with assignments in between
2964 aggregates when at least one has scalar reductions of some of its
2965 components. There are three possible scenarios: 1) both the LHS and the RHS
2966 have to-be-scalarized components, 2) only the RHS does or 3) only the LHS does.
2968 In the first case, we would like to load the LHS components from RHS
2969 components whenever possible. If that is not possible, we would like to
2970 read it directly from the RHS (after updating it by storing in it its own
2971 components). If there are some necessary unscalarized data in the LHS,
2972 those will be loaded by the original assignment too. If neither of these
2973 cases happen, the original statement can be removed. Most of this is done
2974 by load_assign_lhs_subreplacements.
2976 In the second case, we would like to store all RHS scalarized components
2977 directly into LHS and if they cover the aggregate completely, remove the
2978 statement too. In the third case, we want the LHS components to be loaded
2979 directly from the RHS (DSE will remove the original statement if it
2980 becomes redundant).
2982 This is a bit complex but manageable when types match and when unions do
2983 not cause confusion in a way that we cannot really load a component of LHS
2984 from the RHS or vice versa (the access representing this level can have
2985 subaccesses that are accessible only through a different union field at a
2986 higher level - different from the one used in the examined expression).
2987 Unions are fun.
2989 Therefore, I specially handle a fourth case, happening when there is a
2990 specific type cast or it is impossible to locate a scalarized subaccess on
2991 the other side of the expression. If that happens, I simply "refresh" the
2992 RHS by storing in it its scalarized components, leave the original statement
2993 there to do the copying and then load the scalar replacements of the LHS.
2994 This is what the first branch does. */
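/* Illustrative sketch of the fourth case (hypothetical types):

     union U { struct A a; struct B b; } u;
     ...
     ... = u.b;    <- u was scalarized through the 'a' view, but the
                      examined expression reads it through 'b'

   No scalarized subaccess of the 'a' view lines up with the components of
   u.b, so the branch below flushes the scalar replacements of the RHS back
   into the aggregate, keeps the original statement to do the copying, and
   then reloads the scalar replacements of the LHS.  */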
2996 if (modify_this_stmt
2997 || gimple_has_volatile_ops (*stmt)
2998 || contains_vce_or_bfcref_p (rhs)
2999 || contains_vce_or_bfcref_p (lhs))
3001 if (access_has_children_p (racc))
3002 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3003 gsi, false, false, loc);
3004 if (access_has_children_p (lacc))
3005 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3006 gsi, true, true, loc);
3007 sra_stats.separate_lhs_rhs_handling++;
3009 /* This gimplification must be done after generate_subtree_copies,
3010 lest we insert the subtree copies in the middle of the gimplified
3011 sequence. */
3012 if (force_gimple_rhs)
3013 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3014 true, GSI_SAME_STMT);
3015 if (gimple_assign_rhs1 (*stmt) != rhs)
3017 modify_this_stmt = true;
3018 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3019 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3022 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3024 else
3026 if (access_has_children_p (lacc)
3027 && access_has_children_p (racc)
3028 /* When an access represents an unscalarizable region, it usually
3029 represents accesses with variable offset and thus must not be used
3030 to generate new memory accesses. */
3031 && !lacc->grp_unscalarizable_region
3032 && !racc->grp_unscalarizable_region)
3034 gimple_stmt_iterator orig_gsi = *gsi;
3035 enum unscalarized_data_handling refreshed;
3037 if (lacc->grp_read && !lacc->grp_covered)
3038 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3039 else
3040 refreshed = SRA_UDH_NONE;
3042 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3043 &orig_gsi, gsi, &refreshed);
3044 if (refreshed != SRA_UDH_RIGHT)
3046 gsi_next (gsi);
3047 unlink_stmt_vdef (*stmt);
3048 gsi_remove (&orig_gsi, true);
3049 release_defs (*stmt);
3050 sra_stats.deleted++;
3051 return SRA_AM_REMOVED;
3054 else
3056 if (access_has_children_p (racc)
3057 && !racc->grp_unscalarized_data)
3059 if (dump_file)
3061 fprintf (dump_file, "Removing load: ");
3062 print_gimple_stmt (dump_file, *stmt, 0, 0);
3064 generate_subtree_copies (racc->first_child, lhs,
3065 racc->offset, 0, 0, gsi,
3066 false, false, loc);
3067 gcc_assert (*stmt == gsi_stmt (*gsi));
3068 unlink_stmt_vdef (*stmt);
3069 gsi_remove (gsi, true);
3070 release_defs (*stmt);
3071 sra_stats.deleted++;
3072 return SRA_AM_REMOVED;
3074 /* Restore the aggregate RHS from its components so the
3075 prevailing aggregate copy does the right thing. */
3076 if (access_has_children_p (racc))
3077 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3078 gsi, false, false, loc);
3079 /* Re-load the components of the aggregate copy destination.
3080 But use the RHS aggregate to load from to expose more
3081 optimization opportunities. */
3082 if (access_has_children_p (lacc))
3083 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3084 0, 0, gsi, true, true, loc);
3087 return SRA_AM_NONE;
3091 /* Traverse the function body and perform all modifications as decided in
3092 analyze_all_variable_accesses. Return true iff the CFG has been
3093 changed. */
3095 static bool
3096 sra_modify_function_body (void)
3098 bool cfg_changed = false;
3099 basic_block bb;
3101 FOR_EACH_BB (bb)
3103 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3104 while (!gsi_end_p (gsi))
3106 gimple stmt = gsi_stmt (gsi);
3107 enum assignment_mod_result assign_result;
3108 bool modified = false, deleted = false;
3109 tree *t;
3110 unsigned i;
3112 switch (gimple_code (stmt))
3114 case GIMPLE_RETURN:
3115 t = gimple_return_retval_ptr (stmt);
3116 if (*t != NULL_TREE)
3117 modified |= sra_modify_expr (t, &gsi, false);
3118 break;
3120 case GIMPLE_ASSIGN:
3121 assign_result = sra_modify_assign (&stmt, &gsi);
3122 modified |= assign_result == SRA_AM_MODIFIED;
3123 deleted = assign_result == SRA_AM_REMOVED;
3124 break;
3126 case GIMPLE_CALL:
3127 /* Operands must be processed before the lhs. */
3128 for (i = 0; i < gimple_call_num_args (stmt); i++)
3130 t = gimple_call_arg_ptr (stmt, i);
3131 modified |= sra_modify_expr (t, &gsi, false);
3134 if (gimple_call_lhs (stmt))
3136 t = gimple_call_lhs_ptr (stmt);
3137 modified |= sra_modify_expr (t, &gsi, true);
3139 break;
3141 case GIMPLE_ASM:
3142 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3144 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3145 modified |= sra_modify_expr (t, &gsi, false);
3147 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3149 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3150 modified |= sra_modify_expr (t, &gsi, true);
3152 break;
3154 default:
3155 break;
3158 if (modified)
3160 update_stmt (stmt);
3161 if (maybe_clean_eh_stmt (stmt)
3162 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3163 cfg_changed = true;
3165 if (!deleted)
3166 gsi_next (&gsi);
3170 return cfg_changed;
3173 /* Generate statements initializing scalar replacements of parts of function
3174 parameters. */
3176 static void
3177 initialize_parameter_reductions (void)
3179 gimple_stmt_iterator gsi;
3180 gimple_seq seq = NULL;
3181 tree parm;
3183 gsi = gsi_start (seq);
3184 for (parm = DECL_ARGUMENTS (current_function_decl);
3185 parm;
3186 parm = DECL_CHAIN (parm))
3188 VEC (access_p, heap) *access_vec;
3189 struct access *access;
3191 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3192 continue;
3193 access_vec = get_base_access_vector (parm);
3194 if (!access_vec)
3195 continue;
3197 for (access = VEC_index (access_p, access_vec, 0);
3198 access;
3199 access = access->next_grp)
3200 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3201 EXPR_LOCATION (parm));
3204 seq = gsi_seq (gsi);
3205 if (seq)
3206 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3209 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3210 it reveals there are components of some aggregates to be scalarized, it runs
3211 the required transformations. */
3212 static unsigned int
3213 perform_intra_sra (void)
3215 int ret = 0;
3216 sra_initialize ();
3218 if (!find_var_candidates ())
3219 goto out;
3221 if (!scan_function ())
3222 goto out;
3224 if (!analyze_all_variable_accesses ())
3225 goto out;
3227 if (sra_modify_function_body ())
3228 ret = TODO_update_ssa | TODO_cleanup_cfg;
3229 else
3230 ret = TODO_update_ssa;
3231 initialize_parameter_reductions ();
3233 statistics_counter_event (cfun, "Scalar replacements created",
3234 sra_stats.replacements);
3235 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3236 statistics_counter_event (cfun, "Subtree copy stmts",
3237 sra_stats.subtree_copies);
3238 statistics_counter_event (cfun, "Subreplacement stmts",
3239 sra_stats.subreplacements);
3240 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3241 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3242 sra_stats.separate_lhs_rhs_handling);
3244 out:
3245 sra_deinitialize ();
3246 return ret;
3249 /* Perform early intraprocedural SRA. */
3250 static unsigned int
3251 early_intra_sra (void)
3253 sra_mode = SRA_MODE_EARLY_INTRA;
3254 return perform_intra_sra ();
3257 /* Perform "late" intraprocedural SRA. */
3258 static unsigned int
3259 late_intra_sra (void)
3261 sra_mode = SRA_MODE_INTRA;
3262 return perform_intra_sra ();
3266 static bool
3267 gate_intra_sra (void)
3269 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3273 struct gimple_opt_pass pass_sra_early =
3276 GIMPLE_PASS,
3277 "esra", /* name */
3278 gate_intra_sra, /* gate */
3279 early_intra_sra, /* execute */
3280 NULL, /* sub */
3281 NULL, /* next */
3282 0, /* static_pass_number */
3283 TV_TREE_SRA, /* tv_id */
3284 PROP_cfg | PROP_ssa, /* properties_required */
3285 0, /* properties_provided */
3286 0, /* properties_destroyed */
3287 0, /* todo_flags_start */
3288 TODO_update_ssa
3289 | TODO_ggc_collect
3290 | TODO_verify_ssa /* todo_flags_finish */
3294 struct gimple_opt_pass pass_sra =
3297 GIMPLE_PASS,
3298 "sra", /* name */
3299 gate_intra_sra, /* gate */
3300 late_intra_sra, /* execute */
3301 NULL, /* sub */
3302 NULL, /* next */
3303 0, /* static_pass_number */
3304 TV_TREE_SRA, /* tv_id */
3305 PROP_cfg | PROP_ssa, /* properties_required */
3306 0, /* properties_provided */
3307 0, /* properties_destroyed */
3308 TODO_update_address_taken, /* todo_flags_start */
3309 TODO_update_ssa
3310 | TODO_ggc_collect
3311 | TODO_verify_ssa /* todo_flags_finish */
3316 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3317 parameter. */
3319 static bool
3320 is_unused_scalar_param (tree parm)
3322 tree name;
3323 return (is_gimple_reg (parm)
3324 && (!(name = gimple_default_def (cfun, parm))
3325 || has_zero_uses (name)));
3328 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3329 examine whether there are any direct or otherwise infeasible ones. If so,
3330 return true, otherwise return false. PARM must be a gimple register with a
3331 non-NULL default definition. */
3333 static bool
3334 ptr_parm_has_direct_uses (tree parm)
3336 imm_use_iterator ui;
3337 gimple stmt;
3338 tree name = gimple_default_def (cfun, parm);
3339 bool ret = false;
3341 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3343 int uses_ok = 0;
3344 use_operand_p use_p;
3346 if (is_gimple_debug (stmt))
3347 continue;
3349 /* Valid uses include dereferences on the lhs and the rhs. */
3350 if (gimple_has_lhs (stmt))
3352 tree lhs = gimple_get_lhs (stmt);
3353 while (handled_component_p (lhs))
3354 lhs = TREE_OPERAND (lhs, 0);
3355 if (TREE_CODE (lhs) == MEM_REF
3356 && TREE_OPERAND (lhs, 0) == name
3357 && integer_zerop (TREE_OPERAND (lhs, 1))
3358 && types_compatible_p (TREE_TYPE (lhs),
3359 TREE_TYPE (TREE_TYPE (name)))
3360 && !TREE_THIS_VOLATILE (lhs))
3361 uses_ok++;
3363 if (gimple_assign_single_p (stmt))
3365 tree rhs = gimple_assign_rhs1 (stmt);
3366 while (handled_component_p (rhs))
3367 rhs = TREE_OPERAND (rhs, 0);
3368 if (TREE_CODE (rhs) == MEM_REF
3369 && TREE_OPERAND (rhs, 0) == name
3370 && integer_zerop (TREE_OPERAND (rhs, 1))
3371 && types_compatible_p (TREE_TYPE (rhs),
3372 TREE_TYPE (TREE_TYPE (name)))
3373 && !TREE_THIS_VOLATILE (rhs))
3374 uses_ok++;
3376 else if (is_gimple_call (stmt))
3378 unsigned i;
3379 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3381 tree arg = gimple_call_arg (stmt, i);
3382 while (handled_component_p (arg))
3383 arg = TREE_OPERAND (arg, 0);
3384 if (TREE_CODE (arg) == MEM_REF
3385 && TREE_OPERAND (arg, 0) == name
3386 && integer_zerop (TREE_OPERAND (arg, 1))
3387 && types_compatible_p (TREE_TYPE (arg),
3388 TREE_TYPE (TREE_TYPE (name)))
3389 && !TREE_THIS_VOLATILE (arg))
3390 uses_ok++;
3394 /* If the number of valid uses does not match the number of
3395 uses in this stmt there is an unhandled use. */
3396 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3397 --uses_ok;
3399 if (uses_ok != 0)
3400 ret = true;
3402 if (ret)
3403 BREAK_FROM_IMM_USE_STMT (ui);
3406 return ret;
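/* Illustrative sketch (hypothetical): in

     int
     foo (int *p)
     {
       int *q = p;  <- a direct use of the pointer value
       return *p;   <- a plain dereference, which is fine
     }

   the copy into q is not one of the recognized dereference forms, so
   uses_ok falls short of the statement's use count, this function returns
   true and p is not a candidate for IPA-SRA.  */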
3409 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3410 them in candidate_bitmap. Note that these do not necessarily include
3411 parameters which are unused and thus can be removed. Return true iff any
3412 such candidate has been found. */
3414 static bool
3415 find_param_candidates (void)
3417 tree parm;
3418 int count = 0;
3419 bool ret = false;
3420 const char *msg;
3422 for (parm = DECL_ARGUMENTS (current_function_decl);
3423 parm;
3424 parm = DECL_CHAIN (parm))
3426 tree type = TREE_TYPE (parm);
3428 count++;
3430 if (TREE_THIS_VOLATILE (parm)
3431 || TREE_ADDRESSABLE (parm)
3432 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3433 continue;
3435 if (is_unused_scalar_param (parm))
3437 ret = true;
3438 continue;
3441 if (POINTER_TYPE_P (type))
3443 type = TREE_TYPE (type);
3445 if (TREE_CODE (type) == FUNCTION_TYPE
3446 || TYPE_VOLATILE (type)
3447 || (TREE_CODE (type) == ARRAY_TYPE
3448 && TYPE_NONALIASED_COMPONENT (type))
3449 || !is_gimple_reg (parm)
3450 || is_va_list_type (type)
3451 || ptr_parm_has_direct_uses (parm))
3452 continue;
3454 else if (!AGGREGATE_TYPE_P (type))
3455 continue;
3457 if (!COMPLETE_TYPE_P (type)
3458 || !host_integerp (TYPE_SIZE (type), 1)
3459 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3460 || (AGGREGATE_TYPE_P (type)
3461 && type_internals_preclude_sra_p (type, &msg)))
3462 continue;
3464 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3465 ret = true;
3466 if (dump_file && (dump_flags & TDF_DETAILS))
3468 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3469 print_generic_expr (dump_file, parm, 0);
3470 fprintf (dump_file, "\n");
3474 func_param_count = count;
3475 return ret;
3478 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3479 maybe_modified. */
3481 static bool
3482 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3483 void *data)
3485 struct access *repr = (struct access *) data;
3487 repr->grp_maybe_modified = 1;
3488 return true;
3491 /* Analyze what representatives (in linked lists accessible from
3492 REPRESENTATIVES) can be modified by side effects of statements in the
3493 current function. */
3495 static void
3496 analyze_modified_params (VEC (access_p, heap) *representatives)
3498 int i;
3500 for (i = 0; i < func_param_count; i++)
3502 struct access *repr;
3504 for (repr = VEC_index (access_p, representatives, i);
3505 repr;
3506 repr = repr->next_grp)
3508 struct access *access;
3509 bitmap visited;
3510 ao_ref ar;
3512 if (no_accesses_p (repr))
3513 continue;
3514 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3515 || repr->grp_maybe_modified)
3516 continue;
3518 ao_ref_init (&ar, repr->expr);
3519 visited = BITMAP_ALLOC (NULL);
3520 for (access = repr; access; access = access->next_sibling)
3522 /* All accesses are read ones, otherwise grp_maybe_modified would
3523 be trivially set. */
3524 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3525 mark_maybe_modified, repr, &visited);
3526 if (repr->grp_maybe_modified)
3527 break;
3529 BITMAP_FREE (visited);
3534 /* Propagate distances in bb_dereferences in the opposite direction to the
3535 control flow edges, in each step storing the maximum of the current value
3536 and the minimum of all successors. These steps are repeated until the table
3537 stabilizes. Note that BBs which might terminate the function (according to
3538 the final_bbs bitmap) are never updated in this way. */
3540 static void
3541 propagate_dereference_distances (void)
3543 VEC (basic_block, heap) *queue;
3544 basic_block bb;
3546 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3547 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3548 FOR_EACH_BB (bb)
3550 VEC_quick_push (basic_block, queue, bb);
3551 bb->aux = bb;
3554 while (!VEC_empty (basic_block, queue))
3556 edge_iterator ei;
3557 edge e;
3558 bool change = false;
3559 int i;
3561 bb = VEC_pop (basic_block, queue);
3562 bb->aux = NULL;
3564 if (bitmap_bit_p (final_bbs, bb->index))
3565 continue;
3567 for (i = 0; i < func_param_count; i++)
3569 int idx = bb->index * func_param_count + i;
3570 bool first = true;
3571 HOST_WIDE_INT inh = 0;
3573 FOR_EACH_EDGE (e, ei, bb->succs)
3575 int succ_idx = e->dest->index * func_param_count + i;
3577 if (e->src == EXIT_BLOCK_PTR)
3578 continue;
3580 if (first)
3582 first = false;
3583 inh = bb_dereferences [succ_idx];
3585 else if (bb_dereferences [succ_idx] < inh)
3586 inh = bb_dereferences [succ_idx];
3589 if (!first && bb_dereferences[idx] < inh)
3591 bb_dereferences[idx] = inh;
3592 change = true;
3596 if (change && !bitmap_bit_p (final_bbs, bb->index))
3597 FOR_EACH_EDGE (e, ei, bb->preds)
3599 if (e->src->aux)
3600 continue;
3602 e->src->aux = e->src;
3603 VEC_quick_push (basic_block, queue, e->src);
3607 VEC_free (basic_block, heap, queue);
3610 /* Dump a dereferences TABLE with heading STR to file F. */
3612 static void
3613 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3615 basic_block bb;
3617 fprintf (f, "%s", str);
3618 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3620 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3621 if (bb != EXIT_BLOCK_PTR)
3623 int i;
3624 for (i = 0; i < func_param_count; i++)
3626 int idx = bb->index * func_param_count + i;
3627 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3630 fprintf (f, "\n");
3632 fprintf (f, "\n");
3635 /* Determine which (parts of) parameters passed by reference and not assigned
3636 to are not certainly dereferenced in this function, and for which the
3637 dereferencing therefore cannot be safely moved to the caller without
3638 potentially introducing a segfault. Mark such REPRESENTATIVES as
3639 grp_not_necessarilly_dereferenced.
3641 The maximum dereference "distance," i.e. the offset + size of the accessed
3642 part, is calculated for each pointer parameter rather than a simple
3643 boolean, to handle cases when only a fraction of the whole
3644 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3645 an example).
3647 The maximum dereference distances for each pointer parameter and BB are
3648 already stored in bb_dereferences. This routine simply propagates these
3649 values upwards by propagate_dereference_distances and then compares the
3650 distances of individual parameters in the ENTRY BB to the equivalent
3651 distances of each representative of a (fraction of a) parameter. */
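/* Illustrative sketch (details made up, in the spirit of the testcase
   cited above): a caller may allocate only a prefix of the aggregate,

     struct big { int first; int rest[100]; };
     struct big *p = malloc (sizeof (int));   <- only 'first' is valid

   so knowing that the callee dereferences *p somewhere is not enough; the
   propagated distance says how many bits from the start are certainly
   dereferenced on every path, and only representatives that fit within
   that distance may have their dereference moved to the caller.  */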
3653 static void
3654 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3656 int i;
3658 if (dump_file && (dump_flags & TDF_DETAILS))
3659 dump_dereferences_table (dump_file,
3660 "Dereference table before propagation:\n",
3661 bb_dereferences);
3663 propagate_dereference_distances ();
3665 if (dump_file && (dump_flags & TDF_DETAILS))
3666 dump_dereferences_table (dump_file,
3667 "Dereference table after propagation:\n",
3668 bb_dereferences);
3670 for (i = 0; i < func_param_count; i++)
3672 struct access *repr = VEC_index (access_p, representatives, i);
3673 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3675 if (!repr || no_accesses_p (repr))
3676 continue;
3680 if ((repr->offset + repr->size) > bb_dereferences[idx])
3681 repr->grp_not_necessarilly_dereferenced = 1;
3682 repr = repr->next_grp;
3684 while (repr);
3688 /* Return the representative access for the parameter declaration PARM if it is
3689 a scalar passed by reference which is not written to and the pointer value
3690 is not used directly. Thus, if it is legal to dereference it in the caller
3691 and we can rule out modifications through aliases, such a parameter should be
3692 turned into one passed by value. Return NULL otherwise. */
3694 static struct access *
3695 unmodified_by_ref_scalar_representative (tree parm)
3697 int i, access_count;
3698 struct access *repr;
3699 VEC (access_p, heap) *access_vec;
3701 access_vec = get_base_access_vector (parm);
3702 gcc_assert (access_vec);
3703 repr = VEC_index (access_p, access_vec, 0);
3704 if (repr->write)
3705 return NULL;
3706 repr->group_representative = repr;
3708 access_count = VEC_length (access_p, access_vec);
3709 for (i = 1; i < access_count; i++)
3711 struct access *access = VEC_index (access_p, access_vec, i);
3712 if (access->write)
3713 return NULL;
3714 access->group_representative = repr;
3715 access->next_sibling = repr->next_sibling;
3716 repr->next_sibling = access;
3719 repr->grp_read = 1;
3720 repr->grp_scalar_ptr = 1;
3721 return repr;
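/* Illustrative sketch (hypothetical): a parameter qualifying here is

     int
     foo (int *p)
     {
       return *p + *p;
     }

   where *p is only read and the value of p itself never escapes; IPA-SRA
   can then turn the signature into roughly 'int foo (int)' and have
   callers pass the pointed-to value directly.  */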
3724 /* Return true iff this access precludes IPA-SRA of the parameter it is
3725 associated with. */
3727 static bool
3728 access_precludes_ipa_sra_p (struct access *access)
3730 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3731 is incompatible assign in a call statement (and possibly even in asm
3732 statements). This can be relaxed by using a new temporary but only for
3733 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3734 intraprocedural SRA we deal with this by keeping the old aggregate around,
3735 something we cannot do in IPA-SRA.) */
3736 if (access->write
3737 && (is_gimple_call (access->stmt)
3738 || gimple_code (access->stmt) == GIMPLE_ASM))
3739 return true;
3741 return false;
3745 /* Sort collected accesses for parameter PARM, identify representatives for
3746 each accessed region and link them together. Return NULL if there are
3747 different but overlapping accesses, return the special pointer value meaning
3748 there are no accesses for this parameter if that is the case, and return the
3749 first representative otherwise. Set *RO_GRP if there is a group of accesses
3750 with only read (i.e. no write) accesses. */
3752 static struct access *
3753 splice_param_accesses (tree parm, bool *ro_grp)
3755 int i, j, access_count, group_count;
3756 int agg_size, total_size = 0;
3757 struct access *access, *res, **prev_acc_ptr = &res;
3758 VEC (access_p, heap) *access_vec;
3760 access_vec = get_base_access_vector (parm);
3761 if (!access_vec)
3762 return &no_accesses_representant;
3763 access_count = VEC_length (access_p, access_vec);
3765 VEC_qsort (access_p, access_vec, compare_access_positions);
3767 i = 0;
3768 total_size = 0;
3769 group_count = 0;
3770 while (i < access_count)
3772 bool modification;
3773 tree a1_alias_type;
3774 access = VEC_index (access_p, access_vec, i);
3775 modification = access->write;
3776 if (access_precludes_ipa_sra_p (access))
3777 return NULL;
3778 a1_alias_type = reference_alias_ptr_type (access->expr);
3780 /* Access is about to become group representative unless we find some
3781 nasty overlap which would preclude us from breaking this parameter
3782 apart. */
3784 j = i + 1;
3785 while (j < access_count)
3787 struct access *ac2 = VEC_index (access_p, access_vec, j);
3788 if (ac2->offset != access->offset)
3790 /* All or nothing law for parameters. */
3791 if (access->offset + access->size > ac2->offset)
3792 return NULL;
3793 else
3794 break;
3796 else if (ac2->size != access->size)
3797 return NULL;
3799 if (access_precludes_ipa_sra_p (ac2)
3800 || (ac2->type != access->type
3801 && (TREE_ADDRESSABLE (ac2->type)
3802 || TREE_ADDRESSABLE (access->type)))
3803 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3804 return NULL;
3806 modification |= ac2->write;
3807 ac2->group_representative = access;
3808 ac2->next_sibling = access->next_sibling;
3809 access->next_sibling = ac2;
3810 j++;
3813 group_count++;
3814 access->grp_maybe_modified = modification;
3815 if (!modification)
3816 *ro_grp = true;
3817 *prev_acc_ptr = access;
3818 prev_acc_ptr = &access->next_grp;
3819 total_size += access->size;
3820 i = j;
3823 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3824 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3825 else
3826 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3827 if (total_size >= agg_size)
3828 return NULL;
3830 gcc_assert (group_count > 0);
3831 return res;
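/* An illustrative sketch under an assumed layout: suppose PARM is a 64-bit
   struct with two 32-bit fields at offsets 0 and 32.  If one statement reads
   the whole aggregate (offset 0, size 64) and another reads only the second
   field (offset 32, size 32), the accesses partially overlap and the inner
   loop above returns NULL.  If instead every access falls exactly into one
   of the 0/32 slots, two representatives are chosen and linked through
   next_grp; note that when both fields are accessed, total_size (64) reaches
   agg_size and the final test still returns NULL, because splitting would
   not shrink anything.  Only accessing a strict subset of PARM pays off.  */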
3834 /* Decide whether the parameter whose representative accesses are given by REPR
3835 should be reduced into components; return their number, or zero if not. */
3837 static int
3838 decide_one_param_reduction (struct access *repr)
3840 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3841 bool by_ref;
3842 tree parm;
3844 parm = repr->base;
3845 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3846 gcc_assert (cur_parm_size > 0);
3848 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3850 by_ref = true;
3851 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3853 else
3855 by_ref = false;
3856 agg_size = cur_parm_size;
3859 if (dump_file)
3861 struct access *acc;
3862 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3863 print_generic_expr (dump_file, parm, 0);
3864 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3865 for (acc = repr; acc; acc = acc->next_grp)
3866 dump_access (dump_file, acc, true);
3869 total_size = 0;
3870 new_param_count = 0;
3872 for (; repr; repr = repr->next_grp)
3874 gcc_assert (parm == repr->base);
3876 /* Taking the address of a non-addressable field is verboten. */
3877 if (by_ref && repr->non_addressable)
3878 return 0;
3880 /* Do not decompose a non-BLKmode param in a way that would
3881 create BLKmode params. Especially for by-reference passing
3882 (thus, pointer-type param) this is hardly worthwhile. */
3883 if (DECL_MODE (parm) != BLKmode
3884 && TYPE_MODE (repr->type) == BLKmode)
3885 return 0;
3887 if (!by_ref || (!repr->grp_maybe_modified
3888 && !repr->grp_not_necessarilly_dereferenced))
3889 total_size += repr->size;
3890 else
3891 total_size += cur_parm_size;
3893 new_param_count++;
3896 gcc_assert (new_param_count > 0);
3898 if (optimize_function_for_size_p (cfun))
3899 parm_size_limit = cur_parm_size;
3900 else
3901 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3902 * cur_parm_size);
3904 if (total_size < agg_size
3905 && total_size <= parm_size_limit)
3907 if (dump_file)
3908 fprintf (dump_file, " ....will be split into %i components\n",
3909 new_param_count);
3910 return new_param_count;
3912 else
3913 return 0;
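/* A worked example with assumed numbers: take a by-reference parameter whose
   pointer is 64 bits wide and points to a 128-bit aggregate, of which two
   32-bit fields are read, both provably dereferenced and never modified.
   Then cur_parm_size is 64, agg_size is 128, total_size is 64 and, with an
   assumed PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2, parm_size_limit is 128.
   Both conditions above hold and the function returns 2, the new parameter
   count.  */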
3916 /* The order of the following enum values is important: we need to do extra
3917 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3918 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3919 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3921 /* Identify representatives of all accesses to all candidate parameters for
3922 IPA-SRA. Return result based on what representatives have been found. */
3924 static enum ipa_splicing_result
3925 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3927 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3928 tree parm;
3929 struct access *repr;
3931 *representatives = VEC_alloc (access_p, heap, func_param_count);
3933 for (parm = DECL_ARGUMENTS (current_function_decl);
3934 parm;
3935 parm = DECL_CHAIN (parm))
3937 if (is_unused_scalar_param (parm))
3939 VEC_quick_push (access_p, *representatives,
3940 &no_accesses_representant);
3941 if (result == NO_GOOD_ACCESS)
3942 result = UNUSED_PARAMS;
3944 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3945 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3946 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3948 repr = unmodified_by_ref_scalar_representative (parm);
3949 VEC_quick_push (access_p, *representatives, repr);
3950 if (repr)
3951 result = UNMODIF_BY_REF_ACCESSES;
3953 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3955 bool ro_grp = false;
3956 repr = splice_param_accesses (parm, &ro_grp);
3957 VEC_quick_push (access_p, *representatives, repr);
3959 if (repr && !no_accesses_p (repr))
3961 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3963 if (ro_grp)
3964 result = UNMODIF_BY_REF_ACCESSES;
3965 else if (result < MODIF_BY_REF_ACCESSES)
3966 result = MODIF_BY_REF_ACCESSES;
3968 else if (result < BY_VAL_ACCESSES)
3969 result = BY_VAL_ACCESSES;
3971 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3972 result = UNUSED_PARAMS;
3974 else
3975 VEC_quick_push (access_p, *representatives, NULL);
3978 if (result == NO_GOOD_ACCESS)
3980 VEC_free (access_p, heap, *representatives);
3981 *representatives = NULL;
3982 return NO_GOOD_ACCESS;
3985 return result;
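/* E.g. (illustrative): for

     static int f (int unused, int *p)

   where unused is never used and *p is only ever read, the first parameter
   pushes the no-accesses representative and sets UNUSED_PARAMS, while the
   second is recognized as an unmodified by-reference scalar and upgrades the
   result to UNMODIF_BY_REF_ACCESSES, the strongest state in the ordering
   above.  */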
3988 /* Return the index of BASE in PARMS. Abort if it is not found. */
3990 static inline int
3991 get_param_index (tree base, VEC(tree, heap) *parms)
3993 int i, len;
3995 len = VEC_length (tree, parms);
3996 for (i = 0; i < len; i++)
3997 if (VEC_index (tree, parms, i) == base)
3998 return i;
3999 gcc_unreachable ();
4002 /* Convert the decisions made at the representative level into compact
4003 parameter adjustments.  REPRESENTATIVES holds pointers to the first
4004 representative of the accesses of each parameter, ADJUSTMENTS_COUNT is the
4005 expected final number of adjustments. */
4007 static ipa_parm_adjustment_vec
4008 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
4009 int adjustments_count)
4011 VEC (tree, heap) *parms;
4012 ipa_parm_adjustment_vec adjustments;
4013 tree parm;
4014 int i;
4016 gcc_assert (adjustments_count > 0);
4017 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4018 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
4019 parm = DECL_ARGUMENTS (current_function_decl);
4020 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4022 struct access *repr = VEC_index (access_p, representatives, i);
4024 if (!repr || no_accesses_p (repr))
4026 struct ipa_parm_adjustment *adj;
4028 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4029 memset (adj, 0, sizeof (*adj));
4030 adj->base_index = get_param_index (parm, parms);
4031 adj->base = parm;
4032 if (!repr)
4033 adj->copy_param = 1;
4034 else
4035 adj->remove_param = 1;
4037 else
4039 struct ipa_parm_adjustment *adj;
4040 int index = get_param_index (parm, parms);
4042 for (; repr; repr = repr->next_grp)
4044 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
4045 memset (adj, 0, sizeof (*adj));
4046 gcc_assert (repr->base == parm);
4047 adj->base_index = index;
4048 adj->base = repr->base;
4049 adj->type = repr->type;
4050 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
4051 adj->offset = repr->offset;
4052 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4053 && (repr->grp_maybe_modified
4054 || repr->grp_not_necessarilly_dereferenced));
4059 VEC_free (tree, heap, parms);
4060 return adjustments;
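/* For example (hypothetical): a parameter split into two components produces
   two adjustments sharing the same base_index and base but differing in type
   and offset, whereas a parameter kept intact produces a single adjustment
   with copy_param set, and an unused one a single adjustment with
   remove_param set.  */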
4063 /* Analyze the collected accesses and produce a plan for what to do with the
4064 parameters in the form of an adjustment vector, NULL meaning nothing. */
4066 static ipa_parm_adjustment_vec
4067 analyze_all_param_accesses (void)
4069 enum ipa_splicing_result repr_state;
4070 bool proceed = false;
4071 int i, adjustments_count = 0;
4072 VEC (access_p, heap) *representatives;
4073 ipa_parm_adjustment_vec adjustments;
4075 repr_state = splice_all_param_accesses (&representatives);
4076 if (repr_state == NO_GOOD_ACCESS)
4077 return NULL;
4079 /* If there are any parameters passed by reference which are not modified
4080 directly, we need to check whether they can be modified indirectly. */
4081 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4083 analyze_caller_dereference_legality (representatives);
4084 analyze_modified_params (representatives);
4087 for (i = 0; i < func_param_count; i++)
4089 struct access *repr = VEC_index (access_p, representatives, i);
4091 if (repr && !no_accesses_p (repr))
4093 if (repr->grp_scalar_ptr)
4095 adjustments_count++;
4096 if (repr->grp_not_necessarilly_dereferenced
4097 || repr->grp_maybe_modified)
4098 VEC_replace (access_p, representatives, i, NULL);
4099 else
4101 proceed = true;
4102 sra_stats.scalar_by_ref_to_by_val++;
4105 else
4107 int new_components = decide_one_param_reduction (repr);
4109 if (new_components == 0)
4111 VEC_replace (access_p, representatives, i, NULL);
4112 adjustments_count++;
4114 else
4116 adjustments_count += new_components;
4117 sra_stats.aggregate_params_reduced++;
4118 sra_stats.param_reductions_created += new_components;
4119 proceed = true;
4123 else
4125 if (no_accesses_p (repr))
4127 proceed = true;
4128 sra_stats.deleted_unused_parameters++;
4130 adjustments_count++;
4134 if (!proceed && dump_file)
4135 fprintf (dump_file, "NOT proceeding to change params.\n");
4137 if (proceed)
4138 adjustments = turn_representatives_into_adjustments (representatives,
4139 adjustments_count);
4140 else
4141 adjustments = NULL;
4143 VEC_free (access_p, heap, representatives);
4144 return adjustments;
4147 /* If a parameter replacement identified by ADJ does not yet exist in the form
4148 of a declaration, create and record it; otherwise return the previously
4149 created one. */
4151 static tree
4152 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4154 tree repl;
4155 if (!adj->new_ssa_base)
4157 char *pretty_name = make_fancy_name (adj->base);
4159 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4160 DECL_NAME (repl) = get_identifier (pretty_name);
4161 obstack_free (&name_obstack, pretty_name);
4163 add_referenced_var (repl);
4164 adj->new_ssa_base = repl;
4166 else
4167 repl = adj->new_ssa_base;
4168 return repl;
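/* Sketch of the effect: for a removed parameter originally named len, the
   substitute is a fresh temporary register created with the "ISR" prefix
   whose DECL_NAME is then reset to a pretty name derived from len;
   subsequent calls for the same adjustment keep returning that same
   cached declaration.  */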
4171 /* Find the first adjustment for a particular parameter BASE in a vector of
4172 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4173 adjustment. */
4175 static struct ipa_parm_adjustment *
4176 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4178 int i, len;
4180 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4181 for (i = 0; i < len; i++)
4183 struct ipa_parm_adjustment *adj;
4185 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4186 if (!adj->copy_param && adj->base == base)
4187 return adj;
4190 return NULL;
4193 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4194 removed because its value is not used, replace the SSA_NAME, together with
4195 all of its uses, with one relating to a newly created VAR_DECL and return
4196 true.  ADJUSTMENTS is a pointer to a vector of adjustments. */
4198 static bool
4199 replace_removed_params_ssa_names (gimple stmt,
4200 ipa_parm_adjustment_vec adjustments)
4202 struct ipa_parm_adjustment *adj;
4203 tree lhs, decl, repl, name;
4205 if (gimple_code (stmt) == GIMPLE_PHI)
4206 lhs = gimple_phi_result (stmt);
4207 else if (is_gimple_assign (stmt))
4208 lhs = gimple_assign_lhs (stmt);
4209 else if (is_gimple_call (stmt))
4210 lhs = gimple_call_lhs (stmt);
4211 else
4212 gcc_unreachable ();
4214 if (TREE_CODE (lhs) != SSA_NAME)
4215 return false;
4216 decl = SSA_NAME_VAR (lhs);
4217 if (TREE_CODE (decl) != PARM_DECL)
4218 return false;
4220 adj = get_adjustment_for_base (adjustments, decl);
4221 if (!adj)
4222 return false;
4224 repl = get_replaced_param_substitute (adj);
4225 name = make_ssa_name (repl, stmt);
4227 if (dump_file)
4229 fprintf (dump_file, "replacing an SSA name of a removed param ");
4230 print_generic_expr (dump_file, lhs, 0);
4231 fprintf (dump_file, " with ");
4232 print_generic_expr (dump_file, name, 0);
4233 fprintf (dump_file, "\n");
4236 if (is_gimple_assign (stmt))
4237 gimple_assign_set_lhs (stmt, name);
4238 else if (is_gimple_call (stmt))
4239 gimple_call_set_lhs (stmt, name);
4240 else
4241 gimple_phi_set_result (stmt, name);
4243 replace_uses_by (lhs, name);
4244 release_ssa_name (lhs);
4245 return true;
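/* Illustrative GIMPLE-level sketch: if parameter p is being removed and the
   body still contains

     p_1 = PHI <p_2(3), p_3(4)>

   the PHI result is redirected to a new SSA name of the substitute VAR_DECL,
   every use of p_1 is rewritten via replace_uses_by, and p_1 is released.  */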
4248 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4249 so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4250 specifies whether the function should care about type incompatibility between
4251 the current and new expressions.  If it is false, the function will leave
4252 incompatibility issues to the caller.  Return true iff the expression
4253 was modified. */
4255 static bool
4256 sra_ipa_modify_expr (tree *expr, bool convert,
4257 ipa_parm_adjustment_vec adjustments)
4259 int i, len;
4260 struct ipa_parm_adjustment *adj, *cand = NULL;
4261 HOST_WIDE_INT offset, size, max_size;
4262 tree base, src;
4264 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4266 if (TREE_CODE (*expr) == BIT_FIELD_REF
4267 || TREE_CODE (*expr) == IMAGPART_EXPR
4268 || TREE_CODE (*expr) == REALPART_EXPR)
4270 expr = &TREE_OPERAND (*expr, 0);
4271 convert = true;
4274 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4275 if (!base || size == -1 || max_size == -1)
4276 return false;
4278 if (TREE_CODE (base) == MEM_REF)
4280 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4281 base = TREE_OPERAND (base, 0);
4284 base = get_ssa_base_param (base);
4285 if (!base || TREE_CODE (base) != PARM_DECL)
4286 return false;
4288 for (i = 0; i < len; i++)
4290 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4292 if (adj->base == base &&
4293 (adj->offset == offset || adj->remove_param))
4295 cand = adj;
4296 break;
4299 if (!cand || cand->copy_param || cand->remove_param)
4300 return false;
4302 if (cand->by_ref)
4303 src = build_simple_mem_ref (cand->reduction);
4304 else
4305 src = cand->reduction;
4307 if (dump_file && (dump_flags & TDF_DETAILS))
4309 fprintf (dump_file, "About to replace expr ");
4310 print_generic_expr (dump_file, *expr, 0);
4311 fprintf (dump_file, " with ");
4312 print_generic_expr (dump_file, src, 0);
4313 fprintf (dump_file, "\n");
4316 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4318 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4319 *expr = vce;
4321 else
4322 *expr = src;
4323 return true;
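/* Hypothetical before/after: with an adjustment for parameter p at offset 0
   whose reduction declaration is the new scalar parameter ISRA.3 (the name
   is made up for illustration), an occurrence of p->f becomes ISRA.3 when
   the component is passed by value, or *ISRA.3 when the adjustment is
   by_ref, wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the types
   do not uselessly convert.  */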
4326 /* If the statement pointed to by STMT_PTR contains any expressions that need
4327 to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle
4328 any potential type incompatibilities (GSI is used to accommodate conversion
4329 statements and must point to the statement).  Return true iff the statement
4330 was modified. */
4332 static bool
4333 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4334 ipa_parm_adjustment_vec adjustments)
4336 gimple stmt = *stmt_ptr;
4337 tree *lhs_p, *rhs_p;
4338 bool any;
4340 if (!gimple_assign_single_p (stmt))
4341 return false;
4343 rhs_p = gimple_assign_rhs1_ptr (stmt);
4344 lhs_p = gimple_assign_lhs_ptr (stmt);
4346 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4347 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4348 if (any)
4350 tree new_rhs = NULL_TREE;
4352 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4354 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4356 /* V_C_Es of constructors can cause trouble (PR 42714). */
4357 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4358 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4359 else
4360 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4362 else
4363 new_rhs = fold_build1_loc (gimple_location (stmt),
4364 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4365 *rhs_p);
4367 else if (REFERENCE_CLASS_P (*rhs_p)
4368 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4369 && !is_gimple_reg (*lhs_p))
4370 /* This can happen when an assignment between two single-field
4371 structures is turned into an assignment between two pointers to
4372 scalars (PR 42237). */
4373 new_rhs = *rhs_p;
4375 if (new_rhs)
4377 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4378 true, GSI_SAME_STMT);
4380 gimple_assign_set_rhs_from_tree (gsi, tmp);
4383 return true;
4386 return false;
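/* Sketch of the CONSTRUCTOR special case: rewriting both sides of an
   aggregate copy can leave a statement such as

     ISRA.4 = {};

   with a register-typed LHS; instead of building a V_C_E of the CONSTRUCTOR
   (problematic per PR 42714), the RHS is replaced by a zero constant of the
   LHS type.  */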
4389 /* Traverse the function body and perform all modifications as described in
4390 ADJUSTMENTS.  Return true iff the CFG has been changed. */
4392 static bool
4393 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4395 bool cfg_changed = false;
4396 basic_block bb;
4398 FOR_EACH_BB (bb)
4400 gimple_stmt_iterator gsi;
4402 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4403 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4405 gsi = gsi_start_bb (bb);
4406 while (!gsi_end_p (gsi))
4408 gimple stmt = gsi_stmt (gsi);
4409 bool modified = false;
4410 tree *t;
4411 unsigned i;
4413 switch (gimple_code (stmt))
4415 case GIMPLE_RETURN:
4416 t = gimple_return_retval_ptr (stmt);
4417 if (*t != NULL_TREE)
4418 modified |= sra_ipa_modify_expr (t, true, adjustments);
4419 break;
4421 case GIMPLE_ASSIGN:
4422 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4423 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4424 break;
4426 case GIMPLE_CALL:
4427 /* Operands must be processed before the lhs. */
4428 for (i = 0; i < gimple_call_num_args (stmt); i++)
4430 t = gimple_call_arg_ptr (stmt, i);
4431 modified |= sra_ipa_modify_expr (t, true, adjustments);
4434 if (gimple_call_lhs (stmt))
4436 t = gimple_call_lhs_ptr (stmt);
4437 modified |= sra_ipa_modify_expr (t, false, adjustments);
4438 modified |= replace_removed_params_ssa_names (stmt,
4439 adjustments);
4441 break;
4443 case GIMPLE_ASM:
4444 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4446 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4447 modified |= sra_ipa_modify_expr (t, true, adjustments);
4449 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4451 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4452 modified |= sra_ipa_modify_expr (t, false, adjustments);
4454 break;
4456 default:
4457 break;
4460 if (modified)
4462 update_stmt (stmt);
4463 if (maybe_clean_eh_stmt (stmt)
4464 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4465 cfg_changed = true;
4467 gsi_next (&gsi);
4471 return cfg_changed;
4474 /* Call gimple_debug_bind_reset_value on all debug statements describing
4475 gimple register parameters that are being removed or replaced. */
4477 static void
4478 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4480 int i, len;
4481 gimple_stmt_iterator *gsip = NULL, gsi;
4483 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4485 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4486 gsip = &gsi;
4488 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4489 for (i = 0; i < len; i++)
4491 struct ipa_parm_adjustment *adj;
4492 imm_use_iterator ui;
4493 gimple stmt, def_temp;
4494 tree name, vexpr, copy = NULL_TREE;
4495 use_operand_p use_p;
4497 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4498 if (adj->copy_param || !is_gimple_reg (adj->base))
4499 continue;
4500 name = gimple_default_def (cfun, adj->base);
4501 vexpr = NULL;
4502 if (name)
4503 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4505 /* All other users must have been removed by
4506 ipa_sra_modify_function_body. */
4507 gcc_assert (is_gimple_debug (stmt));
4508 if (vexpr == NULL && gsip != NULL)
4510 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4511 vexpr = make_node (DEBUG_EXPR_DECL);
4512 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4513 NULL);
4514 DECL_ARTIFICIAL (vexpr) = 1;
4515 TREE_TYPE (vexpr) = TREE_TYPE (name);
4516 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4517 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4519 if (vexpr)
4521 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4522 SET_USE (use_p, vexpr);
4524 else
4525 gimple_debug_bind_reset_value (stmt);
4526 update_stmt (stmt);
4528 /* Create a VAR_DECL for debug info purposes. */
4529 if (!DECL_IGNORED_P (adj->base))
4531 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4532 VAR_DECL, DECL_NAME (adj->base),
4533 TREE_TYPE (adj->base));
4534 if (DECL_PT_UID_SET_P (adj->base))
4535 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4536 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4537 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4538 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4539 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4540 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4541 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4542 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4543 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4544 SET_DECL_RTL (copy, 0);
4545 TREE_USED (copy) = 1;
4546 DECL_CONTEXT (copy) = current_function_decl;
4547 add_referenced_var (copy);
4548 add_local_decl (cfun, copy);
4549 DECL_CHAIN (copy) =
4550 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4551 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4553 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4555 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4556 if (vexpr)
4557 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4558 else
4559 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4560 NULL);
4561 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
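/* Illustrative debug-info outcome (details vary by configuration): for a
   removed gimple register parameter i, remaining debug binds that referenced
   i's default definition are re-pointed at a DEBUG_EXPR_DECL that is
   source-bound to the original PARM_DECL, and a matching VAR_DECL copy is
   created so the debugger can still name i.  */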
4566 /* Return true iff some caller of NODE passes fewer actual arguments than
4567 there are formal parameters in the current function; false otherwise. */
4569 static bool
4570 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4571 void *data ATTRIBUTE_UNUSED)
4573 struct cgraph_edge *cs;
4574 for (cs = node->callers; cs; cs = cs->next_caller)
4575 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4576 return true;
4578 return false;
4581 /* Convert all callers of NODE. */
4583 static bool
4584 convert_callers_for_node (struct cgraph_node *node,
4585 void *data)
4587 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4588 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4589 struct cgraph_edge *cs;
4591 for (cs = node->callers; cs; cs = cs->next_caller)
4593 current_function_decl = cs->caller->symbol.decl;
4594 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4596 if (dump_file)
4597 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4598 cs->caller->uid, cs->callee->uid,
4599 xstrdup (cgraph_node_name (cs->caller)),
4600 xstrdup (cgraph_node_name (cs->callee)));
4602 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4604 pop_cfun ();
4607 for (cs = node->callers; cs; cs = cs->next_caller)
4608 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4609 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4610 compute_inline_parameters (cs->caller, true);
4611 BITMAP_FREE (recomputed_callers);
4613 return true;
4616 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4618 static void
4619 convert_callers (struct cgraph_node *node, tree old_decl,
4620 ipa_parm_adjustment_vec adjustments)
4622 tree old_cur_fndecl = current_function_decl;
4623 basic_block this_block;
4625 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4626 adjustments, false);
4628 current_function_decl = old_cur_fndecl;
4630 if (!encountered_recursive_call)
4631 return;
4633 FOR_EACH_BB (this_block)
4635 gimple_stmt_iterator gsi;
4637 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4639 gimple stmt = gsi_stmt (gsi);
4640 tree call_fndecl;
4641 if (gimple_code (stmt) != GIMPLE_CALL)
4642 continue;
4643 call_fndecl = gimple_call_fndecl (stmt);
4644 if (call_fndecl == old_decl)
4646 if (dump_file)
4647 fprintf (dump_file, "Adjusting recursive call");
4648 gimple_call_set_fndecl (stmt, node->symbol.decl);
4649 ipa_modify_call_arguments (NULL, stmt, adjustments);
4654 return;
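/* Recursive-call sketch: if the body of the new version still contains a
   self-call such as foo (x), the loop above re-points the GIMPLE_CALL at the
   new node's decl and rewrites its arguments with
   ipa_modify_call_arguments, just as convert_callers_for_node does for
   ordinary call sites.  */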
4657 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4658 as given in ADJUSTMENTS.  Return true iff the CFG has been changed. */
4660 static bool
4661 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4663 struct cgraph_node *new_node;
4664 bool cfg_changed;
4665 VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
4667 rebuild_cgraph_edges ();
4668 free_dominance_info (CDI_DOMINATORS);
4669 pop_cfun ();
4670 current_function_decl = NULL_TREE;
4672 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4673 false, NULL, NULL, "isra");
4674 current_function_decl = new_node->symbol.decl;
4675 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4677 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4678 cfg_changed = ipa_sra_modify_function_body (adjustments);
4679 sra_ipa_reset_debug_stmts (adjustments);
4680 convert_callers (new_node, node->symbol.decl, adjustments);
4681 cgraph_make_node_local (new_node);
4682 return cfg_changed;
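/* End-to-end illustration (hypothetical): a function

     static int foo (struct S *p) { return p->a; }

   whose only use of p is reading p->a is versioned into a clone named with
   the "isra" suffix, e.g. foo.isra.0 (int a).  Its body is rewritten to use
   the scalar directly, its debug statements are reset, all recorded callers
   are redirected and the clone is made local.  */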
4685 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4686 its attributes, return true otherwise.  NODE is the cgraph node of the
4687 current function. */
4689 static bool
4690 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4692 if (!cgraph_node_can_be_local_p (node))
4694 if (dump_file)
4695 fprintf (dump_file, "Function not local to this compilation unit.\n");
4696 return false;
4699 if (!node->local.can_change_signature)
4701 if (dump_file)
4702 fprintf (dump_file, "Function can not change signature.\n");
4703 return false;
4706 if (!tree_versionable_function_p (node->symbol.decl))
4708 if (dump_file)
4709 fprintf (dump_file, "Function is not versionable.\n");
4710 return false;
4713 if (DECL_VIRTUAL_P (current_function_decl))
4715 if (dump_file)
4716 fprintf (dump_file, "Function is a virtual method.\n");
4717 return false;
4720 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4721 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4723 if (dump_file)
4724 fprintf (dump_file, "Function too big to be made truly local.\n");
4725 return false;
4728 if (!node->callers)
4730 if (dump_file)
4731 fprintf (dump_file,
4732 "Function has no callers in this compilation unit.\n");
4733 return false;
4736 if (cfun->stdarg)
4738 if (dump_file)
4739 fprintf (dump_file, "Function uses stdarg. \n");
4740 return false;
4743 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4744 return false;
4746 return true;
4749 /* Perform early interprocedural SRA. */
4751 static unsigned int
4752 ipa_early_sra (void)
4754 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4755 ipa_parm_adjustment_vec adjustments;
4756 int ret = 0;
4758 if (!ipa_sra_preliminary_function_checks (node))
4759 return 0;
4761 sra_initialize ();
4762 sra_mode = SRA_MODE_EARLY_IPA;
4764 if (!find_param_candidates ())
4766 if (dump_file)
4767 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4768 goto simple_out;
4771 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4772 NULL, true))
4774 if (dump_file)
4775 fprintf (dump_file, "There are callers with insufficient number of "
4776 "arguments.\n");
4777 goto simple_out;
4780 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4781 func_param_count
4782 * last_basic_block_for_function (cfun));
4783 final_bbs = BITMAP_ALLOC (NULL);
4785 scan_function ();
4786 if (encountered_apply_args)
4788 if (dump_file)
4789 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4790 goto out;
4793 if (encountered_unchangable_recursive_call)
4795 if (dump_file)
4796 fprintf (dump_file, "Function calls itself with insufficient "
4797 "number of arguments.\n");
4798 goto out;
4801 adjustments = analyze_all_param_accesses ();
4802 if (!adjustments)
4803 goto out;
4804 if (dump_file)
4805 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4807 if (modify_function (node, adjustments))
4808 ret = TODO_update_ssa | TODO_cleanup_cfg;
4809 else
4810 ret = TODO_update_ssa;
4811 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4813 statistics_counter_event (cfun, "Unused parameters deleted",
4814 sra_stats.deleted_unused_parameters);
4815 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4816 sra_stats.scalar_by_ref_to_by_val);
4817 statistics_counter_event (cfun, "Aggregate parameters broken up",
4818 sra_stats.aggregate_params_reduced);
4819 statistics_counter_event (cfun, "Aggregate parameter components created",
4820 sra_stats.param_reductions_created);
4822 out:
4823 BITMAP_FREE (final_bbs);
4824 free (bb_dereferences);
4825 simple_out:
4826 sra_deinitialize ();
4827 return ret;
4830 /* Return true iff early IPA-SRA shall be performed. */
4831 static bool
4832 ipa_early_sra_gate (void)
4834 return flag_ipa_sra && dbg_cnt (eipa_sra);
4837 struct gimple_opt_pass pass_early_ipa_sra =
4840 GIMPLE_PASS,
4841 "eipa_sra", /* name */
4842 ipa_early_sra_gate, /* gate */
4843 ipa_early_sra, /* execute */
4844 NULL, /* sub */
4845 NULL, /* next */
4846 0, /* static_pass_number */
4847 TV_IPA_SRA, /* tv_id */
4848 0, /* properties_required */
4849 0, /* properties_provided */
4850 0, /* properties_destroyed */
4851 0, /* todo_flags_start */
4852 TODO_dump_symtab /* todo_flags_finish */