gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2013 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
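
/* As a small, purely illustrative example (not taken from this file), late
   SRA applied to

     struct S { int i; float f; };

     float
     foo (struct S s)
     {
       s.i = 3;
       return s.f + s.i;
     }

   finds `s' to be a candidate (stage 1), records the accesses s.i and s.f
   (stage 2), determines that they do not partially overlap (stage 3) and
   rewrites the body in terms of two new scalars, roughly

     SR_i = 3;
     return SR_f + (float) SR_i;

   with SR_i and SR_f initialized from the incoming parameter aggregate, as
   described in stage 4 above.  */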
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "target.h"
88 #include "flags.h"
89 #include "dbgcnt.h"
90 #include "tree-inline.h"
91 #include "gimple-pretty-print.h"
92 #include "ipa-inline.h"
94 /* Enumeration of all aggregate reductions we can do. */
95 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
96 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 SRA_MODE_INTRA }; /* late intraprocedural SRA */
99 /* Global variable describing which aggregate reduction we are performing at
100 the moment. */
101 static enum sra_mode sra_mode;
103 struct assign_link;
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106 part). It can also represent a group of accesses that refer to exactly the
107 same fragment of an aggregate (i.e. those that have exactly the same offset
108 and size). Such representatives for a single aggregate, once determined,
109 are linked in a linked list and have the group fields set.
111 Moreover, when doing intraprocedural SRA, a tree is built from those
112 representatives (by the means of first_child and next_sibling pointers), in
113 which all items in a subtree are "within" the root, i.e. their offset is
114 greater or equal to offset of the root and offset+size is smaller or equal
115 to offset+size of the root. Children of an access are sorted by offset.
117 Note that accesses to parts of vector and complex number types are always
118 represented by an access to the whole complex number or vector. It is the
119 duty of the modifying functions to replace them appropriately. */
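
/* For instance (a hypothetical layout, assuming 32-bit int and 16-bit
   short), for

     struct A { int x; struct { short a; short b; } in; };

   the group representatives could form the tree

     <0, 64>              the whole of `A'
       <0, 32>            x
       <32, 32>           in
         <32, 16>         in.a
         <48, 16>         in.b

   where each node is described by its <offset, size> in bits, every child
   lies "within" its parent and children are sorted by offset.  */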
121 struct access
123 /* Values returned by `get_ref_base_and_extent' for each component reference.
124 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
126 HOST_WIDE_INT offset;
127 HOST_WIDE_INT size;
128 tree base;
130 /* Expression. It is context dependent so do not use it to create new
131 expressions to access the original aggregate. See PR 42154 for a
132 testcase. */
133 tree expr;
134 /* Type. */
135 tree type;
137 /* The statement this access belongs to. */
138 gimple stmt;
140 /* Next group representative for this aggregate. */
141 struct access *next_grp;
143 /* Pointer to the group representative. Pointer to itself if the struct is
144 the representative. */
145 struct access *group_representative;
147 /* If this access has any children (in terms of the definition above), this
148 points to the first one. */
149 struct access *first_child;
151 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152 described above. In IPA-SRA this is a pointer to the next access
153 belonging to the same group (having the same representative). */
154 struct access *next_sibling;
156 /* Pointers to the first and last element in the linked list of assign
157 links. */
158 struct assign_link *first_link, *last_link;
160 /* Pointer to the next access in the work queue. */
161 struct access *next_queued;
163 /* Replacement variable for this access "region." Never to be accessed
164 directly, always only by the means of get_access_replacement() and only
165 when grp_to_be_replaced flag is set. */
166 tree replacement_decl;
168 /* Is this particular access a write access? */
169 unsigned write : 1;
171 /* Is this access an access to a non-addressable field? */
172 unsigned non_addressable : 1;
174 /* Is this access currently in the work queue? */
175 unsigned grp_queued : 1;
177 /* Does this group contain a write access? This flag is propagated down the
178 access tree. */
179 unsigned grp_write : 1;
181 /* Does this group contain a read access? This flag is propagated down the
182 access tree. */
183 unsigned grp_read : 1;
185 /* Does this group contain a read access that comes from an assignment
186 statement? This flag is propagated down the access tree. */
187 unsigned grp_assignment_read : 1;
189 /* Does this group contain a write access that comes from an assignment
190 statement? This flag is propagated down the access tree. */
191 unsigned grp_assignment_write : 1;
193 /* Does this group contain a read access through a scalar type? This flag is
194 not propagated in the access tree in any direction. */
195 unsigned grp_scalar_read : 1;
197 /* Does this group contain a write access through a scalar type? This flag
198 is not propagated in the access tree in any direction. */
199 unsigned grp_scalar_write : 1;
201 /* Is this access an artificial one created to scalarize some record
202 entirely? */
203 unsigned grp_total_scalarization : 1;
205 /* Other passes of the analysis use this bit to make function
206 analyze_access_subtree create scalar replacements for this group if
207 possible. */
208 unsigned grp_hint : 1;
210 /* Is the subtree rooted in this access fully covered by scalar
211 replacements? */
212 unsigned grp_covered : 1;
214 /* If set to true, this access and all below it in an access tree must not be
215 scalarized. */
216 unsigned grp_unscalarizable_region : 1;
218 /* Whether data have been written to parts of the aggregate covered by this
219 access which is not to be scalarized. This flag is propagated up in the
220 access tree. */
221 unsigned grp_unscalarized_data : 1;
223 /* Does this access and/or group contain a write access through a
224 BIT_FIELD_REF? */
225 unsigned grp_partial_lhs : 1;
227 /* Set when a scalar replacement should be created for this variable. */
228 unsigned grp_to_be_replaced : 1;
230 /* Set when we want a replacement for the sole purpose of having it in
231 generated debug statements. */
232 unsigned grp_to_be_debug_replaced : 1;
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
251 typedef struct access *access_p;
254 /* Alloc pool for allocating access structures. */
255 static alloc_pool access_pool;
257 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
258 are used to propagate subaccesses from rhs to lhs as long as they don't
259 conflict with what is already there. */
260 struct assign_link
262 struct access *lacc, *racc;
263 struct assign_link *next;
266 /* Alloc pool for allocating assign link structures. */
267 static alloc_pool link_pool;
269 /* Base (tree) -> Vector (vec<access_p> *) map. */
270 static struct pointer_map_t *base_access_vec;
272 /* Set of candidates. */
273 static bitmap candidate_bitmap;
274 static htab_t candidates;
276 /* For a candidate UID return the candidate's decl. */
278 static inline tree
279 candidate (unsigned uid)
281 struct tree_decl_minimal t;
282 t.uid = uid;
283 return (tree) htab_find_with_hash (candidates, &t, uid);
286 /* Bitmap of candidates which we should try to entirely scalarize away and
287 those which cannot be (because they are, and need to be, used as a whole). */
288 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
290 /* Obstack for creation of fancy names. */
291 static struct obstack name_obstack;
293 /* Head of a linked list of accesses that need to have their subaccesses
294 propagated to their assignment counterparts. */
295 static struct access *work_queue_head;
297 /* Number of parameters of the analyzed function when doing early IPA SRA. */
298 static int func_param_count;
300 /* scan_function sets the following to true if it encounters a call to
301 __builtin_apply_args. */
302 static bool encountered_apply_args;
304 /* Set by scan_function when it finds a recursive call. */
305 static bool encountered_recursive_call;
307 /* Set by scan_function when it finds a recursive call with fewer actual
308 arguments than formal parameters. */
309 static bool encountered_unchangable_recursive_call;
311 /* This is a table in which for each basic block and parameter there is a
312 distance (offset + size) in that parameter which is dereferenced and
313 accessed in that BB. */
314 static HOST_WIDE_INT *bb_dereferences;
315 /* Bitmap of BBs that can cause the function to "stop" progressing by
316 returning, throwing externally, looping infinitely or calling a function
317 which might abort, etc. */
318 static bitmap final_bbs;
320 /* Representative of no accesses at all. */
321 static struct access no_accesses_representant;
323 /* Predicate to test the special value. */
325 static inline bool
326 no_accesses_p (struct access *access)
328 return access == &no_accesses_representant;
331 /* Counters of various SRA events, gathered while the pass runs and used
332 when reporting pass statistics. */
335 static struct
337 /* Number of processed aggregates is readily available in
338 analyze_all_variable_accesses and so is not stored here. */
340 /* Number of created scalar replacements. */
341 int replacements;
343 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
344 expression. */
345 int exprs;
347 /* Number of statements created by generate_subtree_copies. */
348 int subtree_copies;
350 /* Number of statements created by load_assign_lhs_subreplacements. */
351 int subreplacements;
353 /* Number of times sra_modify_assign has deleted a statement. */
354 int deleted;
356 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
357 RHS separately due to type conversions or nonexistent matching
358 references. */
359 int separate_lhs_rhs_handling;
361 /* Number of parameters that were removed because they were unused. */
362 int deleted_unused_parameters;
364 /* Number of scalars passed as parameters by reference that have been
365 converted to be passed by value. */
366 int scalar_by_ref_to_by_val;
368 /* Number of aggregate parameters that were replaced by one or more of their
369 components. */
370 int aggregate_params_reduced;
372 /* Number of components created when splitting aggregate parameters. */
373 int param_reductions_created;
374 } sra_stats;
/* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are. */
376 static void
377 dump_access (FILE *f, struct access *access, bool grp)
379 fprintf (f, "access { ");
380 fprintf (f, "base = (%d)'", DECL_UID (access->base));
381 print_generic_expr (f, access->base, 0);
382 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
383 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
384 fprintf (f, ", expr = ");
385 print_generic_expr (f, access->expr, 0);
386 fprintf (f, ", type = ");
387 print_generic_expr (f, access->type, 0);
388 if (grp)
389 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
390 "grp_assignment_write = %d, grp_scalar_read = %d, "
391 "grp_scalar_write = %d, grp_total_scalarization = %d, "
392 "grp_hint = %d, grp_covered = %d, "
393 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
394 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
395 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
396 "grp_not_necessarilly_dereferenced = %d\n",
397 access->grp_read, access->grp_write, access->grp_assignment_read,
398 access->grp_assignment_write, access->grp_scalar_read,
399 access->grp_scalar_write, access->grp_total_scalarization,
400 access->grp_hint, access->grp_covered,
401 access->grp_unscalarizable_region, access->grp_unscalarized_data,
402 access->grp_partial_lhs, access->grp_to_be_replaced,
403 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
404 access->grp_not_necessarilly_dereferenced);
405 else
406 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
407 "grp_partial_lhs = %d\n",
408 access->write, access->grp_total_scalarization,
409 access->grp_partial_lhs);
412 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
414 static void
415 dump_access_tree_1 (FILE *f, struct access *access, int level)
419 int i;
421 for (i = 0; i < level; i++)
422 fputs ("* ", f);
424 dump_access (f, access, true);
426 if (access->first_child)
427 dump_access_tree_1 (f, access->first_child, level + 1);
429 access = access->next_sibling;
431 while (access);
434 /* Dump all access trees for a variable, given the pointer to the first root in
435 ACCESS. */
437 static void
438 dump_access_tree (FILE *f, struct access *access)
440 for (; access; access = access->next_grp)
441 dump_access_tree_1 (f, access, 0);
444 /* Return true iff ACC is non-NULL and has subaccesses. */
446 static inline bool
447 access_has_children_p (struct access *acc)
449 return acc && acc->first_child;
452 /* Return true iff ACC is (partly) covered by at least one replacement. */
454 static bool
455 access_has_replacements_p (struct access *acc)
457 struct access *child;
458 if (acc->grp_to_be_replaced)
459 return true;
460 for (child = acc->first_child; child; child = child->next_sibling)
461 if (access_has_replacements_p (child))
462 return true;
463 return false;
466 /* Return a vector of pointers to accesses for the variable given in BASE or
467 NULL if there is none. */
469 static vec<access_p> *
470 get_base_access_vector (tree base)
472 void **slot;
474 slot = pointer_map_contains (base_access_vec, base);
475 if (!slot)
476 return NULL;
477 else
478 return *(vec<access_p> **) slot;
481 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
482 in ACCESS. Return NULL if it cannot be found. */
484 static struct access *
485 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
486 HOST_WIDE_INT size)
488 while (access && (access->offset != offset || access->size != size))
490 struct access *child = access->first_child;
492 while (child && (child->offset + child->size <= offset))
493 child = child->next_sibling;
494 access = child;
497 return access;
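
/* In the access tree sketched above (see the comment before struct access),
   a query for <32, 16> starts at the root <0, 64>, skips the child <0, 32>
   (its end does not reach past offset 32), descends into <32, 32> and
   returns its first child <32, 16>; a query such as <16, 32>, which no node
   matches exactly, eventually runs out of children and returns NULL.  */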
500 /* Return the first group representative for DECL or NULL if none exists. */
502 static struct access *
503 get_first_repr_for_decl (tree base)
505 vec<access_p> *access_vec;
507 access_vec = get_base_access_vector (base);
508 if (!access_vec)
509 return NULL;
511 return (*access_vec)[0];
514 /* Find an access representative for the variable BASE and given OFFSET and
515 SIZE. Requires that access trees have already been built. Return NULL if
516 it cannot be found. */
518 static struct access *
519 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
520 HOST_WIDE_INT size)
522 struct access *access;
524 access = get_first_repr_for_decl (base);
525 while (access && (access->offset + access->size <= offset))
526 access = access->next_grp;
527 if (!access)
528 return NULL;
530 return find_access_in_subtree (access, offset, size);
533 /* Add LINK to the linked list of assign links of RACC. */
534 static void
535 add_link_to_rhs (struct access *racc, struct assign_link *link)
537 gcc_assert (link->racc == racc);
539 if (!racc->first_link)
541 gcc_assert (!racc->last_link);
542 racc->first_link = link;
544 else
545 racc->last_link->next = link;
547 racc->last_link = link;
548 link->next = NULL;
551 /* Move all link structures in their linked list in OLD_RACC to the linked list
552 in NEW_RACC. */
553 static void
554 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
556 if (!old_racc->first_link)
558 gcc_assert (!old_racc->last_link);
559 return;
562 if (new_racc->first_link)
564 gcc_assert (!new_racc->last_link->next);
565 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
567 new_racc->last_link->next = old_racc->first_link;
568 new_racc->last_link = old_racc->last_link;
570 else
572 gcc_assert (!new_racc->last_link);
574 new_racc->first_link = old_racc->first_link;
575 new_racc->last_link = old_racc->last_link;
577 old_racc->first_link = old_racc->last_link = NULL;
580 /* Add ACCESS to the work queue (which is actually a stack). */
582 static void
583 add_access_to_work_queue (struct access *access)
585 if (!access->grp_queued)
587 gcc_assert (!access->next_queued);
588 access->next_queued = work_queue_head;
589 access->grp_queued = 1;
590 work_queue_head = access;
594 /* Pop an access from the work queue, and return it, assuming there is one. */
596 static struct access *
597 pop_access_from_work_queue (void)
599 struct access *access = work_queue_head;
601 work_queue_head = access->next_queued;
602 access->next_queued = NULL;
603 access->grp_queued = 0;
604 return access;
608 /* Allocate necessary structures. */
610 static void
611 sra_initialize (void)
613 candidate_bitmap = BITMAP_ALLOC (NULL);
614 candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
615 uid_decl_map_hash, uid_decl_map_eq, NULL);
616 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
617 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
618 gcc_obstack_init (&name_obstack);
619 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
620 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
621 base_access_vec = pointer_map_create ();
622 memset (&sra_stats, 0, sizeof (sra_stats));
623 encountered_apply_args = false;
624 encountered_recursive_call = false;
625 encountered_unchangable_recursive_call = false;
628 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
630 static bool
631 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
632 void *data ATTRIBUTE_UNUSED)
634 vec<access_p> *access_vec = (vec<access_p> *) *value;
635 vec_free (access_vec);
636 return true;
639 /* Deallocate all general structures. */
641 static void
642 sra_deinitialize (void)
644 BITMAP_FREE (candidate_bitmap);
645 htab_delete (candidates);
646 BITMAP_FREE (should_scalarize_away_bitmap);
647 BITMAP_FREE (cannot_scalarize_away_bitmap);
648 free_alloc_pool (access_pool);
649 free_alloc_pool (link_pool);
650 obstack_free (&name_obstack, NULL);
652 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
653 pointer_map_destroy (base_access_vec);
656 /* Remove DECL from candidates for SRA and write REASON to the dump file if
657 there is one. */
658 static void
659 disqualify_candidate (tree decl, const char *reason)
661 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
662 htab_clear_slot (candidates,
663 htab_find_slot_with_hash (candidates, decl,
664 DECL_UID (decl), NO_INSERT));
666 if (dump_file && (dump_flags & TDF_DETAILS))
668 fprintf (dump_file, "! Disqualifying ");
669 print_generic_expr (dump_file, decl, 0);
670 fprintf (dump_file, " - %s\n", reason);
674 /* Return true iff the type contains a field or an element which does not allow
675 scalarization. */
677 static bool
678 type_internals_preclude_sra_p (tree type, const char **msg)
680 tree fld;
681 tree et;
683 switch (TREE_CODE (type))
685 case RECORD_TYPE:
686 case UNION_TYPE:
687 case QUAL_UNION_TYPE:
688 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
689 if (TREE_CODE (fld) == FIELD_DECL)
691 tree ft = TREE_TYPE (fld);
693 if (TREE_THIS_VOLATILE (fld))
695 *msg = "volatile structure field";
696 return true;
698 if (!DECL_FIELD_OFFSET (fld))
700 *msg = "no structure field offset";
701 return true;
703 if (!DECL_SIZE (fld))
705 *msg = "zero structure field size";
706 return true;
708 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
710 *msg = "structure field offset not fixed";
711 return true;
713 if (!host_integerp (DECL_SIZE (fld), 1))
715 *msg = "structure field size not fixed";
716 return true;
718 if (!host_integerp (bit_position (fld), 0))
720 *msg = "structure field size too big";
721 return true;
723 if (AGGREGATE_TYPE_P (ft)
724 && int_bit_position (fld) % BITS_PER_UNIT != 0)
726 *msg = "structure field is bit field";
727 return true;
730 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
731 return true;
734 return false;
736 case ARRAY_TYPE:
737 et = TREE_TYPE (type);
739 if (TYPE_VOLATILE (et))
741 *msg = "element type is volatile";
742 return true;
745 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
746 return true;
748 return false;
750 default:
751 return false;
755 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
756 base variable if it is. Return T if it is not an SSA_NAME. */
758 static tree
759 get_ssa_base_param (tree t)
761 if (TREE_CODE (t) == SSA_NAME)
763 if (SSA_NAME_IS_DEFAULT_DEF (t))
764 return SSA_NAME_VAR (t);
765 else
766 return NULL_TREE;
768 return t;
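
/* For example, for the default definition p_1(D) of a PARM_DECL `p' this
   returns `p' itself; for any other SSA_NAME it returns NULL_TREE, and a
   tree that is not an SSA_NAME is returned unchanged.  */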
771 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
772 belongs to, unless the BB has already been marked as potentially
773 final. */
775 static void
776 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
778 basic_block bb = gimple_bb (stmt);
779 int idx, parm_index = 0;
780 tree parm;
782 if (bitmap_bit_p (final_bbs, bb->index))
783 return;
785 for (parm = DECL_ARGUMENTS (current_function_decl);
786 parm && parm != base;
787 parm = DECL_CHAIN (parm))
788 parm_index++;
790 gcc_assert (parm_index < func_param_count);
792 idx = bb->index * func_param_count + parm_index;
793 if (bb_dereferences[idx] < dist)
794 bb_dereferences[idx] = dist;
797 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
798 the three fields. Also add it to the vector of accesses corresponding to
799 the base. Finally, return the new access. */
801 static struct access *
802 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
804 vec<access_p> *v;
805 struct access *access;
806 void **slot;
808 access = (struct access *) pool_alloc (access_pool);
809 memset (access, 0, sizeof (struct access));
810 access->base = base;
811 access->offset = offset;
812 access->size = size;
814 slot = pointer_map_contains (base_access_vec, base);
815 if (slot)
816 v = (vec<access_p> *) *slot;
817 else
818 vec_alloc (v, 32);
820 v->safe_push (access);
822 *((vec<access_p> **)
823 pointer_map_insert (base_access_vec, base)) = v;
825 return access;
828 /* Create and insert access for EXPR. Return created access, or NULL if it is
829 not possible. */
831 static struct access *
832 create_access (tree expr, gimple stmt, bool write)
834 struct access *access;
835 HOST_WIDE_INT offset, size, max_size;
836 tree base = expr;
837 bool ptr, unscalarizable_region = false;
839 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
841 if (sra_mode == SRA_MODE_EARLY_IPA
842 && TREE_CODE (base) == MEM_REF)
844 base = get_ssa_base_param (TREE_OPERAND (base, 0));
845 if (!base)
846 return NULL;
847 ptr = true;
849 else
850 ptr = false;
852 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
853 return NULL;
855 if (sra_mode == SRA_MODE_EARLY_IPA)
857 if (size < 0 || size != max_size)
859 disqualify_candidate (base, "Encountered a variable sized access.");
860 return NULL;
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
865 disqualify_candidate (base, "Encountered a bit-field access.");
866 return NULL;
868 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
870 if (ptr)
871 mark_parm_dereference (base, offset + size, stmt);
873 else
875 if (size != max_size)
877 size = max_size;
878 unscalarizable_region = true;
880 if (size < 0)
882 disqualify_candidate (base, "Encountered an unconstrained access.");
883 return NULL;
887 access = create_access_1 (base, offset, size);
888 access->expr = expr;
889 access->type = TREE_TYPE (expr);
890 access->write = write;
891 access->grp_unscalarizable_region = unscalarizable_region;
892 access->stmt = stmt;
894 if (TREE_CODE (expr) == COMPONENT_REF
895 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
896 access->non_addressable = 1;
898 return access;
902 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
903 register types or (recursively) records with only these two kinds of fields.
904 It also returns false if any of these records contains a bit-field. */
906 static bool
907 type_consists_of_records_p (tree type)
909 tree fld;
911 if (TREE_CODE (type) != RECORD_TYPE)
912 return false;
914 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
915 if (TREE_CODE (fld) == FIELD_DECL)
917 tree ft = TREE_TYPE (fld);
919 if (DECL_BIT_FIELD (fld))
920 return false;
922 if (!is_gimple_reg_type (ft)
923 && !type_consists_of_records_p (ft))
924 return false;
927 return true;
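
/* For illustration:

     struct P { int i; struct Q { float f; } q; };   <- qualifies
     struct R { int b : 3; };                        <- bit-field, rejected
     struct S { int a[4]; };                         <- rejected, an array is
                                                        neither a register
                                                        type nor a record  */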
930 /* Create total_scalarization accesses for all scalar type fields in DECL,
931 which must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
932 must be the top-most VAR_DECL representing the variable, OFFSET must be the
933 offset of DECL within BASE. REF must be the memory reference expression for
934 the given decl. */
936 static void
937 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
938 tree ref)
940 tree fld, decl_type = TREE_TYPE (decl);
942 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
943 if (TREE_CODE (fld) == FIELD_DECL)
945 HOST_WIDE_INT pos = offset + int_bit_position (fld);
946 tree ft = TREE_TYPE (fld);
947 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
948 NULL_TREE);
950 if (is_gimple_reg_type (ft))
952 struct access *access;
953 HOST_WIDE_INT size;
955 size = tree_low_cst (DECL_SIZE (fld), 1);
956 access = create_access_1 (base, pos, size);
957 access->expr = nref;
958 access->type = ft;
959 access->grp_total_scalarization = 1;
960 /* Accesses for intraprocedural SRA can have their stmt NULL. */
962 else
963 completely_scalarize_record (base, fld, pos, nref);
967 /* Create total_scalarization accesses for all scalar type fields in VAR and
968 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
969 type_consists_of_records_p. */
971 static void
972 completely_scalarize_var (tree var)
974 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
975 struct access *access;
977 access = create_access_1 (var, 0, size);
978 access->expr = var;
979 access->type = TREE_TYPE (var);
980 access->grp_total_scalarization = 1;
982 completely_scalarize_record (var, var, 0, var);
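
/* Continuing the illustrative struct P above, for a variable `p' of that
   type this creates the whole-variable access <0, 64> here and, through
   completely_scalarize_record, the scalar accesses <0, 32> for p.i and
   <32, 32> for p.q.f (bit offsets and sizes assuming 32-bit int and float),
   all of them with grp_total_scalarization set.  */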
985 /* Search the given tree for a declaration by skipping handled components and
986 exclude it from the candidates. */
988 static void
989 disqualify_base_of_expr (tree t, const char *reason)
991 t = get_base_address (t);
992 if (sra_mode == SRA_MODE_EARLY_IPA
993 && TREE_CODE (t) == MEM_REF)
994 t = get_ssa_base_param (TREE_OPERAND (t, 0));
996 if (t && DECL_P (t))
997 disqualify_candidate (t, reason);
1000 /* Scan expression EXPR and create access structures for all accesses to
1001 candidates for scalarization. Return the created access or NULL if none is
1002 created. */
1004 static struct access *
1005 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1007 struct access *ret = NULL;
1008 bool partial_ref;
1010 if (TREE_CODE (expr) == BIT_FIELD_REF
1011 || TREE_CODE (expr) == IMAGPART_EXPR
1012 || TREE_CODE (expr) == REALPART_EXPR)
1014 expr = TREE_OPERAND (expr, 0);
1015 partial_ref = true;
1017 else
1018 partial_ref = false;
1020 /* We need to dive through V_C_Es in order to get the size of its parameter
1021 and not the result type. Ada produces such statements. We are also
1022 capable of handling the topmost V_C_E but not any of those buried in other
1023 handled components. */
1024 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1025 expr = TREE_OPERAND (expr, 0);
1027 if (contains_view_convert_expr_p (expr))
1029 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1030 "component.");
1031 return NULL;
1034 switch (TREE_CODE (expr))
1036 case MEM_REF:
1037 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1038 && sra_mode != SRA_MODE_EARLY_IPA)
1039 return NULL;
1040 /* fall through */
1041 case VAR_DECL:
1042 case PARM_DECL:
1043 case RESULT_DECL:
1044 case COMPONENT_REF:
1045 case ARRAY_REF:
1046 case ARRAY_RANGE_REF:
1047 ret = create_access (expr, stmt, write);
1048 break;
1050 default:
1051 break;
1054 if (write && partial_ref && ret)
1055 ret->grp_partial_lhs = 1;
1057 return ret;
1060 /* Scan expression EXPR and create access structures for all accesses to
1061 candidates for scalarization. Return true if any access has been inserted.
1062 STMT must be the statement from which the expression is taken, WRITE must be
1063 true if the expression is a store and false otherwise. */
1065 static bool
1066 build_access_from_expr (tree expr, gimple stmt, bool write)
1068 struct access *access;
1070 access = build_access_from_expr_1 (expr, stmt, write);
1071 if (access)
1073 /* This means the aggregate is accessed as a whole in a way other than an
1074 assign statement and thus cannot be removed even if we had a scalar
1075 replacement for everything. */
1076 if (cannot_scalarize_away_bitmap)
1077 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1078 return true;
1080 return false;
1083 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1084 modes in which it matters, return true iff they have been disqualified. RHS
1085 may be NULL, in that case ignore it. If we scalarize an aggregate in
1086 intra-SRA we may need to add statements after each statement. This is not
1087 possible if a statement unconditionally has to end the basic block. */
1088 static bool
1089 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1091 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1092 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1094 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1095 if (rhs)
1096 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1097 return true;
1099 return false;
1102 /* Scan expressions occurring in STMT, create access structures for all accesses
1103 to candidates for scalarization and remove those candidates which occur in
1104 statements or expressions that prevent them from being split apart. Return
1105 true if any access has been inserted. */
1107 static bool
1108 build_accesses_from_assign (gimple stmt)
1110 tree lhs, rhs;
1111 struct access *lacc, *racc;
1113 if (!gimple_assign_single_p (stmt)
1114 /* Scope clobbers don't influence scalarization. */
1115 || gimple_clobber_p (stmt))
1116 return false;
1118 lhs = gimple_assign_lhs (stmt);
1119 rhs = gimple_assign_rhs1 (stmt);
1121 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1122 return false;
1124 racc = build_access_from_expr_1 (rhs, stmt, false);
1125 lacc = build_access_from_expr_1 (lhs, stmt, true);
1127 if (lacc)
1128 lacc->grp_assignment_write = 1;
1130 if (racc)
1132 racc->grp_assignment_read = 1;
1133 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1134 && !is_gimple_reg_type (racc->type))
1135 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1138 if (lacc && racc
1139 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1140 && !lacc->grp_unscalarizable_region
1141 && !racc->grp_unscalarizable_region
1142 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1143 && lacc->size == racc->size
1144 && useless_type_conversion_p (lacc->type, racc->type))
1146 struct assign_link *link;
1148 link = (struct assign_link *) pool_alloc (link_pool);
1149 memset (link, 0, sizeof (struct assign_link));
1151 link->lacc = lacc;
1152 link->racc = racc;
1154 add_link_to_rhs (racc, link);
1157 return lacc || racc;
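
/* E.g. for an assignment `d = s' between two candidate aggregates of the
   same size and compatible type, this records a read access for `s', a
   write access for `d' and an assign_link connecting the two, through which
   subaccesses of `s' can later be propagated onto `d' during analysis.  */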
1160 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1161 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1163 static bool
1164 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1165 void *data ATTRIBUTE_UNUSED)
1167 op = get_base_address (op);
1168 if (op
1169 && DECL_P (op))
1170 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1172 return false;
1175 /* Return true iff callsite CALL has at least as many actual arguments as there
1176 are formal parameters of the function currently processed by IPA-SRA. */
1178 static inline bool
1179 callsite_has_enough_arguments_p (gimple call)
1181 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1184 /* Scan function and look for interesting expressions and create access
1185 structures for them. Return true iff any access is created. */
1187 static bool
1188 scan_function (void)
1190 basic_block bb;
1191 bool ret = false;
1193 FOR_EACH_BB (bb)
1195 gimple_stmt_iterator gsi;
1196 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1198 gimple stmt = gsi_stmt (gsi);
1199 tree t;
1200 unsigned i;
1202 if (final_bbs && stmt_can_throw_external (stmt))
1203 bitmap_set_bit (final_bbs, bb->index);
1204 switch (gimple_code (stmt))
1206 case GIMPLE_RETURN:
1207 t = gimple_return_retval (stmt);
1208 if (t != NULL_TREE)
1209 ret |= build_access_from_expr (t, stmt, false);
1210 if (final_bbs)
1211 bitmap_set_bit (final_bbs, bb->index);
1212 break;
1214 case GIMPLE_ASSIGN:
1215 ret |= build_accesses_from_assign (stmt);
1216 break;
1218 case GIMPLE_CALL:
1219 for (i = 0; i < gimple_call_num_args (stmt); i++)
1220 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1221 stmt, false);
1223 if (sra_mode == SRA_MODE_EARLY_IPA)
1225 tree dest = gimple_call_fndecl (stmt);
1226 int flags = gimple_call_flags (stmt);
1228 if (dest)
1230 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1231 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1232 encountered_apply_args = true;
1233 if (cgraph_get_node (dest)
1234 == cgraph_get_node (current_function_decl))
1236 encountered_recursive_call = true;
1237 if (!callsite_has_enough_arguments_p (stmt))
1238 encountered_unchangable_recursive_call = true;
1242 if (final_bbs
1243 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1244 bitmap_set_bit (final_bbs, bb->index);
1247 t = gimple_call_lhs (stmt);
1248 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1249 ret |= build_access_from_expr (t, stmt, true);
1250 break;
1252 case GIMPLE_ASM:
1253 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1254 asm_visit_addr);
1255 if (final_bbs)
1256 bitmap_set_bit (final_bbs, bb->index);
1258 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1260 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1261 ret |= build_access_from_expr (t, stmt, false);
1263 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1265 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1266 ret |= build_access_from_expr (t, stmt, true);
1268 break;
1270 default:
1271 break;
1276 return ret;
1279 /* Helper of QSORT function. There are pointers to accesses in the array. An
1280 access is considered smaller than another if it has smaller offset or if the
1281 offsets are the same but its size is bigger. */
1283 static int
1284 compare_access_positions (const void *a, const void *b)
1286 const access_p *fp1 = (const access_p *) a;
1287 const access_p *fp2 = (const access_p *) b;
1288 const access_p f1 = *fp1;
1289 const access_p f2 = *fp2;
1291 if (f1->offset != f2->offset)
1292 return f1->offset < f2->offset ? -1 : 1;
1294 if (f1->size == f2->size)
1296 if (f1->type == f2->type)
1297 return 0;
1298 /* Put any non-aggregate type before any aggregate type. */
1299 else if (!is_gimple_reg_type (f1->type)
1300 && is_gimple_reg_type (f2->type))
1301 return 1;
1302 else if (is_gimple_reg_type (f1->type)
1303 && !is_gimple_reg_type (f2->type))
1304 return -1;
1305 /* Put any complex or vector type before any other scalar type. */
1306 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1307 && TREE_CODE (f1->type) != VECTOR_TYPE
1308 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1309 || TREE_CODE (f2->type) == VECTOR_TYPE))
1310 return 1;
1311 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1312 || TREE_CODE (f1->type) == VECTOR_TYPE)
1313 && TREE_CODE (f2->type) != COMPLEX_TYPE
1314 && TREE_CODE (f2->type) != VECTOR_TYPE)
1315 return -1;
1316 /* Put the integral type with the bigger precision first. */
1317 else if (INTEGRAL_TYPE_P (f1->type)
1318 && INTEGRAL_TYPE_P (f2->type))
1319 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1320 /* Put any integral type with non-full precision last. */
1321 else if (INTEGRAL_TYPE_P (f1->type)
1322 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1323 != TYPE_PRECISION (f1->type)))
1324 return 1;
1325 else if (INTEGRAL_TYPE_P (f2->type)
1326 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1327 != TYPE_PRECISION (f2->type)))
1328 return -1;
1329 /* Stabilize the sort. */
1330 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1333 /* We want the bigger accesses first, thus the opposite operator in the next
1334 line: */
1335 return f1->size > f2->size ? -1 : 1;
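
/* As a worked example, accesses described by <offset, size> pairs
   <0, 32>, <0, 64> and <32, 32> sort as

     <0, 64>  <0, 32>  <32, 32>

   (smaller offset first, bigger size first at equal offsets), and among
   several <0, 32> accesses a vector type would precede an integer, which in
   turn would precede a 32-bit record type.  */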
1339 /* Append a name of the declaration to the name obstack. A helper function for
1340 make_fancy_name. */
1342 static void
1343 make_fancy_decl_name (tree decl)
1345 char buffer[32];
1347 tree name = DECL_NAME (decl);
1348 if (name)
1349 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1350 IDENTIFIER_LENGTH (name));
1351 else
1353 sprintf (buffer, "D%u", DECL_UID (decl));
1354 obstack_grow (&name_obstack, buffer, strlen (buffer));
1358 /* Helper for make_fancy_name. */
1360 static void
1361 make_fancy_name_1 (tree expr)
1363 char buffer[32];
1364 tree index;
1366 if (DECL_P (expr))
1368 make_fancy_decl_name (expr);
1369 return;
1372 switch (TREE_CODE (expr))
1374 case COMPONENT_REF:
1375 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1376 obstack_1grow (&name_obstack, '$');
1377 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1378 break;
1380 case ARRAY_REF:
1381 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1382 obstack_1grow (&name_obstack, '$');
1383 /* Arrays with only one element may not have a constant as their
1384 index. */
1385 index = TREE_OPERAND (expr, 1);
1386 if (TREE_CODE (index) != INTEGER_CST)
1387 break;
1388 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1389 obstack_grow (&name_obstack, buffer, strlen (buffer));
1390 break;
1392 case ADDR_EXPR:
1393 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1394 break;
1396 case MEM_REF:
1397 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1398 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1400 obstack_1grow (&name_obstack, '$');
1401 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1402 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1403 obstack_grow (&name_obstack, buffer, strlen (buffer));
1405 break;
1407 case BIT_FIELD_REF:
1408 case REALPART_EXPR:
1409 case IMAGPART_EXPR:
1410 gcc_unreachable (); /* we treat these as scalars. */
1411 break;
1412 default:
1413 break;
1417 /* Create a human readable name for a replacement variable based on EXPR. */
1419 static char *
1420 make_fancy_name (tree expr)
1422 make_fancy_name_1 (expr);
1423 obstack_1grow (&name_obstack, '\0');
1424 return XOBFINISH (&name_obstack, char *);
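
/* For instance, an access expression of the form a.inner[3].f yields the
   name "a$inner$3$f" (a COMPONENT_REF contributes '$' plus the field name,
   an ARRAY_REF '$' plus the constant index), which is then used to name the
   replacement variable in create_access_replacement below.  */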
1427 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1428 EXP_TYPE at the given OFFSET. If BASE is something for which
1429 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1430 to insert new statements either before or below the current one as specified
1431 by INSERT_AFTER. This function is not capable of handling bitfields.
1433 BASE must be either a declaration or a memory reference that has correct
1434 alignment information embedded in it (e.g. a pre-existing one in SRA). */
1436 tree
1437 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1438 tree exp_type, gimple_stmt_iterator *gsi,
1439 bool insert_after)
1441 tree prev_base = base;
1442 tree off;
1443 HOST_WIDE_INT base_offset;
1444 unsigned HOST_WIDE_INT misalign;
1445 unsigned int align;
1447 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1448 get_object_alignment_1 (base, &align, &misalign);
1449 base = get_addr_base_and_unit_offset (base, &base_offset);
1451 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1452 offset such as array[var_index]. */
1453 if (!base)
1455 gimple stmt;
1456 tree tmp, addr;
1458 gcc_checking_assert (gsi);
1459 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1460 addr = build_fold_addr_expr (unshare_expr (prev_base));
1461 STRIP_USELESS_TYPE_CONVERSION (addr);
1462 stmt = gimple_build_assign (tmp, addr);
1463 gimple_set_location (stmt, loc);
1464 if (insert_after)
1465 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1466 else
1467 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1469 off = build_int_cst (reference_alias_ptr_type (prev_base),
1470 offset / BITS_PER_UNIT);
1471 base = tmp;
1473 else if (TREE_CODE (base) == MEM_REF)
1475 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1476 base_offset + offset / BITS_PER_UNIT);
1477 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1478 base = unshare_expr (TREE_OPERAND (base, 0));
1480 else
1482 off = build_int_cst (reference_alias_ptr_type (base),
1483 base_offset + offset / BITS_PER_UNIT);
1484 base = build_fold_addr_expr (unshare_expr (base));
1487 misalign = (misalign + offset) & (align - 1);
1488 if (misalign != 0)
1489 align = (misalign & -misalign);
1490 if (align < TYPE_ALIGN (exp_type))
1491 exp_type = build_aligned_type (exp_type, align);
1493 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
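
/* A sketch of the common case: with BASE being a simple VAR_DECL `v' (so
   that get_addr_base_and_unit_offset succeeds) and OFFSET 64, the result is
   essentially

     MEM[(exp_type *) &v + 8B]

   i.e. a MEM_REF of EXP_TYPE at byte offset 8, with EXP_TYPE narrowed to a
   less aligned variant if `v' cannot guarantee its default alignment at
   that offset.  */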
1496 /* Construct a memory reference to a part of an aggregate BASE at the given
1497 OFFSET and of the same type as MODEL. In case this is a reference to a
1498 bit-field, the function will replicate the last component_ref of model's
1499 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1500 build_ref_for_offset. */
1502 static tree
1503 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1504 struct access *model, gimple_stmt_iterator *gsi,
1505 bool insert_after)
1507 if (TREE_CODE (model->expr) == COMPONENT_REF
1508 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1510 /* This access represents a bit-field. */
1511 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1513 offset -= int_bit_position (fld);
1514 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1515 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1516 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1517 NULL_TREE);
1519 else
1520 return build_ref_for_offset (loc, base, offset, model->type,
1521 gsi, insert_after);
1524 /* Attempt to build a memory reference that we could put into a gimple
1525 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1526 create statements and returns NULL instead. This function also ignores
1527 alignment issues and so its results should never end up in non-debug
1528 statements. */
1530 static tree
1531 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1532 struct access *model)
1534 HOST_WIDE_INT base_offset;
1535 tree off;
1537 if (TREE_CODE (model->expr) == COMPONENT_REF
1538 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1539 return NULL_TREE;
1541 base = get_addr_base_and_unit_offset (base, &base_offset);
1542 if (!base)
1543 return NULL_TREE;
1544 if (TREE_CODE (base) == MEM_REF)
1546 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1547 base_offset + offset / BITS_PER_UNIT);
1548 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1549 base = unshare_expr (TREE_OPERAND (base, 0));
1551 else
1553 off = build_int_cst (reference_alias_ptr_type (base),
1554 base_offset + offset / BITS_PER_UNIT);
1555 base = build_fold_addr_expr (unshare_expr (base));
1558 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1561 /* Construct a memory reference consisting of component_refs and array_refs to
1562 a part of an aggregate *RES (which is of type TYPE). The requested part
1563 should have type EXP_TYPE and be at the given OFFSET. This function might not
1564 succeed, it returns true when it does and only then *RES points to something
1565 meaningful. This function should be used only to build expressions that we
1566 might need to present to user (e.g. in warnings). In all other situations,
1567 build_ref_for_model or build_ref_for_offset should be used instead. */
1569 static bool
1570 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1571 tree exp_type)
1573 while (1)
1575 tree fld;
1576 tree tr_size, index, minidx;
1577 HOST_WIDE_INT el_size;
1579 if (offset == 0 && exp_type
1580 && types_compatible_p (exp_type, type))
1581 return true;
1583 switch (TREE_CODE (type))
1585 case UNION_TYPE:
1586 case QUAL_UNION_TYPE:
1587 case RECORD_TYPE:
1588 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1590 HOST_WIDE_INT pos, size;
1591 tree tr_pos, expr, *expr_ptr;
1593 if (TREE_CODE (fld) != FIELD_DECL)
1594 continue;
1596 tr_pos = bit_position (fld);
1597 if (!tr_pos || !host_integerp (tr_pos, 1))
1598 continue;
1599 pos = TREE_INT_CST_LOW (tr_pos);
1600 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1601 tr_size = DECL_SIZE (fld);
1602 if (!tr_size || !host_integerp (tr_size, 1))
1603 continue;
1604 size = TREE_INT_CST_LOW (tr_size);
1605 if (size == 0)
1607 if (pos != offset)
1608 continue;
1610 else if (pos > offset || (pos + size) <= offset)
1611 continue;
1613 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1614 NULL_TREE);
1615 expr_ptr = &expr;
1616 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1617 offset - pos, exp_type))
1619 *res = expr;
1620 return true;
1623 return false;
1625 case ARRAY_TYPE:
1626 tr_size = TYPE_SIZE (TREE_TYPE (type));
1627 if (!tr_size || !host_integerp (tr_size, 1))
1628 return false;
1629 el_size = tree_low_cst (tr_size, 1);
1631 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1632 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1633 return false;
1634 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1635 if (!integer_zerop (minidx))
1636 index = int_const_binop (PLUS_EXPR, index, minidx);
1637 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1638 NULL_TREE, NULL_TREE);
1639 offset = offset % el_size;
1640 type = TREE_TYPE (type);
1641 break;
1643 default:
1644 if (offset != 0)
1645 return false;
1647 if (exp_type)
1648 return false;
1649 else
1650 return true;
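
/* For example (assuming 8-bit char and 16-bit short, and writing `res' for
   the expression originally in *RES), given a *RES of type

     struct { int a; struct { char c[2]; short s; } n; }

   and OFFSET 48 with EXP_TYPE short, the RECORD_TYPE case descends into
   field `n' (position 32), the nested record case then into `s' (position
   16 within `n'), and *RES ends up as res.n.s.  */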
1655 /* Return true iff TYPE is a stdarg va_list type. */
1657 static inline bool
1658 is_va_list_type (tree type)
1660 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1663 /* Print a message to the dump file explaining why a variable was rejected. */
1665 static void
1666 reject (tree var, const char *msg)
1668 if (dump_file && (dump_flags & TDF_DETAILS))
1670 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1671 print_generic_expr (dump_file, var, 0);
1672 fprintf (dump_file, "\n");
1676 /* Return true if VAR is a candidate for SRA. */
1678 static bool
1679 maybe_add_sra_candidate (tree var)
1681 tree type = TREE_TYPE (var);
1682 const char *msg;
1683 void **slot;
1685 if (!AGGREGATE_TYPE_P (type))
1687 reject (var, "not aggregate");
1688 return false;
1690 if (needs_to_live_in_memory (var))
1692 reject (var, "needs to live in memory");
1693 return false;
1695 if (TREE_THIS_VOLATILE (var))
1697 reject (var, "is volatile");
1698 return false;
1700 if (!COMPLETE_TYPE_P (type))
1702 reject (var, "has incomplete type");
1703 return false;
1705 if (!host_integerp (TYPE_SIZE (type), 1))
1707 reject (var, "type size not fixed");
1708 return false;
1710 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1712 reject (var, "type size is zero");
1713 return false;
1715 if (type_internals_preclude_sra_p (type, &msg))
1717 reject (var, msg);
1718 return false;
1720 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1721 we also want to schedule it rather late. Thus we ignore it in
1722 the early pass. */
1723 (sra_mode == SRA_MODE_EARLY_INTRA
1724 && is_va_list_type (type)))
1726 reject (var, "is va_list");
1727 return false;
1730 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1731 slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
1732 *slot = (void *) var;
1734 if (dump_file && (dump_flags & TDF_DETAILS))
1736 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1737 print_generic_expr (dump_file, var, 0);
1738 fprintf (dump_file, "\n");
1741 return true;
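
/* Illustrative outcomes of these checks:

     struct S { int a, b; } s;  -> accepted, if nothing forces `s' to live
                                   in memory
     volatile struct S v;       -> rejected: "is volatile"
     char buf[n];               -> rejected: "type size not fixed" (a VLA)  */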
1744 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1745 those variables whose type is suitable for scalarization. */
1747 static bool
1748 find_var_candidates (void)
1750 tree var, parm;
1751 unsigned int i;
1752 bool ret = false;
1754 for (parm = DECL_ARGUMENTS (current_function_decl);
1755 parm;
1756 parm = DECL_CHAIN (parm))
1757 ret |= maybe_add_sra_candidate (parm);
1759 FOR_EACH_LOCAL_DECL (cfun, i, var)
1761 if (TREE_CODE (var) != VAR_DECL)
1762 continue;
1764 ret |= maybe_add_sra_candidate (var);
1767 return ret;
1770 /* Sort all accesses for the given variable, check for partial overlaps and
1771 return NULL if there are any. If there are none, pick a representative for
1772 each combination of offset and size and create a linked list out of them.
1773 Return the pointer to the first representative and make sure it is the first
1774 one in the vector of accesses. */
1776 static struct access *
1777 sort_and_splice_var_accesses (tree var)
1779 int i, j, access_count;
1780 struct access *res, **prev_acc_ptr = &res;
1781 vec<access_p> *access_vec;
1782 bool first = true;
1783 HOST_WIDE_INT low = -1, high = 0;
1785 access_vec = get_base_access_vector (var);
1786 if (!access_vec)
1787 return NULL;
1788 access_count = access_vec->length ();
1790 /* Sort by <OFFSET, SIZE>. */
1791 access_vec->qsort (compare_access_positions);
1793 i = 0;
1794 while (i < access_count)
1796 struct access *access = (*access_vec)[i];
1797 bool grp_write = access->write;
1798 bool grp_read = !access->write;
1799 bool grp_scalar_write = access->write
1800 && is_gimple_reg_type (access->type);
1801 bool grp_scalar_read = !access->write
1802 && is_gimple_reg_type (access->type);
1803 bool grp_assignment_read = access->grp_assignment_read;
1804 bool grp_assignment_write = access->grp_assignment_write;
1805 bool multiple_scalar_reads = false;
1806 bool total_scalarization = access->grp_total_scalarization;
1807 bool grp_partial_lhs = access->grp_partial_lhs;
1808 bool first_scalar = is_gimple_reg_type (access->type);
1809 bool unscalarizable_region = access->grp_unscalarizable_region;
1811 if (first || access->offset >= high)
1813 first = false;
1814 low = access->offset;
1815 high = access->offset + access->size;
1817 else if (access->offset > low && access->offset + access->size > high)
1818 return NULL;
1819 else
1820 gcc_assert (access->offset >= low
1821 && access->offset + access->size <= high);
1823 j = i + 1;
1824 while (j < access_count)
1826 struct access *ac2 = (*access_vec)[j];
1827 if (ac2->offset != access->offset || ac2->size != access->size)
1828 break;
1829 if (ac2->write)
1831 grp_write = true;
1832 grp_scalar_write = (grp_scalar_write
1833 || is_gimple_reg_type (ac2->type));
1835 else
1837 grp_read = true;
1838 if (is_gimple_reg_type (ac2->type))
1840 if (grp_scalar_read)
1841 multiple_scalar_reads = true;
1842 else
1843 grp_scalar_read = true;
1846 grp_assignment_read |= ac2->grp_assignment_read;
1847 grp_assignment_write |= ac2->grp_assignment_write;
1848 grp_partial_lhs |= ac2->grp_partial_lhs;
1849 unscalarizable_region |= ac2->grp_unscalarizable_region;
1850 total_scalarization |= ac2->grp_total_scalarization;
1851 relink_to_new_repr (access, ac2);
1853 /* If there are both aggregate-type and scalar-type accesses with
1854 this combination of size and offset, the comparison function
1855 should have put the scalars first. */
1856 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1857 ac2->group_representative = access;
1858 j++;
1861 i = j;
1863 access->group_representative = access;
1864 access->grp_write = grp_write;
1865 access->grp_read = grp_read;
1866 access->grp_scalar_read = grp_scalar_read;
1867 access->grp_scalar_write = grp_scalar_write;
1868 access->grp_assignment_read = grp_assignment_read;
1869 access->grp_assignment_write = grp_assignment_write;
1870 access->grp_hint = multiple_scalar_reads || total_scalarization;
1871 access->grp_total_scalarization = total_scalarization;
1872 access->grp_partial_lhs = grp_partial_lhs;
1873 access->grp_unscalarizable_region = unscalarizable_region;
1874 if (access->first_link)
1875 add_access_to_work_queue (access);
1877 *prev_acc_ptr = access;
1878 prev_acc_ptr = &access->next_grp;
1881 gcc_assert (res == (*access_vec)[0]);
1882 return res;
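
/* E.g. if the sorted vector for a variable contains accesses
   <0, 32, write>, <0, 32, read> and <32, 32, read>, the first two are
   spliced into one group whose representative has both grp_read and
   grp_write set, and the third forms a group of its own. A vector
   containing <0, 64> and <32, 64> would instead make the function return
   NULL, because those accesses partially overlap.  */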
1885 /* Create a variable for the given ACCESS which determines the type, name and a
1886 few other properties. Return the variable declaration and store it also to
1887 ACCESS->replacement_decl. */
1889 static tree
1890 create_access_replacement (struct access *access)
1892 tree repl;
1894 if (access->grp_to_be_debug_replaced)
1896 repl = create_tmp_var_raw (access->type, NULL);
1897 DECL_CONTEXT (repl) = current_function_decl;
1899 else
1900 repl = create_tmp_var (access->type, "SR");
1901 if (TREE_CODE (access->type) == COMPLEX_TYPE
1902 || TREE_CODE (access->type) == VECTOR_TYPE)
1904 if (!access->grp_partial_lhs)
1905 DECL_GIMPLE_REG_P (repl) = 1;
1907 else if (access->grp_partial_lhs
1908 && is_gimple_reg_type (access->type))
1909 TREE_ADDRESSABLE (repl) = 1;
1911 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1912 DECL_ARTIFICIAL (repl) = 1;
1913 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1915 if (DECL_NAME (access->base)
1916 && !DECL_IGNORED_P (access->base)
1917 && !DECL_ARTIFICIAL (access->base))
1919 char *pretty_name = make_fancy_name (access->expr);
1920 tree debug_expr = unshare_expr (access->expr), d;
1921 bool fail = false;
1923 DECL_NAME (repl) = get_identifier (pretty_name);
1924 obstack_free (&name_obstack, pretty_name);
1926 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1927 as DECL_DEBUG_EXPR isn't considered when looking for still
1928 used SSA_NAMEs and thus they could be freed. All debug info
1929 generation cares about is whether something is constant or variable
1930 and that get_ref_base_and_extent works properly on the
1931 expression. It cannot handle accesses at a non-constant offset
1932 though, so just give up in those cases. */
1933 for (d = debug_expr;
1934 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1935 d = TREE_OPERAND (d, 0))
1936 switch (TREE_CODE (d))
1938 case ARRAY_REF:
1939 case ARRAY_RANGE_REF:
1940 if (TREE_OPERAND (d, 1)
1941 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1942 fail = true;
1943 if (TREE_OPERAND (d, 3)
1944 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1945 fail = true;
1946 /* FALLTHRU */
1947 case COMPONENT_REF:
1948 if (TREE_OPERAND (d, 2)
1949 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
1950 fail = true;
1951 break;
1952 case MEM_REF:
1953 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
1954 fail = true;
1955 else
1956 d = TREE_OPERAND (d, 0);
1957 break;
1958 default:
1959 break;
1961 if (!fail)
1963 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1964 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1966 if (access->grp_no_warning)
1967 TREE_NO_WARNING (repl) = 1;
1968 else
1969 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1971 else
1972 TREE_NO_WARNING (repl) = 1;
1974 if (dump_file)
1976 if (access->grp_to_be_debug_replaced)
1978 fprintf (dump_file, "Created a debug-only replacement for ");
1979 print_generic_expr (dump_file, access->base, 0);
1980 fprintf (dump_file, " offset: %u, size: %u\n",
1981 (unsigned) access->offset, (unsigned) access->size);
1983 else
1985 fprintf (dump_file, "Created a replacement for ");
1986 print_generic_expr (dump_file, access->base, 0);
1987 fprintf (dump_file, " offset: %u, size: %u: ",
1988 (unsigned) access->offset, (unsigned) access->size);
1989 print_generic_expr (dump_file, repl, 0);
1990 fprintf (dump_file, "\n");
1993 sra_stats.replacements++;
1995 return repl;
1998 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
2000 static inline tree
2001 get_access_replacement (struct access *access)
2003 if (!access->replacement_decl)
2004 access->replacement_decl = create_access_replacement (access);
2005 return access->replacement_decl;
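/* For illustration: for an access representing s.i of type int, the
   replacement created above is conceptually just

     int SR_1;

   where the SR_1 name is a hypothetical stand-in for the SR-prefixed
   temporary produced by create_tmp_var, and DECL_DEBUG_EXPR is set to the
   unshared "s.i" expression so that debug info can still refer to the
   original component.  */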
2009 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2010 linked list along the way. Stop when *ACCESS is NULL or the access it points
2011 to is not "within" the root. Return false iff some accesses partially
2012 overlap. */
2014 static bool
2015 build_access_subtree (struct access **access)
2017 struct access *root = *access, *last_child = NULL;
2018 HOST_WIDE_INT limit = root->offset + root->size;
2020 *access = (*access)->next_grp;
2021 while (*access && (*access)->offset + (*access)->size <= limit)
2023 if (!last_child)
2024 root->first_child = *access;
2025 else
2026 last_child->next_sibling = *access;
2027 last_child = *access;
2029 if (!build_access_subtree (access))
2030 return false;
2033 if (*access && (*access)->offset < limit)
2034 return false;
2036 return true;
2039 /* Build a tree of access representatives, ACCESS is the pointer to the first
2040 one, others are linked in a list by the next_grp field. Return false iff
2041 some accesses partially overlap. */
2043 static bool
2044 build_access_trees (struct access *access)
2046 while (access)
2048 struct access *root = access;
2050 if (!build_access_subtree (&access))
2051 return false;
2052 root->next_grp = access;
2054 return true;
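/* An illustrative sketch: group representatives sorted by <offset, size>
   such as <0,64>, <0,32> and <32,32> yield a tree in which <0,64> is the
   root while <0,32> and <32,32> become its first_child and that child's
   next_sibling, because both lie entirely within the parent.  A list like
   <0,32>, <16,32> instead makes build_access_subtree return false, since
   those accesses partially overlap.  */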
2057 /* Return true if EXPR contains some ARRAY_REFs into a variably-bounded
2058 array. */
2060 static bool
2061 expr_with_var_bounded_array_refs_p (tree expr)
2063 while (handled_component_p (expr))
2065 if (TREE_CODE (expr) == ARRAY_REF
2066 && !host_integerp (array_ref_low_bound (expr), 0))
2067 return true;
2068 expr = TREE_OPERAND (expr, 0);
2070 return false;
2073 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2074 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2075 sorts of access flags appropriately along the way, notably propagate
2076 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2077 grp_total_scalarization from PARENT down to ROOT.
2079 Creating a replacement for a scalar access is considered beneficial if its
2080 grp_hint is set (this means we are either attempting total scalarization or
2081 there is more than one direct read access) or according to the following
2082 table:
2084 Access written to through a scalar type (once or more times)
2086 | Written to in an assignment statement
2088 | | Access read as scalar _once_
2089 | | |
2090 | | | Read in an assignment statement
2091 | | | |
2092 | | | | Scalarize Comment
2093 -----------------------------------------------------------------------------
2094 0 0 0 0 No access for the scalar
2095 0 0 0 1 No access for the scalar
2096 0 0 1 0 No Single read - won't help
2097 0 0 1 1 No The same case
2098 0 1 0 0 No access for the scalar
2099 0 1 0 1 No access for the scalar
2100 0 1 1 0 Yes s = *g; return s.i;
2101 0 1 1 1 Yes The same case as above
2102 1 0 0 0 No Won't help
2103 1 0 0 1 Yes s.i = 1; *g = s;
2104 1 0 1 0 Yes s.i = 5; g = s.i;
2105 1 0 1 1 Yes The same case as above
2106 1 1 0 0 No Won't help.
2107 1 1 0 1 Yes s.i = 1; *g = s;
2108 1 1 1 0 Yes s = *g; return s.i;
2109 1 1 1 1 Yes Any of the above yeses */
2111 static bool
2112 analyze_access_subtree (struct access *root, struct access *parent,
2113 bool allow_replacements)
2115 struct access *child;
2116 HOST_WIDE_INT limit = root->offset + root->size;
2117 HOST_WIDE_INT covered_to = root->offset;
2118 bool scalar = is_gimple_reg_type (root->type);
2119 bool hole = false, sth_created = false;
2121 if (parent)
2123 if (parent->grp_read)
2124 root->grp_read = 1;
2125 if (parent->grp_assignment_read)
2126 root->grp_assignment_read = 1;
2127 if (parent->grp_write)
2128 root->grp_write = 1;
2129 if (parent->grp_assignment_write)
2130 root->grp_assignment_write = 1;
2131 if (parent->grp_total_scalarization)
2132 root->grp_total_scalarization = 1;
2135 if (root->grp_unscalarizable_region)
2136 allow_replacements = false;
2138 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2139 allow_replacements = false;
2141 for (child = root->first_child; child; child = child->next_sibling)
2143 hole |= covered_to < child->offset;
2144 sth_created |= analyze_access_subtree (child, root,
2145 allow_replacements && !scalar);
2147 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2148 root->grp_total_scalarization &= child->grp_total_scalarization;
2149 if (child->grp_covered)
2150 covered_to += child->size;
2151 else
2152 hole = true;
2155 if (allow_replacements && scalar && !root->first_child
2156 && (root->grp_hint
2157 || ((root->grp_scalar_read || root->grp_assignment_read)
2158 && (root->grp_scalar_write || root->grp_assignment_write))))
2160 bool new_integer_type;
2161 /* Always create access replacements that cover the whole access.
2162 For integral types this means the precision has to match.
2163 Avoid assumptions based on the integral type kind, too. */
2164 if (INTEGRAL_TYPE_P (root->type)
2165 && (TREE_CODE (root->type) != INTEGER_TYPE
2166 || TYPE_PRECISION (root->type) != root->size)
2167 /* But leave bitfield accesses alone. */
2168 && (TREE_CODE (root->expr) != COMPONENT_REF
2169 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2171 tree rt = root->type;
2172 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2173 && (root->size % BITS_PER_UNIT) == 0);
2174 root->type = build_nonstandard_integer_type (root->size,
2175 TYPE_UNSIGNED (rt));
2176 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2177 root->base, root->offset,
2178 root->type, NULL, false);
2179 new_integer_type = true;
2181 else
2182 new_integer_type = false;
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2186 fprintf (dump_file, "Marking ");
2187 print_generic_expr (dump_file, root->base, 0);
2188 fprintf (dump_file, " offset: %u, size: %u ",
2189 (unsigned) root->offset, (unsigned) root->size);
2190 fprintf (dump_file, " to be replaced%s.\n",
2191 new_integer_type ? " with an integer": "");
2194 root->grp_to_be_replaced = 1;
2195 sth_created = true;
2196 hole = false;
2198 else
2200 if (allow_replacements
2201 && scalar && !root->first_child
2202 && (root->grp_scalar_write || root->grp_assignment_write)
2203 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2204 DECL_UID (root->base)))
2206 gcc_checking_assert (!root->grp_scalar_read
2207 && !root->grp_assignment_read);
2208 sth_created = true;
2209 if (MAY_HAVE_DEBUG_STMTS)
2211 root->grp_to_be_debug_replaced = 1;
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2214 fprintf (dump_file, "Marking ");
2215 print_generic_expr (dump_file, root->base, 0);
2216 fprintf (dump_file, " offset: %u, size: %u ",
2217 (unsigned) root->offset, (unsigned) root->size);
2218 fprintf (dump_file, " to be replaced with debug "
2219 "statements.\n");
2224 if (covered_to < limit)
2225 hole = true;
2226 if (scalar)
2227 root->grp_total_scalarization = 0;
2230 if (!hole || root->grp_total_scalarization)
2231 root->grp_covered = 1;
2232 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2233 root->grp_unscalarized_data = 1; /* not covered and written to */
2234 return sth_created;
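/* An example of the decision table above in action (illustrative only):
   given

     struct S { int i; } s;
     s = *g;
     return s.i;

   the access for s.i is read as a scalar exactly once and s is written in
   an assignment, i.e. row "0 1 1 0", so the subtree analysis above marks it
   grp_to_be_replaced and a scalar replacement is created for it.  */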
2237 /* Analyze all access trees linked by next_grp by the means of
2238 analyze_access_subtree. */
2239 static bool
2240 analyze_access_trees (struct access *access)
2242 bool ret = false;
2244 while (access)
2246 if (analyze_access_subtree (access, NULL, true))
2247 ret = true;
2248 access = access->next_grp;
2251 return ret;
2254 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2255 size SIZE would conflict with an already existing one. If exactly such a child
2256 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2258 static bool
2259 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2260 HOST_WIDE_INT size, struct access **exact_match)
2262 struct access *child;
2264 for (child = lacc->first_child; child; child = child->next_sibling)
2266 if (child->offset == norm_offset && child->size == size)
2268 *exact_match = child;
2269 return true;
2272 if (child->offset < norm_offset + size
2273 && child->offset + child->size > norm_offset)
2274 return true;
2277 return false;
2280 /* Create a new child access of PARENT, with all properties just like MODEL
2281 except for its offset and with its grp_write false and grp_read true.
2282 Return the new access or NULL if it cannot be created. Note that this access
2283 is created long after all splicing and sorting; it is not located in any
2284 access vector and is automatically a representative of its group. */
2286 static struct access *
2287 create_artificial_child_access (struct access *parent, struct access *model,
2288 HOST_WIDE_INT new_offset)
2290 struct access *access;
2291 struct access **child;
2292 tree expr = parent->base;
2294 gcc_assert (!model->grp_unscalarizable_region);
2296 access = (struct access *) pool_alloc (access_pool);
2297 memset (access, 0, sizeof (struct access));
2298 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2299 model->type))
2301 access->grp_no_warning = true;
2302 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2303 new_offset, model, NULL, false);
2306 access->base = parent->base;
2307 access->expr = expr;
2308 access->offset = new_offset;
2309 access->size = model->size;
2310 access->type = model->type;
2311 access->grp_write = true;
2312 access->grp_read = false;
2314 child = &parent->first_child;
2315 while (*child && (*child)->offset < new_offset)
2316 child = &(*child)->next_sibling;
2318 access->next_sibling = *child;
2319 *child = access;
2321 return access;
2325 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2326 true if any new subaccess was created. Additionally, if RACC is a scalar
2327 access but LACC is not, change the type of the latter, if possible. */
2329 static bool
2330 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2332 struct access *rchild;
2333 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2334 bool ret = false;
2336 if (is_gimple_reg_type (lacc->type)
2337 || lacc->grp_unscalarizable_region
2338 || racc->grp_unscalarizable_region)
2339 return false;
2341 if (is_gimple_reg_type (racc->type))
2343 if (!lacc->first_child && !racc->first_child)
2345 tree t = lacc->base;
2347 lacc->type = racc->type;
2348 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2349 lacc->offset, racc->type))
2350 lacc->expr = t;
2351 else
2353 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2354 lacc->base, lacc->offset,
2355 racc, NULL, false);
2356 lacc->grp_no_warning = true;
2359 return false;
2362 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2364 struct access *new_acc = NULL;
2365 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2367 if (rchild->grp_unscalarizable_region)
2368 continue;
2370 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2371 &new_acc))
2373 if (new_acc)
2375 rchild->grp_hint = 1;
2376 new_acc->grp_hint |= new_acc->grp_read;
2377 if (rchild->first_child)
2378 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2380 continue;
2383 rchild->grp_hint = 1;
2384 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2385 if (new_acc)
2387 ret = true;
2388 if (racc->first_child)
2389 propagate_subaccesses_across_link (new_acc, rchild);
2393 return ret;
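/* An illustrative sketch: for an assignment "l = r" where the r side has a
   child access for r.inner.x but the l side has none, the loop above creates
   an artificial child of the l access at the corresponding normalized
   offset, modeled on the r.inner.x access, so that the later modification
   phase can replace part of the aggregate copy with a scalar one.  The names
   l, r and inner.x are hypothetical.  */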
2396 /* Propagate all subaccesses across assignment links. */
2398 static void
2399 propagate_all_subaccesses (void)
2401 while (work_queue_head)
2403 struct access *racc = pop_access_from_work_queue ();
2404 struct assign_link *link;
2406 gcc_assert (racc->first_link);
2408 for (link = racc->first_link; link; link = link->next)
2410 struct access *lacc = link->lacc;
2412 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2413 continue;
2414 lacc = lacc->group_representative;
2415 if (propagate_subaccesses_across_link (lacc, racc)
2416 && lacc->first_link)
2417 add_access_to_work_queue (lacc);
2422 /* Go through all accesses collected throughout the (intraprocedural) analysis
2423 stage, exclude overlapping ones, identify representatives and build trees
2424 out of them, making decisions about scalarization on the way. Return true
2425 iff there are any to-be-scalarized variables after this stage. */
2427 static bool
2428 analyze_all_variable_accesses (void)
2430 int res = 0;
2431 bitmap tmp = BITMAP_ALLOC (NULL);
2432 bitmap_iterator bi;
2433 unsigned i, max_total_scalarization_size;
2435 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2436 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2438 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2439 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2440 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2442 tree var = candidate (i);
2444 if (TREE_CODE (var) == VAR_DECL
2445 && type_consists_of_records_p (TREE_TYPE (var)))
2447 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2448 <= max_total_scalarization_size)
2450 completely_scalarize_var (var);
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2453 fprintf (dump_file, "Will attempt to totally scalarize ");
2454 print_generic_expr (dump_file, var, 0);
2455 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2458 else if (dump_file && (dump_flags & TDF_DETAILS))
2460 fprintf (dump_file, "Too big to totally scalarize: ");
2461 print_generic_expr (dump_file, var, 0);
2462 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2467 bitmap_copy (tmp, candidate_bitmap);
2468 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2470 tree var = candidate (i);
2471 struct access *access;
2473 access = sort_and_splice_var_accesses (var);
2474 if (!access || !build_access_trees (access))
2475 disqualify_candidate (var,
2476 "No or inhibitingly overlapping accesses.");
2479 propagate_all_subaccesses ();
2481 bitmap_copy (tmp, candidate_bitmap);
2482 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2484 tree var = candidate (i);
2485 struct access *access = get_first_repr_for_decl (var);
2487 if (analyze_access_trees (access))
2489 res++;
2490 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, "\nAccess trees for ");
2493 print_generic_expr (dump_file, var, 0);
2494 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2495 dump_access_tree (dump_file, access);
2496 fprintf (dump_file, "\n");
2499 else
2500 disqualify_candidate (var, "No scalar replacements to be created.");
2503 BITMAP_FREE (tmp);
2505 if (res)
2507 statistics_counter_event (cfun, "Scalarized aggregates", res);
2508 return true;
2510 else
2511 return false;
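/* A back-of-the-envelope example of the total-scalarization limit computed
   above, under assumed target values: with UNITS_PER_WORD == 8,
   BITS_PER_UNIT == 8 and MOVE_RATIO == 8, max_total_scalarization_size is
   8 * 8 * 8 == 512 bits, so a record of at most 64 bytes consisting only of
   nested records with scalar fields would be completely scalarized.  The
   MOVE_RATIO value is purely illustrative; it depends on the target and on
   whether the function is optimized for speed.  */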
2514 /* Generate statements copying scalar replacements of accesses within a subtree
2515 into or out of AGG. ACCESS, all its children, siblings and their children
2516 are to be processed. AGG is an aggregate type expression (can be a
2517 declaration but does not have to be; it can for example also be a mem_ref or
2518 a series of handled components). TOP_OFFSET is the offset of the processed
2519 subtree which has to be subtracted from offsets of individual accesses to
2520 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2521 replacements in the interval <start_offset, start_offset + chunk_size>,
2522 otherwise copy all. GSI is a statement iterator used to place the new
2523 statements. WRITE should be true when the statements should write from AGG
2524 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2525 statements will be added after the current statement in GSI; otherwise they
2526 will be added before it. */
2528 static void
2529 generate_subtree_copies (struct access *access, tree agg,
2530 HOST_WIDE_INT top_offset,
2531 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2532 gimple_stmt_iterator *gsi, bool write,
2533 bool insert_after, location_t loc)
2537 if (chunk_size && access->offset >= start_offset + chunk_size)
2538 return;
2540 if (access->grp_to_be_replaced
2541 && (chunk_size == 0
2542 || access->offset + access->size > start_offset))
2544 tree expr, repl = get_access_replacement (access);
2545 gimple stmt;
2547 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2548 access, gsi, insert_after);
2550 if (write)
2552 if (access->grp_partial_lhs)
2553 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2554 !insert_after,
2555 insert_after ? GSI_NEW_STMT
2556 : GSI_SAME_STMT);
2557 stmt = gimple_build_assign (repl, expr);
2559 else
2561 TREE_NO_WARNING (repl) = 1;
2562 if (access->grp_partial_lhs)
2563 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2564 !insert_after,
2565 insert_after ? GSI_NEW_STMT
2566 : GSI_SAME_STMT);
2567 stmt = gimple_build_assign (expr, repl);
2569 gimple_set_location (stmt, loc);
2571 if (insert_after)
2572 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2573 else
2574 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2575 update_stmt (stmt);
2576 sra_stats.subtree_copies++;
2578 else if (write
2579 && access->grp_to_be_debug_replaced
2580 && (chunk_size == 0
2581 || access->offset + access->size > start_offset))
2583 gimple ds;
2584 tree drhs = build_debug_ref_for_model (loc, agg,
2585 access->offset - top_offset,
2586 access);
2587 ds = gimple_build_debug_bind (get_access_replacement (access),
2588 drhs, gsi_stmt (*gsi));
2589 if (insert_after)
2590 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2591 else
2592 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2595 if (access->first_child)
2596 generate_subtree_copies (access->first_child, agg, top_offset,
2597 start_offset, chunk_size, gsi,
2598 write, insert_after, loc);
2600 access = access->next_sibling;
2602 while (access);
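/* For illustration: flushing the scalarized components s.i and s.f of a
   variable s back into an aggregate AGG (WRITE false) emits statements
   roughly like

     AGG.i = SR_1;
     AGG.f = SR_2;

   whereas WRITE true emits the corresponding loads "SR_1 = AGG.i" and
   "SR_2 = AGG.f".  The SR_* names are hypothetical stand-ins for the
   replacement declarations.  */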
2605 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
2606 root of the subtree to be processed. GSI is the statement iterator used
2607 for inserting statements which are added after the current statement if
2608 INSERT_AFTER is true or before it otherwise. */
2610 static void
2611 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2612 bool insert_after, location_t loc)
2615 struct access *child;
2617 if (access->grp_to_be_replaced)
2619 gimple stmt;
2621 stmt = gimple_build_assign (get_access_replacement (access),
2622 build_zero_cst (access->type));
2623 if (insert_after)
2624 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2625 else
2626 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2627 update_stmt (stmt);
2628 gimple_set_location (stmt, loc);
2630 else if (access->grp_to_be_debug_replaced)
2632 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2633 build_zero_cst (access->type),
2634 gsi_stmt (*gsi));
2635 if (insert_after)
2636 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2637 else
2638 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2641 for (child = access->first_child; child; child = child->next_sibling)
2642 init_subtree_with_zero (child, gsi, insert_after, loc);
2645 /* Search for an access representative for the given expression EXPR and
2646 return it or NULL if it cannot be found. */
2648 static struct access *
2649 get_access_for_expr (tree expr)
2651 HOST_WIDE_INT offset, size, max_size;
2652 tree base;
2654 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2655 a different size than the size of its argument and we need the latter
2656 one. */
2657 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2658 expr = TREE_OPERAND (expr, 0);
2660 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2661 if (max_size == -1 || !DECL_P (base))
2662 return NULL;
2664 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2665 return NULL;
2667 return get_var_base_offset_size_access (base, offset, max_size);
2670 /* Replace the expression EXPR with a scalar replacement if there is one and
2671 generate other statements to do type conversion or subtree copying if
2672 necessary. GSI is used to place newly created statements, WRITE is true if
2673 the expression is being written to (it is on a LHS of a statement or output
2674 in an assembly statement). */
2676 static bool
2677 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2679 location_t loc;
2680 struct access *access;
2681 tree type, bfr;
2683 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2685 bfr = *expr;
2686 expr = &TREE_OPERAND (*expr, 0);
2688 else
2689 bfr = NULL_TREE;
2691 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2692 expr = &TREE_OPERAND (*expr, 0);
2693 access = get_access_for_expr (*expr);
2694 if (!access)
2695 return false;
2696 type = TREE_TYPE (*expr);
2698 loc = gimple_location (gsi_stmt (*gsi));
2699 if (access->grp_to_be_replaced)
2701 tree repl = get_access_replacement (access);
2702 /* If we replace a non-register typed access simply use the original
2703 access expression to extract the scalar component afterwards.
2704 This happens if scalarizing a function return value or parameter
2705 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2706 gcc.c-torture/compile/20011217-1.c.
2708 We also want to use this when accessing a complex or vector which can
2709 be accessed as a different type too, potentially creating a need for
2710 type conversion (see PR42196) and when scalarized unions are involved
2711 in assembler statements (see PR42398). */
2712 if (!useless_type_conversion_p (type, access->type))
2714 tree ref;
2716 ref = build_ref_for_model (loc, access->base, access->offset, access,
2717 NULL, false);
2719 if (write)
2721 gimple stmt;
2723 if (access->grp_partial_lhs)
2724 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2725 false, GSI_NEW_STMT);
2726 stmt = gimple_build_assign (repl, ref);
2727 gimple_set_location (stmt, loc);
2728 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2730 else
2732 gimple stmt;
2734 if (access->grp_partial_lhs)
2735 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2736 true, GSI_SAME_STMT);
2737 stmt = gimple_build_assign (ref, repl);
2738 gimple_set_location (stmt, loc);
2739 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2742 else
2743 *expr = repl;
2744 sra_stats.exprs++;
2746 else if (write && access->grp_to_be_debug_replaced)
2748 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2749 NULL_TREE,
2750 gsi_stmt (*gsi));
2751 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2754 if (access->first_child)
2756 HOST_WIDE_INT start_offset, chunk_size;
2757 if (bfr
2758 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2759 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2761 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2762 start_offset = access->offset
2763 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2765 else
2766 start_offset = chunk_size = 0;
2768 generate_subtree_copies (access->first_child, access->base, 0,
2769 start_offset, chunk_size, gsi, write, write,
2770 loc);
2772 return true;
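/* An illustrative sketch: when a scalarized component is accessed through an
   incompatible type, e.g. a union u whose replacement SR_1 has type int but
   which is stored to as a float, the branch above keeps the original
   reference in the statement and merely appends

     SR_1 = <reference to u rebuilt at the access offset>;

   after it, keeping the replacement up to date instead of substituting it
   with a mismatched type.  The names u and SR_1 are hypothetical.  */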
2775 /* Where scalar replacements of the RHS have been written to when a replacement
2776 of the LHS of an assignment cannot be directly loaded from a replacement of
2777 the RHS. */
2778 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2779 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2780 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2782 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2783 base aggregate if there are unscalarized data or directly to LHS of the
2784 statement that is pointed to by GSI otherwise. */
2786 static enum unscalarized_data_handling
2787 handle_unscalarized_data_in_subtree (struct access *top_racc,
2788 gimple_stmt_iterator *gsi)
2790 if (top_racc->grp_unscalarized_data)
2792 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2793 gsi, false, false,
2794 gimple_location (gsi_stmt (*gsi)));
2795 return SRA_UDH_RIGHT;
2797 else
2799 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2800 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2801 0, 0, gsi, false, false,
2802 gimple_location (gsi_stmt (*gsi)));
2803 return SRA_UDH_LEFT;
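/* For illustration: when modifying "l = r" where r contains unscalarized
   data, the replacements are first flushed back into r itself (conceptually
   "r.i = SR_1; ...") and SRA_UDH_RIGHT is returned so that the original
   aggregate copy remains correct; when r is fully covered by replacements,
   they are instead copied straight into l and SRA_UDH_LEFT is returned.
   The names l, r, i and SR_1 are hypothetical.  */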
2808 /* Try to generate statements to load all sub-replacements in an access subtree
2809 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2810 If that is not possible, refresh the TOP_RACC base aggregate and load the
2811 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2812 copied. NEW_GSI is stmt iterator used for statement insertions after the
2813 original assignment, OLD_GSI is used to insert statements before the
2814 assignment. *REFRESHED keeps the information whether we have needed to
2815 refresh replacements of the LHS and from which side of the assignments this
2816 takes place. */
2818 static void
2819 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2820 HOST_WIDE_INT left_offset,
2821 gimple_stmt_iterator *old_gsi,
2822 gimple_stmt_iterator *new_gsi,
2823 enum unscalarized_data_handling *refreshed)
2825 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2826 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2828 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2830 if (lacc->grp_to_be_replaced)
2832 struct access *racc;
2833 gimple stmt;
2834 tree rhs;
2836 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2837 if (racc && racc->grp_to_be_replaced)
2839 rhs = get_access_replacement (racc);
2840 if (!useless_type_conversion_p (lacc->type, racc->type))
2841 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2843 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2844 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2845 true, GSI_SAME_STMT);
2847 else
2849 /* No suitable access on the right hand side, need to load from
2850 the aggregate. See if we have to update it first... */
2851 if (*refreshed == SRA_UDH_NONE)
2852 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2853 old_gsi);
2855 if (*refreshed == SRA_UDH_LEFT)
2856 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2857 new_gsi, true);
2858 else
2859 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2860 new_gsi, true);
2861 if (lacc->grp_partial_lhs)
2862 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2863 false, GSI_NEW_STMT);
2866 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2867 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2868 gimple_set_location (stmt, loc);
2869 update_stmt (stmt);
2870 sra_stats.subreplacements++;
2872 else
2874 if (*refreshed == SRA_UDH_NONE
2875 && lacc->grp_read && !lacc->grp_covered)
2876 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2877 old_gsi);
2878 if (lacc && lacc->grp_to_be_debug_replaced)
2880 gimple ds;
2881 tree drhs;
2882 struct access *racc = find_access_in_subtree (top_racc, offset,
2883 lacc->size);
2885 if (racc && racc->grp_to_be_replaced)
2886 drhs = get_access_replacement (racc);
2887 else if (*refreshed == SRA_UDH_LEFT)
2888 drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2889 lacc);
2890 else if (*refreshed == SRA_UDH_RIGHT)
2891 drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2892 lacc);
2893 else
2894 drhs = NULL_TREE;
2895 ds = gimple_build_debug_bind (get_access_replacement (lacc),
2896 drhs, gsi_stmt (*old_gsi));
2897 gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2901 if (lacc->first_child)
2902 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2903 old_gsi, new_gsi, refreshed);
2907 /* Result code for SRA assignment modification. */
2908 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2909 SRA_AM_MODIFIED, /* stmt changed but not
2910 removed */
2911 SRA_AM_REMOVED }; /* stmt eliminated */
2913 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2914 to the assignment and GSI is the statement iterator pointing at it. Returns
2915 the same values as sra_modify_assign. */
2917 static enum assignment_mod_result
2918 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2920 tree lhs = gimple_assign_lhs (*stmt);
2921 struct access *acc;
2922 location_t loc;
2924 acc = get_access_for_expr (lhs);
2925 if (!acc)
2926 return SRA_AM_NONE;
2928 if (gimple_clobber_p (*stmt))
2930 /* Remove clobbers of fully scalarized variables, otherwise
2931 do nothing. */
2932 if (acc->grp_covered)
2934 unlink_stmt_vdef (*stmt);
2935 gsi_remove (gsi, true);
2936 release_defs (*stmt);
2937 return SRA_AM_REMOVED;
2939 else
2940 return SRA_AM_NONE;
2943 loc = gimple_location (*stmt);
2944 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2946 /* I have never seen this code path trigger but if it can happen the
2947 following should handle it gracefully. */
2948 if (access_has_children_p (acc))
2949 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2950 true, true, loc);
2951 return SRA_AM_MODIFIED;
2954 if (acc->grp_covered)
2956 init_subtree_with_zero (acc, gsi, false, loc);
2957 unlink_stmt_vdef (*stmt);
2958 gsi_remove (gsi, true);
2959 release_defs (*stmt);
2960 return SRA_AM_REMOVED;
2962 else
2964 init_subtree_with_zero (acc, gsi, true, loc);
2965 return SRA_AM_MODIFIED;
2969 /* Create and return a new suitable default definition SSA_NAME for RACC which
2970 is an access describing an uninitialized part of an aggregate that is being
2971 loaded. */
2973 static tree
2974 get_repl_default_def_ssa_name (struct access *racc)
2976 return get_or_create_ssa_default_def (cfun, get_access_replacement (racc));
2979 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2980 somewhere in it. */
2982 static inline bool
2983 contains_bitfld_comp_ref_p (const_tree ref)
2985 while (handled_component_p (ref))
2987 if (TREE_CODE (ref) == COMPONENT_REF
2988 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2989 return true;
2990 ref = TREE_OPERAND (ref, 0);
2993 return false;
2996 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2997 bit-field field declaration somewhere in it. */
2999 static inline bool
3000 contains_vce_or_bfcref_p (const_tree ref)
3002 while (handled_component_p (ref))
3004 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3005 || (TREE_CODE (ref) == COMPONENT_REF
3006 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3007 return true;
3008 ref = TREE_OPERAND (ref, 0);
3011 return false;
3014 /* Examine both sides of the assignment statement pointed to by STMT, replace
3015 them with a scalar replacement if there is one and generate copying of
3016 replacements if scalarized aggregates have been used in the assignment. GSI
3017 is used to hold generated statements for type conversions and subtree
3018 copying. */
3020 static enum assignment_mod_result
3021 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3023 struct access *lacc, *racc;
3024 tree lhs, rhs;
3025 bool modify_this_stmt = false;
3026 bool force_gimple_rhs = false;
3027 location_t loc;
3028 gimple_stmt_iterator orig_gsi = *gsi;
3030 if (!gimple_assign_single_p (*stmt))
3031 return SRA_AM_NONE;
3032 lhs = gimple_assign_lhs (*stmt);
3033 rhs = gimple_assign_rhs1 (*stmt);
3035 if (TREE_CODE (rhs) == CONSTRUCTOR)
3036 return sra_modify_constructor_assign (stmt, gsi);
3038 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3039 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3040 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3042 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3043 gsi, false);
3044 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3045 gsi, true);
3046 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3049 lacc = get_access_for_expr (lhs);
3050 racc = get_access_for_expr (rhs);
3051 if (!lacc && !racc)
3052 return SRA_AM_NONE;
3054 loc = gimple_location (*stmt);
3055 if (lacc && lacc->grp_to_be_replaced)
3057 lhs = get_access_replacement (lacc);
3058 gimple_assign_set_lhs (*stmt, lhs);
3059 modify_this_stmt = true;
3060 if (lacc->grp_partial_lhs)
3061 force_gimple_rhs = true;
3062 sra_stats.exprs++;
3065 if (racc && racc->grp_to_be_replaced)
3067 rhs = get_access_replacement (racc);
3068 modify_this_stmt = true;
3069 if (racc->grp_partial_lhs)
3070 force_gimple_rhs = true;
3071 sra_stats.exprs++;
3073 else if (racc
3074 && !racc->grp_unscalarized_data
3075 && TREE_CODE (lhs) == SSA_NAME
3076 && !access_has_replacements_p (racc))
3078 rhs = get_repl_default_def_ssa_name (racc);
3079 modify_this_stmt = true;
3080 sra_stats.exprs++;
3083 if (modify_this_stmt)
3085 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3087 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3088 ??? This should move to fold_stmt which we simply should
3089 call after building a VIEW_CONVERT_EXPR here. */
3090 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3091 && !contains_bitfld_comp_ref_p (lhs))
3093 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3094 gimple_assign_set_lhs (*stmt, lhs);
3096 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3097 && !contains_vce_or_bfcref_p (rhs))
3098 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3100 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3102 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3103 rhs);
3104 if (is_gimple_reg_type (TREE_TYPE (lhs))
3105 && TREE_CODE (lhs) != SSA_NAME)
3106 force_gimple_rhs = true;
3111 if (lacc && lacc->grp_to_be_debug_replaced)
3113 tree dlhs = get_access_replacement (lacc);
3114 tree drhs = unshare_expr (rhs);
3115 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3117 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3118 && !contains_vce_or_bfcref_p (drhs))
3119 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3120 if (drhs
3121 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3122 TREE_TYPE (drhs)))
3123 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3124 TREE_TYPE (dlhs), drhs);
3126 gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3127 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3130 /* From this point on, the function deals with assignments between aggregates
3131 when at least one has scalar reductions of some of its components. There are
3132 three possible scenarios: 1) both the LHS and the RHS have to-be-scalarized
3133 components, 2) only the RHS has, or 3) only the LHS has.
3135 In the first case, we would like to load the LHS components from RHS
3136 components whenever possible. If that is not possible, we would like to
3137 read it directly from the RHS (after updating it by storing in it its own
3138 components). If there are some necessary unscalarized data in the LHS,
3139 those will be loaded by the original assignment too. If neither of these
3140 cases happen, the original statement can be removed. Most of this is done
3141 by load_assign_lhs_subreplacements.
3143 In the second case, we would like to store all RHS scalarized components
3144 directly into LHS and if they cover the aggregate completely, remove the
3145 statement too. In the third case, we want the LHS components to be loaded
3146 directly from the RHS (DSE will remove the original statement if it
3147 becomes redundant).
3149 This is a bit complex but manageable when types match and when unions do
3150 not cause confusion in a way that we cannot really load a component of LHS
3151 from the RHS or vice versa (the access representing this level can have
3152 subaccesses that are accessible only through a different union field at a
3153 higher level - different from the one used in the examined expression).
3154 Unions are fun.
3156 Therefore, I specially handle a fourth case, happening when there is a
3157 specific type cast or it is impossible to locate a scalarized subaccess on
3158 the other side of the expression. If that happens, I simply "refresh" the
3159 RHS by storing in it its scalarized components, leave the original statement
3160 there to do the copying and then load the scalar replacements of the LHS.
3161 This is what the first branch does. */
3163 if (modify_this_stmt
3164 || gimple_has_volatile_ops (*stmt)
3165 || contains_vce_or_bfcref_p (rhs)
3166 || contains_vce_or_bfcref_p (lhs))
3168 if (access_has_children_p (racc))
3169 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3170 gsi, false, false, loc);
3171 if (access_has_children_p (lacc))
3172 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3173 gsi, true, true, loc);
3174 sra_stats.separate_lhs_rhs_handling++;
3176 /* This gimplification must be done after generate_subtree_copies,
3177 lest we insert the subtree copies in the middle of the gimplified
3178 sequence. */
3179 if (force_gimple_rhs)
3180 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3181 true, GSI_SAME_STMT);
3182 if (gimple_assign_rhs1 (*stmt) != rhs)
3184 modify_this_stmt = true;
3185 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3186 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3189 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3191 else
3193 if (access_has_children_p (lacc)
3194 && access_has_children_p (racc)
3195 /* When an access represents an unscalarizable region, it usually
3196 represents accesses with variable offset and thus must not be used
3197 to generate new memory accesses. */
3198 && !lacc->grp_unscalarizable_region
3199 && !racc->grp_unscalarizable_region)
3201 gimple_stmt_iterator orig_gsi = *gsi;
3202 enum unscalarized_data_handling refreshed;
3204 if (lacc->grp_read && !lacc->grp_covered)
3205 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3206 else
3207 refreshed = SRA_UDH_NONE;
3209 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3210 &orig_gsi, gsi, &refreshed);
3211 if (refreshed != SRA_UDH_RIGHT)
3213 gsi_next (gsi);
3214 unlink_stmt_vdef (*stmt);
3215 gsi_remove (&orig_gsi, true);
3216 release_defs (*stmt);
3217 sra_stats.deleted++;
3218 return SRA_AM_REMOVED;
3221 else
3223 if (access_has_children_p (racc)
3224 && !racc->grp_unscalarized_data)
3226 if (dump_file)
3228 fprintf (dump_file, "Removing load: ");
3229 print_gimple_stmt (dump_file, *stmt, 0, 0);
3231 generate_subtree_copies (racc->first_child, lhs,
3232 racc->offset, 0, 0, gsi,
3233 false, false, loc);
3234 gcc_assert (*stmt == gsi_stmt (*gsi));
3235 unlink_stmt_vdef (*stmt);
3236 gsi_remove (gsi, true);
3237 release_defs (*stmt);
3238 sra_stats.deleted++;
3239 return SRA_AM_REMOVED;
3241 /* Restore the aggregate RHS from its components so the
3242 prevailing aggregate copy does the right thing. */
3243 if (access_has_children_p (racc))
3244 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3245 gsi, false, false, loc);
3246 /* Re-load the components of the aggregate copy destination.
3247 But use the RHS aggregate to load from to expose more
3248 optimization opportunities. */
3249 if (access_has_children_p (lacc))
3250 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3251 0, 0, gsi, true, true, loc);
3254 return SRA_AM_NONE;
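/* An end-to-end illustration (all names hypothetical): for

     struct S { int i; int j; } a, b;
     a = b;

   with both sides fully scalarized, load_assign_lhs_subreplacements turns
   the copy into "SR_a_i = SR_b_i; SR_a_j = SR_b_j;" and, since nothing had
   to be flushed to the RHS, the original aggregate assignment is removed
   (SRA_AM_REMOVED).  If b also carried unscalarized data, the statement
   would typically be kept to perform the remaining copy.  */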
3258 /* Traverse the function body and perform all modifications as decided in
3259 analyze_all_variable_accesses. Return true iff the CFG has been
3260 changed. */
3262 static bool
3263 sra_modify_function_body (void)
3265 bool cfg_changed = false;
3266 basic_block bb;
3268 FOR_EACH_BB (bb)
3270 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3271 while (!gsi_end_p (gsi))
3273 gimple stmt = gsi_stmt (gsi);
3274 enum assignment_mod_result assign_result;
3275 bool modified = false, deleted = false;
3276 tree *t;
3277 unsigned i;
3279 switch (gimple_code (stmt))
3281 case GIMPLE_RETURN:
3282 t = gimple_return_retval_ptr (stmt);
3283 if (*t != NULL_TREE)
3284 modified |= sra_modify_expr (t, &gsi, false);
3285 break;
3287 case GIMPLE_ASSIGN:
3288 assign_result = sra_modify_assign (&stmt, &gsi);
3289 modified |= assign_result == SRA_AM_MODIFIED;
3290 deleted = assign_result == SRA_AM_REMOVED;
3291 break;
3293 case GIMPLE_CALL:
3294 /* Operands must be processed before the lhs. */
3295 for (i = 0; i < gimple_call_num_args (stmt); i++)
3297 t = gimple_call_arg_ptr (stmt, i);
3298 modified |= sra_modify_expr (t, &gsi, false);
3301 if (gimple_call_lhs (stmt))
3303 t = gimple_call_lhs_ptr (stmt);
3304 modified |= sra_modify_expr (t, &gsi, true);
3306 break;
3308 case GIMPLE_ASM:
3309 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3311 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3312 modified |= sra_modify_expr (t, &gsi, false);
3314 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3316 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3317 modified |= sra_modify_expr (t, &gsi, true);
3319 break;
3321 default:
3322 break;
3325 if (modified)
3327 update_stmt (stmt);
3328 if (maybe_clean_eh_stmt (stmt)
3329 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3330 cfg_changed = true;
3332 if (!deleted)
3333 gsi_next (&gsi);
3337 return cfg_changed;
3340 /* Generate statements initializing scalar replacements of parts of function
3341 parameters. */
3343 static void
3344 initialize_parameter_reductions (void)
3346 gimple_stmt_iterator gsi;
3347 gimple_seq seq = NULL;
3348 tree parm;
3350 gsi = gsi_start (seq);
3351 for (parm = DECL_ARGUMENTS (current_function_decl);
3352 parm;
3353 parm = DECL_CHAIN (parm))
3355 vec<access_p> *access_vec;
3356 struct access *access;
3358 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3359 continue;
3360 access_vec = get_base_access_vector (parm);
3361 if (!access_vec)
3362 continue;
3364 for (access = (*access_vec)[0];
3365 access;
3366 access = access->next_grp)
3367 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3368 EXPR_LOCATION (parm));
3371 seq = gsi_seq (gsi);
3372 if (seq)
3373 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
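/* For illustration: for "void f (struct S s)" with scalarized components
   s.i and s.j, this inserts statements along the lines of

     SR_1 = s.i;
     SR_2 = s.j;

   on the single edge out of the entry block, so the replacements are
   initialized from the incoming parameter before the body executes.  The
   SR_* names are hypothetical.  */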
3376 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3377 it reveals there are components of some aggregates to be scalarized, it runs
3378 the required transformations. */
3379 static unsigned int
3380 perform_intra_sra (void)
3382 int ret = 0;
3383 sra_initialize ();
3385 if (!find_var_candidates ())
3386 goto out;
3388 if (!scan_function ())
3389 goto out;
3391 if (!analyze_all_variable_accesses ())
3392 goto out;
3394 if (sra_modify_function_body ())
3395 ret = TODO_update_ssa | TODO_cleanup_cfg;
3396 else
3397 ret = TODO_update_ssa;
3398 initialize_parameter_reductions ();
3400 statistics_counter_event (cfun, "Scalar replacements created",
3401 sra_stats.replacements);
3402 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3403 statistics_counter_event (cfun, "Subtree copy stmts",
3404 sra_stats.subtree_copies);
3405 statistics_counter_event (cfun, "Subreplacement stmts",
3406 sra_stats.subreplacements);
3407 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3408 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3409 sra_stats.separate_lhs_rhs_handling);
3411 out:
3412 sra_deinitialize ();
3413 return ret;
3416 /* Perform early intraprocedural SRA. */
3417 static unsigned int
3418 early_intra_sra (void)
3420 sra_mode = SRA_MODE_EARLY_INTRA;
3421 return perform_intra_sra ();
3424 /* Perform "late" intraprocedural SRA. */
3425 static unsigned int
3426 late_intra_sra (void)
3428 sra_mode = SRA_MODE_INTRA;
3429 return perform_intra_sra ();
3433 static bool
3434 gate_intra_sra (void)
3436 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3440 struct gimple_opt_pass pass_sra_early =
3443 GIMPLE_PASS,
3444 "esra", /* name */
3445 OPTGROUP_NONE, /* optinfo_flags */
3446 gate_intra_sra, /* gate */
3447 early_intra_sra, /* execute */
3448 NULL, /* sub */
3449 NULL, /* next */
3450 0, /* static_pass_number */
3451 TV_TREE_SRA, /* tv_id */
3452 PROP_cfg | PROP_ssa, /* properties_required */
3453 0, /* properties_provided */
3454 0, /* properties_destroyed */
3455 0, /* todo_flags_start */
3456 TODO_update_ssa
3457 | TODO_ggc_collect
3458 | TODO_verify_ssa /* todo_flags_finish */
3462 struct gimple_opt_pass pass_sra =
3465 GIMPLE_PASS,
3466 "sra", /* name */
3467 OPTGROUP_NONE, /* optinfo_flags */
3468 gate_intra_sra, /* gate */
3469 late_intra_sra, /* execute */
3470 NULL, /* sub */
3471 NULL, /* next */
3472 0, /* static_pass_number */
3473 TV_TREE_SRA, /* tv_id */
3474 PROP_cfg | PROP_ssa, /* properties_required */
3475 0, /* properties_provided */
3476 0, /* properties_destroyed */
3477 TODO_update_address_taken, /* todo_flags_start */
3478 TODO_update_ssa
3479 | TODO_ggc_collect
3480 | TODO_verify_ssa /* todo_flags_finish */
3485 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
3486 parameter. */
3488 static bool
3489 is_unused_scalar_param (tree parm)
3491 tree name;
3492 return (is_gimple_reg (parm)
3493 && (!(name = ssa_default_def (cfun, parm))
3494 || has_zero_uses (name)));
3497 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3498 examine whether there are any direct or otherwise infeasible ones. If so,
3499 return true, otherwise return false. PARM must be a gimple register with a
3500 non-NULL default definition. */
3502 static bool
3503 ptr_parm_has_direct_uses (tree parm)
3505 imm_use_iterator ui;
3506 gimple stmt;
3507 tree name = ssa_default_def (cfun, parm);
3508 bool ret = false;
3510 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3512 int uses_ok = 0;
3513 use_operand_p use_p;
3515 if (is_gimple_debug (stmt))
3516 continue;
3518 /* Valid uses include dereferences on the lhs and the rhs. */
3519 if (gimple_has_lhs (stmt))
3521 tree lhs = gimple_get_lhs (stmt);
3522 while (handled_component_p (lhs))
3523 lhs = TREE_OPERAND (lhs, 0);
3524 if (TREE_CODE (lhs) == MEM_REF
3525 && TREE_OPERAND (lhs, 0) == name
3526 && integer_zerop (TREE_OPERAND (lhs, 1))
3527 && types_compatible_p (TREE_TYPE (lhs),
3528 TREE_TYPE (TREE_TYPE (name)))
3529 && !TREE_THIS_VOLATILE (lhs))
3530 uses_ok++;
3532 if (gimple_assign_single_p (stmt))
3534 tree rhs = gimple_assign_rhs1 (stmt);
3535 while (handled_component_p (rhs))
3536 rhs = TREE_OPERAND (rhs, 0);
3537 if (TREE_CODE (rhs) == MEM_REF
3538 && TREE_OPERAND (rhs, 0) == name
3539 && integer_zerop (TREE_OPERAND (rhs, 1))
3540 && types_compatible_p (TREE_TYPE (rhs),
3541 TREE_TYPE (TREE_TYPE (name)))
3542 && !TREE_THIS_VOLATILE (rhs))
3543 uses_ok++;
3545 else if (is_gimple_call (stmt))
3547 unsigned i;
3548 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3550 tree arg = gimple_call_arg (stmt, i);
3551 while (handled_component_p (arg))
3552 arg = TREE_OPERAND (arg, 0);
3553 if (TREE_CODE (arg) == MEM_REF
3554 && TREE_OPERAND (arg, 0) == name
3555 && integer_zerop (TREE_OPERAND (arg, 1))
3556 && types_compatible_p (TREE_TYPE (arg),
3557 TREE_TYPE (TREE_TYPE (name)))
3558 && !TREE_THIS_VOLATILE (arg))
3559 uses_ok++;
3563 /* If the number of valid uses does not match the number of
3564 uses in this stmt there is an unhandled use. */
3565 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3566 --uses_ok;
3568 if (uses_ok != 0)
3569 ret = true;
3571 if (ret)
3572 BREAK_FROM_IMM_USE_STMT (ui);
3575 return ret;
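/* Illustrative examples (hypothetical code): in

     int f (int *p) { return *p; }

   the only use of p is a dereference at offset zero with a compatible type,
   so every use is counted in uses_ok and the function returns false, leaving
   p an IPA-SRA candidate.  In contrast, "q = p;" uses the pointer value
   itself, is not counted in uses_ok and makes the function return true,
   disqualifying p.  */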
3578 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3579 them in candidate_bitmap. Note that these do not necessarily include
3580 parameters which are unused and thus can be removed. Return true iff any
3581 such candidate has been found. */
3583 static bool
3584 find_param_candidates (void)
3586 tree parm;
3587 int count = 0;
3588 bool ret = false;
3589 const char *msg;
3591 for (parm = DECL_ARGUMENTS (current_function_decl);
3592 parm;
3593 parm = DECL_CHAIN (parm))
3595 tree type = TREE_TYPE (parm);
3596 void **slot;
3598 count++;
3600 if (TREE_THIS_VOLATILE (parm)
3601 || TREE_ADDRESSABLE (parm)
3602 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3603 continue;
3605 if (is_unused_scalar_param (parm))
3607 ret = true;
3608 continue;
3611 if (POINTER_TYPE_P (type))
3613 type = TREE_TYPE (type);
3615 if (TREE_CODE (type) == FUNCTION_TYPE
3616 || TYPE_VOLATILE (type)
3617 || upc_shared_type_p (type)
3618 || (TREE_CODE (type) == ARRAY_TYPE
3619 && TYPE_NONALIASED_COMPONENT (type))
3620 || !is_gimple_reg (parm)
3621 || is_va_list_type (type)
3622 || ptr_parm_has_direct_uses (parm))
3623 continue;
3625 else if (!AGGREGATE_TYPE_P (type))
3626 continue;
3628 if (!COMPLETE_TYPE_P (type)
3629 || !host_integerp (TYPE_SIZE (type), 1)
3630 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3631 || (AGGREGATE_TYPE_P (type)
3632 && type_internals_preclude_sra_p (type, &msg)))
3633 continue;
3635 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3636 slot = htab_find_slot_with_hash (candidates, parm,
3637 DECL_UID (parm), INSERT);
3638 *slot = (void *) parm;
3640 ret = true;
3641 if (dump_file && (dump_flags & TDF_DETAILS))
3643 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3644 print_generic_expr (dump_file, parm, 0);
3645 fprintf (dump_file, "\n");
3649 func_param_count = count;
3650 return ret;
3653 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3654 maybe_modified. */
3656 static bool
3657 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3658 void *data)
3660 struct access *repr = (struct access *) data;
3662 repr->grp_maybe_modified = 1;
3663 return true;
3666 /* Analyze what representatives (in linked lists accessible from
3667 REPRESENTATIVES) can be modified by side effects of statements in the
3668 current function. */
3670 static void
3671 analyze_modified_params (vec<access_p> representatives)
3673 int i;
3675 for (i = 0; i < func_param_count; i++)
3677 struct access *repr;
3679 for (repr = representatives[i];
3680 repr;
3681 repr = repr->next_grp)
3683 struct access *access;
3684 bitmap visited;
3685 ao_ref ar;
3687 if (no_accesses_p (repr))
3688 continue;
3689 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3690 || repr->grp_maybe_modified)
3691 continue;
3693 ao_ref_init (&ar, repr->expr);
3694 visited = BITMAP_ALLOC (NULL);
3695 for (access = repr; access; access = access->next_sibling)
3697 /* All accesses are read ones, otherwise grp_maybe_modified would
3698 be trivially set. */
3699 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3700 mark_maybe_modified, repr, &visited);
3701 if (repr->grp_maybe_modified)
3702 break;
3704 BITMAP_FREE (visited);
3709 /* Propagate distances in bb_dereferences in the opposite direction to the
3710 control flow edges, in each step storing the maximum of the current value
3711 and the minimum of all successors. These steps are repeated until the table
3712 stabilizes. Note that BBs which might terminate the function (according to
3713 the final_bbs bitmap) are never updated in this way.
3715 static void
3716 propagate_dereference_distances (void)
3718 vec<basic_block> queue;
3719 basic_block bb;
3721 queue.create (last_basic_block_for_function (cfun));
3722 queue.quick_push (ENTRY_BLOCK_PTR);
3723 FOR_EACH_BB (bb)
3725 queue.quick_push (bb);
3726 bb->aux = bb;
3729 while (!queue.is_empty ())
3731 edge_iterator ei;
3732 edge e;
3733 bool change = false;
3734 int i;
3736 bb = queue.pop ();
3737 bb->aux = NULL;
3739 if (bitmap_bit_p (final_bbs, bb->index))
3740 continue;
3742 for (i = 0; i < func_param_count; i++)
3744 int idx = bb->index * func_param_count + i;
3745 bool first = true;
3746 HOST_WIDE_INT inh = 0;
3748 FOR_EACH_EDGE (e, ei, bb->succs)
3750 int succ_idx = e->dest->index * func_param_count + i;
3752 if (e->src == EXIT_BLOCK_PTR)
3753 continue;
3755 if (first)
3757 first = false;
3758 inh = bb_dereferences [succ_idx];
3760 else if (bb_dereferences [succ_idx] < inh)
3761 inh = bb_dereferences [succ_idx];
3764 if (!first && bb_dereferences[idx] < inh)
3766 bb_dereferences[idx] = inh;
3767 change = true;
3771 if (change && !bitmap_bit_p (final_bbs, bb->index))
3772 FOR_EACH_EDGE (e, ei, bb->preds)
3774 if (e->src->aux)
3775 continue;
3777 e->src->aux = e->src;
3778 queue.quick_push (e->src);
3782 queue.release ();
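/* A small illustration: with a single parameter and a block whose two
   successors certainly dereference it up to offsets 64 and 32 bits
   respectively, the block inherits min (64, 32) == 32, i.e. only a 32-bit
   dereference is guaranteed on every path from it.  The numbers are
   hypothetical.  */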
3785 /* Dump a dereferences TABLE with heading STR to file F. */
3787 static void
3788 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3790 basic_block bb;
3792 fprintf (f, "%s", str);
3793 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3795 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3796 if (bb != EXIT_BLOCK_PTR)
3798 int i;
3799 for (i = 0; i < func_param_count; i++)
3801 int idx = bb->index * func_param_count + i;
3802 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3805 fprintf (f, "\n");
3807 fprintf (f, "\n");
3810 /* Determine which (parts of) parameters passed by reference and not assigned
3811 to are not certainly dereferenced in this function and thus the
3812 dereferencing cannot be safely moved to the caller without potentially
3813 introducing a segfault. Mark such REPRESENTATIVES as
3814 grp_not_necessarilly_dereferenced.
3816 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3817 part, is calculated for each pointer parameter rather than a simple boolean,
3818 in order to handle cases when only a fraction of the whole
3819 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3820 an example).
3822 The maximum dereference distances for each pointer parameter and BB are
3823 already stored in bb_dereferences. This routine simply propagates these
3824 values upwards by propagate_dereference_distances and then compares the
3825 distances of individual parameters in the ENTRY BB to the equivalent
3826 distances of each representative of a (fraction of a) parameter. */
3828 static void
3829 analyze_caller_dereference_legality (vec<access_p> representatives)
3831 int i;
3833 if (dump_file && (dump_flags & TDF_DETAILS))
3834 dump_dereferences_table (dump_file,
3835 "Dereference table before propagation:\n",
3836 bb_dereferences);
3838 propagate_dereference_distances ();
3840 if (dump_file && (dump_flags & TDF_DETAILS))
3841 dump_dereferences_table (dump_file,
3842 "Dereference table after propagation:\n",
3843 bb_dereferences);
3845 for (i = 0; i < func_param_count; i++)
3847 struct access *repr = representatives[i];
3848 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3850 if (!repr || no_accesses_p (repr))
3851 continue;
3855 if ((repr->offset + repr->size) > bb_dereferences[idx])
3856 repr->grp_not_necessarilly_dereferenced = 1;
3857 repr = repr->next_grp;
3859 while (repr);
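/* For example (a sketch): if a representative covers bits 32..63 of the
   pointee, then repr->offset + repr->size is 64 and that part of the
   parameter is considered surely dereferenced only when the propagated
   distance for the ENTRY block is at least 64, i.e. every path from the
   entry dereferences the pointer at least that far. */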
3863 /* Return the representative access for the parameter declaration PARM if it is
3864 a scalar passed by reference which is not written to and the pointer value
3865 is not used directly. Thus, if it is legal to dereference it in the caller
3866 and we can rule out modifications through aliases, such a parameter should
3867 be turned into one passed by value. Return NULL otherwise. */
3869 static struct access *
3870 unmodified_by_ref_scalar_representative (tree parm)
3872 int i, access_count;
3873 struct access *repr;
3874 vec<access_p> *access_vec;
3876 access_vec = get_base_access_vector (parm);
3877 gcc_assert (access_vec);
3878 repr = (*access_vec)[0];
3879 if (repr->write)
3880 return NULL;
3881 repr->group_representative = repr;
3883 access_count = access_vec->length ();
3884 for (i = 1; i < access_count; i++)
3886 struct access *access = (*access_vec)[i];
3887 if (access->write)
3888 return NULL;
3889 access->group_representative = repr;
3890 access->next_sibling = repr->next_sibling;
3891 repr->next_sibling = access;
3894 repr->grp_read = 1;
3895 repr->grp_scalar_ptr = 1;
3896 return repr;
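/* For illustration, in a function such as

     int foo (int *p) { return *p + *p; }

   both accesses to *P are reads, so the first one becomes the group
   representative returned above and IPA-SRA may later turn P into a
   scalar passed by value (a sketch; the final decision is made in
   analyze_all_param_acesses below). */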
3899 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3900 associated with. REQ_ALIGN is the minimum required alignment. */
3902 static bool
3903 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3905 unsigned int exp_align;
3906 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3907 is an incompatible assignment in a call statement (and possibly even in asm
3908 statements). This can be relaxed by using a new temporary but only for
3909 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3910 intraprocedural SRA we deal with this by keeping the old aggregate around,
3911 something we cannot do in IPA-SRA.) */
3912 if (access->write
3913 && (is_gimple_call (access->stmt)
3914 || gimple_code (access->stmt) == GIMPLE_ASM))
3915 return true;
3917 exp_align = get_object_alignment (access->expr);
3918 if (exp_align < req_align)
3919 return true;
3921 return false;
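/* For example (a sketch, target-dependent): reading a 32-bit field of a
   packed structure through a pointer may yield get_object_alignment of
   only 8 bits; with REQ_ALIGN of 32 such an access precludes IPA-SRA
   because the replacement could not be accessed with its natural
   alignment in the caller. */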
3925 /* Sort collected accesses for parameter PARM, identify representatives for
3926 each accessed region and link them together. Return NULL if there are
3927 different but overlapping accesses, return the special pointer value that
3928 means there are no accesses for this parameter if that is the case, and
3929 return the first representative otherwise. Set *RO_GRP if there is a group
3930 of accesses with only read (i.e. no write) accesses. */
3932 static struct access *
3933 splice_param_accesses (tree parm, bool *ro_grp)
3935 int i, j, access_count, group_count;
3936 int agg_size, total_size = 0;
3937 struct access *access, *res, **prev_acc_ptr = &res;
3938 vec<access_p> *access_vec;
3940 access_vec = get_base_access_vector (parm);
3941 if (!access_vec)
3942 return &no_accesses_representant;
3943 access_count = access_vec->length ();
3945 access_vec->qsort (compare_access_positions);
3947 i = 0;
3948 total_size = 0;
3949 group_count = 0;
3950 while (i < access_count)
3952 bool modification;
3953 tree a1_alias_type;
3954 access = (*access_vec)[i];
3955 modification = access->write;
3956 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
3957 return NULL;
3958 a1_alias_type = reference_alias_ptr_type (access->expr);
3960 /* Access is about to become group representative unless we find some
3961 nasty overlap which would preclude us from breaking this parameter
3962 apart. */
3964 j = i + 1;
3965 while (j < access_count)
3967 struct access *ac2 = (*access_vec)[j];
3968 if (ac2->offset != access->offset)
3970 /* All or nothing law for parameters. */
3971 if (access->offset + access->size > ac2->offset)
3972 return NULL;
3973 else
3974 break;
3976 else if (ac2->size != access->size)
3977 return NULL;
3979 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
3980 || (ac2->type != access->type
3981 && (TREE_ADDRESSABLE (ac2->type)
3982 || TREE_ADDRESSABLE (access->type)))
3983 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3984 return NULL;
3986 modification |= ac2->write;
3987 ac2->group_representative = access;
3988 ac2->next_sibling = access->next_sibling;
3989 access->next_sibling = ac2;
3990 j++;
3993 group_count++;
3994 access->grp_maybe_modified = modification;
3995 if (!modification)
3996 *ro_grp = true;
3997 *prev_acc_ptr = access;
3998 prev_acc_ptr = &access->next_grp;
3999 total_size += access->size;
4000 i = j;
4003 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4004 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4005 else
4006 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4007 if (total_size >= agg_size)
4008 return NULL;
4010 gcc_assert (group_count > 0);
4011 return res;
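/* Example of the all-or-nothing law above (a sketch): two accesses to
   *P at bit ranges [0, 32) and [0, 32) share offset and size, so the
   second one simply joins the first one's group; ranges [0, 32) and
   [16, 48) overlap only partially, so NULL is returned and the whole
   parameter stays intact. */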
4014 /* Decide whether parameters with representative accesses given by REPR should
4015 be reduced into components. */
4017 static int
4018 decide_one_param_reduction (struct access *repr)
4020 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4021 bool by_ref;
4022 tree parm;
4024 parm = repr->base;
4025 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4026 gcc_assert (cur_parm_size > 0);
4028 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4030 by_ref = true;
4031 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4033 else
4035 by_ref = false;
4036 agg_size = cur_parm_size;
4039 if (dump_file)
4041 struct access *acc;
4042 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4043 print_generic_expr (dump_file, parm, 0);
4044 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4045 for (acc = repr; acc; acc = acc->next_grp)
4046 dump_access (dump_file, acc, true);
4049 total_size = 0;
4050 new_param_count = 0;
4052 for (; repr; repr = repr->next_grp)
4054 gcc_assert (parm == repr->base);
4056 /* Taking the address of a non-addressable field is verboten. */
4057 if (by_ref && repr->non_addressable)
4058 return 0;
4060 /* Do not decompose a non-BLKmode param in a way that would
4061 create BLKmode params. Especially for by-reference passing
4062 (thus, pointer-type param) this is hardly worthwhile. */
4063 if (DECL_MODE (parm) != BLKmode
4064 && TYPE_MODE (repr->type) == BLKmode)
4065 return 0;
4067 if (!by_ref || (!repr->grp_maybe_modified
4068 && !repr->grp_not_necessarilly_dereferenced))
4069 total_size += repr->size;
4070 else
4071 total_size += cur_parm_size;
4073 new_param_count++;
4076 gcc_assert (new_param_count > 0);
4078 if (optimize_function_for_size_p (cfun))
4079 parm_size_limit = cur_parm_size;
4080 else
4081 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4082 * cur_parm_size);
4084 if (total_size < agg_size
4085 && total_size <= parm_size_limit)
4087 if (dump_file)
4088 fprintf (dump_file, " ....will be split into %i components\n",
4089 new_param_count);
4090 return new_param_count;
4092 else
4093 return 0;
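/* A worked example (a sketch, assuming 64-bit pointers and the default
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR of 2): for a by-reference parameter
   pointing to a 128-bit aggregate of which two unmodified, surely
   dereferenced 32-bit fields are read, total_size is 64, agg_size is
   128 and parm_size_limit is 2 * 64 = 128; both tests above pass and
   the parameter is split into two components. */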
4096 /* The order of the following enumerators is important; we need to do extra
4097 work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4098 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4099 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4101 /* Identify representatives of all accesses to all candidate parameters for
4102 IPA-SRA. Return result based on what representatives have been found. */
4104 static enum ipa_splicing_result
4105 splice_all_param_accesses (vec<access_p> &representatives)
4107 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4108 tree parm;
4109 struct access *repr;
4111 representatives.create (func_param_count);
4113 for (parm = DECL_ARGUMENTS (current_function_decl);
4114 parm;
4115 parm = DECL_CHAIN (parm))
4117 if (is_unused_scalar_param (parm))
4119 representatives.quick_push (&no_accesses_representant);
4120 if (result == NO_GOOD_ACCESS)
4121 result = UNUSED_PARAMS;
4123 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4124 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4125 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4127 repr = unmodified_by_ref_scalar_representative (parm);
4128 representatives.quick_push (repr);
4129 if (repr)
4130 result = UNMODIF_BY_REF_ACCESSES;
4132 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4134 bool ro_grp = false;
4135 repr = splice_param_accesses (parm, &ro_grp);
4136 representatives.quick_push (repr);
4138 if (repr && !no_accesses_p (repr))
4140 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4142 if (ro_grp)
4143 result = UNMODIF_BY_REF_ACCESSES;
4144 else if (result < MODIF_BY_REF_ACCESSES)
4145 result = MODIF_BY_REF_ACCESSES;
4147 else if (result < BY_VAL_ACCESSES)
4148 result = BY_VAL_ACCESSES;
4150 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4151 result = UNUSED_PARAMS;
4153 else
4154 representatives.quick_push (NULL);
4157 if (result == NO_GOOD_ACCESS)
4159 representatives.release ();
4160 return NO_GOOD_ACCESS;
4163 return result;
4166 /* Return the index of BASE in PARMS. Abort if it is not found. */
4168 static inline int
4169 get_param_index (tree base, vec<tree> parms)
4171 int i, len;
4173 len = parms.length ();
4174 for (i = 0; i < len; i++)
4175 if (parms[i] == base)
4176 return i;
4177 gcc_unreachable ();
4180 /* Convert the decisions made at the representative level into compact
4181 parameter adjustments. REPRESENTATIVES are pointers to the first
4182 representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the
4183 expected final number of adjustments. */
4185 static ipa_parm_adjustment_vec
4186 turn_representatives_into_adjustments (vec<access_p> representatives,
4187 int adjustments_count)
4189 vec<tree> parms;
4190 ipa_parm_adjustment_vec adjustments;
4191 tree parm;
4192 int i;
4194 gcc_assert (adjustments_count > 0);
4195 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4196 adjustments.create (adjustments_count);
4197 parm = DECL_ARGUMENTS (current_function_decl);
4198 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4200 struct access *repr = representatives[i];
4202 if (!repr || no_accesses_p (repr))
4204 struct ipa_parm_adjustment adj;
4206 memset (&adj, 0, sizeof (adj));
4207 adj.base_index = get_param_index (parm, parms);
4208 adj.base = parm;
4209 if (!repr)
4210 adj.copy_param = 1;
4211 else
4212 adj.remove_param = 1;
4213 adjustments.quick_push (adj);
4215 else
4217 struct ipa_parm_adjustment adj;
4218 int index = get_param_index (parm, parms);
4220 for (; repr; repr = repr->next_grp)
4222 memset (&adj, 0, sizeof (adj));
4223 gcc_assert (repr->base == parm);
4224 adj.base_index = index;
4225 adj.base = repr->base;
4226 adj.type = repr->type;
4227 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4228 adj.offset = repr->offset;
4229 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4230 && (repr->grp_maybe_modified
4231 || repr->grp_not_necessarilly_dereferenced));
4232 adjustments.quick_push (adj);
4236 parms.release ();
4237 return adjustments;
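/* A sketch of the result: for a function with a single aggregate
   parameter of which two fields at bit offsets 0 and 64 are to be
   scalarized, the vector would hold two adjustments, both with
   base_index 0 and base pointing to the PARM_DECL, differing in type
   and offset (hypothetical layout for illustration). */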
4240 /* Analyze the collected accesses and produce a plan of what to do with the
4241 parameters in the form of adjustments, an empty vector meaning nothing. */
4243 static ipa_parm_adjustment_vec
4244 analyze_all_param_acesses (void)
4246 enum ipa_splicing_result repr_state;
4247 bool proceed = false;
4248 int i, adjustments_count = 0;
4249 vec<access_p> representatives;
4250 ipa_parm_adjustment_vec adjustments;
4252 repr_state = splice_all_param_accesses (representatives);
4253 if (repr_state == NO_GOOD_ACCESS)
4254 return ipa_parm_adjustment_vec ();
4256 /* If there are any parameters passed by reference which are not modified
4257 directly, we need to check whether they can be modified indirectly. */
4258 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4260 analyze_caller_dereference_legality (representatives);
4261 analyze_modified_params (representatives);
4264 for (i = 0; i < func_param_count; i++)
4266 struct access *repr = representatives[i];
4268 if (repr && !no_accesses_p (repr))
4270 if (repr->grp_scalar_ptr)
4272 adjustments_count++;
4273 if (repr->grp_not_necessarilly_dereferenced
4274 || repr->grp_maybe_modified)
4275 representatives[i] = NULL;
4276 else
4278 proceed = true;
4279 sra_stats.scalar_by_ref_to_by_val++;
4282 else
4284 int new_components = decide_one_param_reduction (repr);
4286 if (new_components == 0)
4288 representatives[i] = NULL;
4289 adjustments_count++;
4291 else
4293 adjustments_count += new_components;
4294 sra_stats.aggregate_params_reduced++;
4295 sra_stats.param_reductions_created += new_components;
4296 proceed = true;
4300 else
4302 if (no_accesses_p (repr))
4304 proceed = true;
4305 sra_stats.deleted_unused_parameters++;
4307 adjustments_count++;
4311 if (!proceed && dump_file)
4312 fprintf (dump_file, "NOT proceeding to change params.\n");
4314 if (proceed)
4315 adjustments = turn_representatives_into_adjustments (representatives,
4316 adjustments_count);
4317 else
4318 adjustments = ipa_parm_adjustment_vec ();
4320 representatives.release ();
4321 return adjustments;
4324 /* If a parameter replacement identified by ADJ does not yet exist in the
4325 form of a declaration, create it and record it; otherwise return the
4326 previously created one. */
4328 static tree
4329 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4331 tree repl;
4332 if (!adj->new_ssa_base)
4334 char *pretty_name = make_fancy_name (adj->base);
4336 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4337 DECL_NAME (repl) = get_identifier (pretty_name);
4338 obstack_free (&name_obstack, pretty_name);
4340 adj->new_ssa_base = repl;
4342 else
4343 repl = adj->new_ssa_base;
4344 return repl;
4347 /* Find the first adjustment for a particular parameter BASE in a vector of
4348 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4349 adjustment. */
4351 static struct ipa_parm_adjustment *
4352 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4354 int i, len;
4356 len = adjustments.length ();
4357 for (i = 0; i < len; i++)
4359 struct ipa_parm_adjustment *adj;
4361 adj = &adjustments[i];
4362 if (!adj->copy_param && adj->base == base)
4363 return adj;
4366 return NULL;
4369 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4370 removed because its value is not used, replace the SSA_NAME, together
4371 with all of its uses, with one relating to a newly created VAR_DECL and
4372 return true. ADJUSTMENTS is a pointer to an adjustments vector. */
4374 static bool
4375 replace_removed_params_ssa_names (gimple stmt,
4376 ipa_parm_adjustment_vec adjustments)
4378 struct ipa_parm_adjustment *adj;
4379 tree lhs, decl, repl, name;
4381 if (gimple_code (stmt) == GIMPLE_PHI)
4382 lhs = gimple_phi_result (stmt);
4383 else if (is_gimple_assign (stmt))
4384 lhs = gimple_assign_lhs (stmt);
4385 else if (is_gimple_call (stmt))
4386 lhs = gimple_call_lhs (stmt);
4387 else
4388 gcc_unreachable ();
4390 if (TREE_CODE (lhs) != SSA_NAME)
4391 return false;
4393 decl = SSA_NAME_VAR (lhs);
4394 if (decl == NULL_TREE
4395 || TREE_CODE (decl) != PARM_DECL)
4396 return false;
4398 adj = get_adjustment_for_base (adjustments, decl);
4399 if (!adj)
4400 return false;
4402 repl = get_replaced_param_substitute (adj);
4403 name = make_ssa_name (repl, stmt);
4405 if (dump_file)
4407 fprintf (dump_file, "replacing an SSA name of a removed param ");
4408 print_generic_expr (dump_file, lhs, 0);
4409 fprintf (dump_file, " with ");
4410 print_generic_expr (dump_file, name, 0);
4411 fprintf (dump_file, "\n");
4414 if (is_gimple_assign (stmt))
4415 gimple_assign_set_lhs (stmt, name);
4416 else if (is_gimple_call (stmt))
4417 gimple_call_set_lhs (stmt, name);
4418 else
4419 gimple_phi_set_result (stmt, name);
4421 replace_uses_by (lhs, name);
4422 release_ssa_name (lhs);
4423 return true;
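/* For instance (a sketch): if parameter P is removed and a statement
   still defines p_1 = PHI <...>, the result is rebased onto a fresh
   "ISR" temporary created by get_replaced_param_substitute above, and
   all uses of p_1 are redirected to the new SSA name before p_1 is
   released. */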
4426 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4427 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4428 specifies whether the function should care about type incompatibility
4429 between the current and new expressions. If it is false, the function
4430 will leave incompatibility issues to the caller. Return true iff the
4431 expression was modified. */
4433 static bool
4434 sra_ipa_modify_expr (tree *expr, bool convert,
4435 ipa_parm_adjustment_vec adjustments)
4437 int i, len;
4438 struct ipa_parm_adjustment *adj, *cand = NULL;
4439 HOST_WIDE_INT offset, size, max_size;
4440 tree base, src;
4442 len = adjustments.length ();
4444 if (TREE_CODE (*expr) == BIT_FIELD_REF
4445 || TREE_CODE (*expr) == IMAGPART_EXPR
4446 || TREE_CODE (*expr) == REALPART_EXPR)
4448 expr = &TREE_OPERAND (*expr, 0);
4449 convert = true;
4452 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4453 if (!base || size == -1 || max_size == -1)
4454 return false;
4456 if (TREE_CODE (base) == MEM_REF)
4458 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4459 base = TREE_OPERAND (base, 0);
4462 base = get_ssa_base_param (base);
4463 if (!base || TREE_CODE (base) != PARM_DECL)
4464 return false;
4466 for (i = 0; i < len; i++)
4468 adj = &adjustments[i];
4470 if (adj->base == base
4471 && (adj->offset == offset || adj->remove_param))
4473 cand = adj;
4474 break;
4477 if (!cand || cand->copy_param || cand->remove_param)
4478 return false;
4480 if (cand->by_ref)
4481 src = build_simple_mem_ref (cand->reduction);
4482 else
4483 src = cand->reduction;
4485 if (dump_file && (dump_flags & TDF_DETAILS))
4487 fprintf (dump_file, "About to replace expr ");
4488 print_generic_expr (dump_file, *expr, 0);
4489 fprintf (dump_file, " with ");
4490 print_generic_expr (dump_file, src, 0);
4491 fprintf (dump_file, "\n");
4494 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4496 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4497 *expr = vce;
4499 else
4500 *expr = src;
4501 return true;
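/* Usage note (editorial): a matched non-by_ref adjustment replaces the
   expression with the new parameter directly, while a by_ref one is
   rewritten to *REDUCTION via build_simple_mem_ref; when CONVERT is set
   and the types disagree, the result is additionally wrapped in a
   VIEW_CONVERT_EXPR, as can be seen above. */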
4504 /* If the statement pointed to by STMT_PTR contains any expressions that need
4505 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4506 potential type incompatibilities (GSI is used to accommodate conversion
4507 statements and must point to the statement). Return true iff the statement
4508 was modified. */
4510 static bool
4511 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4512 ipa_parm_adjustment_vec adjustments)
4514 gimple stmt = *stmt_ptr;
4515 tree *lhs_p, *rhs_p;
4516 bool any;
4518 if (!gimple_assign_single_p (stmt))
4519 return false;
4521 rhs_p = gimple_assign_rhs1_ptr (stmt);
4522 lhs_p = gimple_assign_lhs_ptr (stmt);
4524 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4525 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4526 if (any)
4528 tree new_rhs = NULL_TREE;
4530 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4532 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4534 /* V_C_Es of constructors can cause trouble (PR 42714). */
4535 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4536 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4537 else
4538 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4539 NULL);
4541 else
4542 new_rhs = fold_build1_loc (gimple_location (stmt),
4543 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4544 *rhs_p);
4546 else if (REFERENCE_CLASS_P (*rhs_p)
4547 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4548 && !is_gimple_reg (*lhs_p))
4549 /* This can happen when an assignment between two single-field
4550 structures is turned into an assignment between two pointers to
4551 scalars (PR 42237). */
4552 new_rhs = *rhs_p;
4554 if (new_rhs)
4556 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4557 true, GSI_SAME_STMT);
4559 gimple_assign_set_rhs_from_tree (gsi, tmp);
4562 return true;
4565 return false;
4568 /* Traverse the function body and perform all modifications described in
4569 ADJUSTMENTS. Return true iff the CFG has been changed. */
4571 static bool
4572 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4574 bool cfg_changed = false;
4575 basic_block bb;
4577 FOR_EACH_BB (bb)
4579 gimple_stmt_iterator gsi;
4581 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4582 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4584 gsi = gsi_start_bb (bb);
4585 while (!gsi_end_p (gsi))
4587 gimple stmt = gsi_stmt (gsi);
4588 bool modified = false;
4589 tree *t;
4590 unsigned i;
4592 switch (gimple_code (stmt))
4594 case GIMPLE_RETURN:
4595 t = gimple_return_retval_ptr (stmt);
4596 if (*t != NULL_TREE)
4597 modified |= sra_ipa_modify_expr (t, true, adjustments);
4598 break;
4600 case GIMPLE_ASSIGN:
4601 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4602 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4603 break;
4605 case GIMPLE_CALL:
4606 /* Operands must be processed before the lhs. */
4607 for (i = 0; i < gimple_call_num_args (stmt); i++)
4609 t = gimple_call_arg_ptr (stmt, i);
4610 modified |= sra_ipa_modify_expr (t, true, adjustments);
4613 if (gimple_call_lhs (stmt))
4615 t = gimple_call_lhs_ptr (stmt);
4616 modified |= sra_ipa_modify_expr (t, false, adjustments);
4617 modified |= replace_removed_params_ssa_names (stmt,
4618 adjustments);
4620 break;
4622 case GIMPLE_ASM:
4623 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4625 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4626 modified |= sra_ipa_modify_expr (t, true, adjustments);
4628 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4630 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4631 modified |= sra_ipa_modify_expr (t, false, adjustments);
4633 break;
4635 default:
4636 break;
4639 if (modified)
4641 update_stmt (stmt);
4642 if (maybe_clean_eh_stmt (stmt)
4643 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4644 cfg_changed = true;
4646 gsi_next (&gsi);
4650 return cfg_changed;
4653 /* Call gimple_debug_bind_reset_value on all debug statements describing
4654 gimple register parameters that are being removed or replaced. */
4656 static void
4657 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4659 int i, len;
4660 gimple_stmt_iterator *gsip = NULL, gsi;
4662 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4664 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4665 gsip = &gsi;
4667 len = adjustments.length ();
4668 for (i = 0; i < len; i++)
4670 struct ipa_parm_adjustment *adj;
4671 imm_use_iterator ui;
4672 gimple stmt, def_temp;
4673 tree name, vexpr, copy = NULL_TREE;
4674 use_operand_p use_p;
4676 adj = &adjustments[i];
4677 if (adj->copy_param || !is_gimple_reg (adj->base))
4678 continue;
4679 name = ssa_default_def (cfun, adj->base);
4680 vexpr = NULL;
4681 if (name)
4682 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4684 /* All other users must have been removed by
4685 ipa_sra_modify_function_body. */
4686 gcc_assert (is_gimple_debug (stmt));
4687 if (vexpr == NULL && gsip != NULL)
4689 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4690 vexpr = make_node (DEBUG_EXPR_DECL);
4691 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4692 NULL);
4693 DECL_ARTIFICIAL (vexpr) = 1;
4694 TREE_TYPE (vexpr) = TREE_TYPE (name);
4695 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4696 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4698 if (vexpr)
4700 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4701 SET_USE (use_p, vexpr);
4703 else
4704 gimple_debug_bind_reset_value (stmt);
4705 update_stmt (stmt);
4707 /* Create a VAR_DECL for debug info purposes. */
4708 if (!DECL_IGNORED_P (adj->base))
4710 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4711 VAR_DECL, DECL_NAME (adj->base),
4712 TREE_TYPE (adj->base));
4713 if (DECL_PT_UID_SET_P (adj->base))
4714 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4715 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4716 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4717 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4718 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4719 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4720 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4721 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4722 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4723 SET_DECL_RTL (copy, 0);
4724 TREE_USED (copy) = 1;
4725 DECL_CONTEXT (copy) = current_function_decl;
4726 add_local_decl (cfun, copy);
4727 DECL_CHAIN (copy) =
4728 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4729 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4731 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4733 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4734 if (vexpr)
4735 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4736 else
4737 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4738 NULL);
4739 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4744 /* Return true iff some caller of NODE passes fewer actual arguments than
4745 there are formal parameters in the current function. */
4747 static bool
4748 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4749 void *data ATTRIBUTE_UNUSED)
4751 struct cgraph_edge *cs;
4752 for (cs = node->callers; cs; cs = cs->next_caller)
4753 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4754 return true;
4756 return false;
4759 /* Convert all callers of NODE. */
4761 static bool
4762 convert_callers_for_node (struct cgraph_node *node,
4763 void *data)
4765 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4766 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4767 struct cgraph_edge *cs;
4769 for (cs = node->callers; cs; cs = cs->next_caller)
4771 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4773 if (dump_file)
4774 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4775 cs->caller->uid, cs->callee->uid,
4776 xstrdup (cgraph_node_name (cs->caller)),
4777 xstrdup (cgraph_node_name (cs->callee)));
4779 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4781 pop_cfun ();
4784 for (cs = node->callers; cs; cs = cs->next_caller)
4785 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4786 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4787 compute_inline_parameters (cs->caller, true);
4788 BITMAP_FREE (recomputed_callers);
4790 return true;
4793 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4795 static void
4796 convert_callers (struct cgraph_node *node, tree old_decl,
4797 ipa_parm_adjustment_vec adjustments)
4799 basic_block this_block;
4801 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4802 &adjustments, false);
4804 if (!encountered_recursive_call)
4805 return;
4807 FOR_EACH_BB (this_block)
4809 gimple_stmt_iterator gsi;
4811 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4813 gimple stmt = gsi_stmt (gsi);
4814 tree call_fndecl;
4815 if (gimple_code (stmt) != GIMPLE_CALL)
4816 continue;
4817 call_fndecl = gimple_call_fndecl (stmt);
4818 if (call_fndecl == old_decl)
4820 if (dump_file)
4821 fprintf (dump_file, "Adjusting recursive call\n");
4822 gimple_call_set_fndecl (stmt, node->symbol.decl);
4823 ipa_modify_call_arguments (NULL, stmt, adjustments);
4828 return;
4831 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4832 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4834 static bool
4835 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4837 struct cgraph_node *new_node;
4838 bool cfg_changed;
4839 vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4841 rebuild_cgraph_edges ();
4842 free_dominance_info (CDI_DOMINATORS);
4843 pop_cfun ();
4845 new_node = cgraph_function_versioning (node, redirect_callers,
4846 NULL,
4847 NULL, false, NULL, NULL, "isra");
4848 redirect_callers.release ();
4850 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4851 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4852 cfg_changed = ipa_sra_modify_function_body (adjustments);
4853 sra_ipa_reset_debug_stmts (adjustments);
4854 convert_callers (new_node, node->symbol.decl, adjustments);
4855 cgraph_make_node_local (new_node);
4856 return cfg_changed;
4859 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4860 its attributes; return true otherwise. NODE is the cgraph node of the
4861 current function. */
4863 static bool
4864 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4866 if (!cgraph_node_can_be_local_p (node))
4868 if (dump_file)
4869 fprintf (dump_file, "Function not local to this compilation unit.\n");
4870 return false;
4873 if (!node->local.can_change_signature)
4875 if (dump_file)
4876 fprintf (dump_file, "Function can not change signature.\n");
4877 return false;
4880 if (!tree_versionable_function_p (node->symbol.decl))
4882 if (dump_file)
4883 fprintf (dump_file, "Function is not versionable.\n");
4884 return false;
4887 if (DECL_VIRTUAL_P (current_function_decl))
4889 if (dump_file)
4890 fprintf (dump_file, "Function is a virtual method.\n");
4891 return false;
4894 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4895 && inline_summary (node)->size >= MAX_INLINE_INSNS_AUTO)
4897 if (dump_file)
4898 fprintf (dump_file, "Function too big to be made truly local.\n");
4899 return false;
4902 if (!node->callers)
4904 if (dump_file)
4905 fprintf (dump_file,
4906 "Function has no callers in this compilation unit.\n");
4907 return false;
4910 if (cfun->stdarg)
4912 if (dump_file)
4913 fprintf (dump_file, "Function uses stdarg. \n");
4914 return false;
4917 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4918 return false;
4920 return true;
4923 /* Perform early interprocedural SRA. */
4925 static unsigned int
4926 ipa_early_sra (void)
4928 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4929 ipa_parm_adjustment_vec adjustments;
4930 int ret = 0;
4932 if (!ipa_sra_preliminary_function_checks (node))
4933 return 0;
4935 sra_initialize ();
4936 sra_mode = SRA_MODE_EARLY_IPA;
4938 if (!find_param_candidates ())
4940 if (dump_file)
4941 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4942 goto simple_out;
4945 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4946 NULL, true))
4948 if (dump_file)
4949 fprintf (dump_file, "There are callers with insufficient number of "
4950 "arguments.\n");
4951 goto simple_out;
4954 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4955 func_param_count
4956 * last_basic_block_for_function (cfun));
4957 final_bbs = BITMAP_ALLOC (NULL);
4959 scan_function ();
4960 if (encountered_apply_args)
4962 if (dump_file)
4963 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4964 goto out;
4967 if (encountered_unchangable_recursive_call)
4969 if (dump_file)
4970 fprintf (dump_file, "Function calls itself with insufficient "
4971 "number of arguments.\n");
4972 goto out;
4975 adjustments = analyze_all_param_acesses ();
4976 if (!adjustments.exists ())
4977 goto out;
4978 if (dump_file)
4979 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4981 if (modify_function (node, adjustments))
4982 ret = TODO_update_ssa | TODO_cleanup_cfg;
4983 else
4984 ret = TODO_update_ssa;
4985 adjustments.release ();
4987 statistics_counter_event (cfun, "Unused parameters deleted",
4988 sra_stats.deleted_unused_parameters);
4989 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4990 sra_stats.scalar_by_ref_to_by_val);
4991 statistics_counter_event (cfun, "Aggregate parameters broken up",
4992 sra_stats.aggregate_params_reduced);
4993 statistics_counter_event (cfun, "Aggregate parameter components created",
4994 sra_stats.param_reductions_created);
4996 out:
4997 BITMAP_FREE (final_bbs);
4998 free (bb_dereferences);
4999 simple_out:
5000 sra_deinitialize ();
5001 return ret;
5004 /* Return true iff early IPA-SRA shall be performed. */
5005 static bool
5006 ipa_early_sra_gate (void)
5008 return flag_ipa_sra && dbg_cnt (eipa_sra);
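/* Note (editorial): flag_ipa_sra corresponds to -fipa-sra, which is
   presumably enabled by default at -O2 and above, and the dbg_cnt
   check allows bisecting the pass with -fdbg-cnt=eipa_sra:N. */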
5011 struct gimple_opt_pass pass_early_ipa_sra =
5014 GIMPLE_PASS,
5015 "eipa_sra", /* name */
5016 OPTGROUP_NONE, /* optinfo_flags */
5017 ipa_early_sra_gate, /* gate */
5018 ipa_early_sra, /* execute */
5019 NULL, /* sub */
5020 NULL, /* next */
5021 0, /* static_pass_number */
5022 TV_IPA_SRA, /* tv_id */
5023 0, /* properties_required */
5024 0, /* properties_provided */
5025 0, /* properties_destroyed */
5026 0, /* todo_flags_start */
5027 TODO_dump_symtab /* todo_flags_finish */