1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2013 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
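/* As an illustrative sketch (source-level only; the pass itself works on
   GIMPLE), SRA turns code such as

     struct point { int x; int y; };

     int
     foo (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   into the equivalent of

     int
     foo (void)
     {
       int p$x = 1;
       int p$y = 2;
       return p$x + p$y;
     }

   where p$x and p$y are independent scalar replacements that the scalar
   optimizers can then propagate and fold freely.  */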
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "target.h"
88 #include "flags.h"
89 #include "dbgcnt.h"
90 #include "tree-inline.h"
91 #include "gimple-pretty-print.h"
92 #include "ipa-inline.h"
94 /* Enumeration of all aggregate reductions we can do. */
95 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
96 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 SRA_MODE_INTRA }; /* late intraprocedural SRA */
99 /* Global variable describing which aggregate reduction we are performing at
100 the moment. */
101 static enum sra_mode sra_mode;
103 struct assign_link;
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106 part). It can also represent a group of accesses that refer to exactly the
107 same fragment of an aggregate (i.e. those that have exactly the same offset
108 and size). Such representatives for a single aggregate, once determined,
109 are linked in a linked list and have the group fields set.
111 Moreover, when doing intraprocedural SRA, a tree is built from those
112 representatives (by the means of first_child and next_sibling pointers), in
113 which all items in a subtree are "within" the root, i.e. their offset is
114 greater or equal to offset of the root and offset+size is smaller or equal
115 to offset+size of the root. Children of an access are sorted by offset.
117 Note that accesses to parts of vector and complex number types are always
118 represented by an access to the whole complex number or a vector. It is the
119 duty of the modifying functions to replace them appropriately. */
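/* For instance (an illustrative sketch; offsets and sizes are in bits and
   assume a 32-bit int), given

     struct inner { int a; int b; };
     struct outer { struct inner in; int c; };

   an access to the whole of IN (offset 0, size 64) would be the parent of
   accesses to IN.A (offset 0, size 32) and IN.B (offset 32, size 32), while
   an access to C (offset 64, size 32) would be IN's next_sibling in the
   tree built for a variable of type struct outer.  */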
121 struct access
123 /* Values returned by `get_ref_base_and_extent' for each component reference.
124 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125 `SIZE = TYPE_SIZE (TREE_TYPE (expr))'. */
126 HOST_WIDE_INT offset;
127 HOST_WIDE_INT size;
128 tree base;
130 /* Expression. It is context dependent so do not use it to create new
131 expressions to access the original aggregate. See PR 42154 for a
132 testcase. */
133 tree expr;
134 /* Type. */
135 tree type;
137 /* The statement this access belongs to. */
138 gimple stmt;
140 /* Next group representative for this aggregate. */
141 struct access *next_grp;
143 /* Pointer to the group representative. Pointer to itself if the struct is
144 the representative. */
145 struct access *group_representative;
147 /* If this access has any children (in terms of the definition above), this
148 points to the first one. */
149 struct access *first_child;
151 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152 described above. In IPA-SRA this is a pointer to the next access
153 belonging to the same group (having the same representative). */
154 struct access *next_sibling;
156 /* Pointers to the first and last element in the linked list of assign
157 links. */
158 struct assign_link *first_link, *last_link;
160 /* Pointer to the next access in the work queue. */
161 struct access *next_queued;
163 /* Replacement variable for this access "region." Never to be accessed
164 directly, always only by the means of get_access_replacement() and only
165 when grp_to_be_replaced flag is set. */
166 tree replacement_decl;
168 /* Is this particular access a write access? */
169 unsigned write : 1;
171 /* Is this access an access to a non-addressable field? */
172 unsigned non_addressable : 1;
174 /* Is this access currently in the work queue? */
175 unsigned grp_queued : 1;
177 /* Does this group contain a write access? This flag is propagated down the
178 access tree. */
179 unsigned grp_write : 1;
181 /* Does this group contain a read access? This flag is propagated down the
182 access tree. */
183 unsigned grp_read : 1;
185 /* Does this group contain a read access that comes from an assignment
186 statement? This flag is propagated down the access tree. */
187 unsigned grp_assignment_read : 1;
189 /* Does this group contain a write access that comes from an assignment
190 statement? This flag is propagated down the access tree. */
191 unsigned grp_assignment_write : 1;
193 /* Does this group contain a read access through a scalar type? This flag is
194 not propagated in the access tree in any direction. */
195 unsigned grp_scalar_read : 1;
197 /* Does this group contain a write access through a scalar type? This flag
198 is not propagated in the access tree in any direction. */
199 unsigned grp_scalar_write : 1;
201 /* Is this access an artificial one created to scalarize some record
202 entirely? */
203 unsigned grp_total_scalarization : 1;
205 /* Other passes of the analysis use this bit to make function
206 analyze_access_subtree create scalar replacements for this group if
207 possible. */
208 unsigned grp_hint : 1;
210 /* Is the subtree rooted in this access fully covered by scalar
211 replacements? */
212 unsigned grp_covered : 1;
214 /* If set to true, this access and all below it in an access tree must not be
215 scalarized. */
216 unsigned grp_unscalarizable_region : 1;
218 /* Whether data have been written to parts of the aggregate covered by this
219 access which is not to be scalarized. This flag is propagated up in the
220 access tree. */
221 unsigned grp_unscalarized_data : 1;
223 /* Does this access and/or group contain a write access through a
224 BIT_FIELD_REF? */
225 unsigned grp_partial_lhs : 1;
227 /* Set when a scalar replacement should be created for this variable. */
228 unsigned grp_to_be_replaced : 1;
230 /* Set when we want a replacement for the sole purpose of having it in
231 generated debug statements. */
232 unsigned grp_to_be_debug_replaced : 1;
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
246 /* Set when we discover that this pointer is not safe to dereference in the
247 caller. */
248 unsigned grp_not_necessarilly_dereferenced : 1;
251 typedef struct access *access_p;
254 /* Alloc pool for allocating access structures. */
255 static alloc_pool access_pool;
257 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
258 are used to propagate subaccesses from rhs to lhs as long as they don't
259 conflict with what is already there. */
260 struct assign_link
262 struct access *lacc, *racc;
263 struct assign_link *next;
266 /* Alloc pool for allocating assign link structures. */
267 static alloc_pool link_pool;
269 /* Base (tree) -> Vector (vec<access_p> *) map. */
270 static struct pointer_map_t *base_access_vec;
272 /* Set of candidates. */
273 static bitmap candidate_bitmap;
274 static htab_t candidates;
276 /* For a candidate UID return the candidate's decl. */
278 static inline tree
279 candidate (unsigned uid)
281 struct tree_decl_minimal t;
282 t.uid = uid;
283 return (tree) htab_find_with_hash (candidates, &t, uid);
286 /* Bitmap of candidates which we should try to entirely scalarize away and
287 those which cannot be (because they are and need to be used as a whole). */
288 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
290 /* Obstack for creation of fancy names. */
291 static struct obstack name_obstack;
293 /* Head of a linked list of accesses that need to have their subaccesses
294 propagated to their assignment counterparts. */
295 static struct access *work_queue_head;
297 /* Number of parameters of the analyzed function when doing early ipa SRA. */
298 static int func_param_count;
300 /* scan_function sets the following to true if it encounters a call to
301 __builtin_apply_args. */
302 static bool encountered_apply_args;
304 /* Set by scan_function when it finds a recursive call. */
305 static bool encountered_recursive_call;
307 /* Set by scan_function when it finds a recursive call with fewer actual
308 arguments than formal parameters. */
309 static bool encountered_unchangable_recursive_call;
311 /* This is a table in which for each basic block and parameter there is a
312 distance (offset + size) in that parameter which is dereferenced and
313 accessed in that BB. */
314 static HOST_WIDE_INT *bb_dereferences;
315 /* Bitmap of BBs that can cause the function to "stop" progressing by
316 returning, throwing externally, looping infinitely or calling a function
317 which might abort etc. */
318 static bitmap final_bbs;
320 /* Representative of no accesses at all. */
321 static struct access no_accesses_representant;
323 /* Predicate to test the special value. */
325 static inline bool
326 no_accesses_p (struct access *access)
328 return access == &no_accesses_representant;
331 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
332 representative fields are dumped, otherwise those which only describe the
333 individual access are. */
335 static struct
337 /* Number of processed aggregates is readily available in
338 analyze_all_variable_accesses and so is not stored here. */
340 /* Number of created scalar replacements. */
341 int replacements;
343 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
344 expression. */
345 int exprs;
347 /* Number of statements created by generate_subtree_copies. */
348 int subtree_copies;
350 /* Number of statements created by load_assign_lhs_subreplacements. */
351 int subreplacements;
353 /* Number of times sra_modify_assign has deleted a statement. */
354 int deleted;
356 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
357 RHS separately due to type conversions or nonexistent matching
358 references. */
359 int separate_lhs_rhs_handling;
361 /* Number of parameters that were removed because they were unused. */
362 int deleted_unused_parameters;
364 /* Number of scalars passed as parameters by reference that have been
365 converted to be passed by value. */
366 int scalar_by_ref_to_by_val;
368 /* Number of aggregate parameters that were replaced by one or more of their
369 components. */
370 int aggregate_params_reduced;
372 /* Number of components created when splitting aggregate parameters. */
373 int param_reductions_created;
374 } sra_stats;
376 static void
377 dump_access (FILE *f, struct access *access, bool grp)
379 fprintf (f, "access { ");
380 fprintf (f, "base = (%d)'", DECL_UID (access->base));
381 print_generic_expr (f, access->base, 0);
382 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
383 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
384 fprintf (f, ", expr = ");
385 print_generic_expr (f, access->expr, 0);
386 fprintf (f, ", type = ");
387 print_generic_expr (f, access->type, 0);
388 if (grp)
389 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
390 "grp_assignment_write = %d, grp_scalar_read = %d, "
391 "grp_scalar_write = %d, grp_total_scalarization = %d, "
392 "grp_hint = %d, grp_covered = %d, "
393 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
394 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
395 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
396 "grp_not_necessarilly_dereferenced = %d\n",
397 access->grp_read, access->grp_write, access->grp_assignment_read,
398 access->grp_assignment_write, access->grp_scalar_read,
399 access->grp_scalar_write, access->grp_total_scalarization,
400 access->grp_hint, access->grp_covered,
401 access->grp_unscalarizable_region, access->grp_unscalarized_data,
402 access->grp_partial_lhs, access->grp_to_be_replaced,
403 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
404 access->grp_not_necessarilly_dereferenced);
405 else
406 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
407 "grp_partial_lhs = %d\n",
408 access->write, access->grp_total_scalarization,
409 access->grp_partial_lhs);
412 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
414 static void
415 dump_access_tree_1 (FILE *f, struct access *access, int level)
419 int i;
421 for (i = 0; i < level; i++)
422 fputs ("* ", dump_file);
424 dump_access (f, access, true);
426 if (access->first_child)
427 dump_access_tree_1 (f, access->first_child, level + 1);
429 access = access->next_sibling;
431 while (access);
434 /* Dump all access trees for a variable, given the pointer to the first root in
435 ACCESS. */
437 static void
438 dump_access_tree (FILE *f, struct access *access)
440 for (; access; access = access->next_grp)
441 dump_access_tree_1 (f, access, 0);
444 /* Return true iff ACC is non-NULL and has subaccesses. */
446 static inline bool
447 access_has_children_p (struct access *acc)
449 return acc && acc->first_child;
452 /* Return true iff ACC is (partly) covered by at least one replacement. */
454 static bool
455 access_has_replacements_p (struct access *acc)
457 struct access *child;
458 if (acc->grp_to_be_replaced)
459 return true;
460 for (child = acc->first_child; child; child = child->next_sibling)
461 if (access_has_replacements_p (child))
462 return true;
463 return false;
466 /* Return a vector of pointers to accesses for the variable given in BASE or
467 NULL if there is none. */
469 static vec<access_p> *
470 get_base_access_vector (tree base)
472 void **slot;
474 slot = pointer_map_contains (base_access_vec, base);
475 if (!slot)
476 return NULL;
477 else
478 return *(vec<access_p> **) slot;
481 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
482 in ACCESS. Return NULL if it cannot be found. */
484 static struct access *
485 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
486 HOST_WIDE_INT size)
488 while (access && (access->offset != offset || access->size != size))
490 struct access *child = access->first_child;
492 while (child && (child->offset + child->size <= offset))
493 child = child->next_sibling;
494 access = child;
497 return access;
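/* For example (a sketch), searching for <offset 32, size 16> in a tree
   rooted at <0, 64> with children <0, 32> and <32, 32>: the root does not
   match, the <0, 32> child is skipped because 0 + 32 <= 32, so the search
   descends into <32, 32> and continues in its children until an access
   with exactly offset 32 and size 16 is found or the chain runs out, in
   which case NULL is returned.  */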
500 /* Return the first group representative for DECL or NULL if none exists. */
502 static struct access *
503 get_first_repr_for_decl (tree base)
505 vec<access_p> *access_vec;
507 access_vec = get_base_access_vector (base);
508 if (!access_vec)
509 return NULL;
511 return (*access_vec)[0];
514 /* Find an access representative for the variable BASE and given OFFSET and
515 SIZE. Requires that access trees have already been built. Return NULL if
516 it cannot be found. */
518 static struct access *
519 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
520 HOST_WIDE_INT size)
522 struct access *access;
524 access = get_first_repr_for_decl (base);
525 while (access && (access->offset + access->size <= offset))
526 access = access->next_grp;
527 if (!access)
528 return NULL;
530 return find_access_in_subtree (access, offset, size);
533 /* Add LINK to the linked list of assign links of RACC. */
534 static void
535 add_link_to_rhs (struct access *racc, struct assign_link *link)
537 gcc_assert (link->racc == racc);
539 if (!racc->first_link)
541 gcc_assert (!racc->last_link);
542 racc->first_link = link;
544 else
545 racc->last_link->next = link;
547 racc->last_link = link;
548 link->next = NULL;
551 /* Move all link structures in their linked list in OLD_RACC to the linked list
552 in NEW_RACC. */
553 static void
554 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
556 if (!old_racc->first_link)
558 gcc_assert (!old_racc->last_link);
559 return;
562 if (new_racc->first_link)
564 gcc_assert (!new_racc->last_link->next);
565 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
567 new_racc->last_link->next = old_racc->first_link;
568 new_racc->last_link = old_racc->last_link;
570 else
572 gcc_assert (!new_racc->last_link);
574 new_racc->first_link = old_racc->first_link;
575 new_racc->last_link = old_racc->last_link;
577 old_racc->first_link = old_racc->last_link = NULL;
580 /* Add ACCESS to the work queue (which is actually a stack). */
582 static void
583 add_access_to_work_queue (struct access *access)
585 if (!access->grp_queued)
587 gcc_assert (!access->next_queued);
588 access->next_queued = work_queue_head;
589 access->grp_queued = 1;
590 work_queue_head = access;
594 /* Pop an access from the work queue, and return it, assuming there is one. */
596 static struct access *
597 pop_access_from_work_queue (void)
599 struct access *access = work_queue_head;
601 work_queue_head = access->next_queued;
602 access->next_queued = NULL;
603 access->grp_queued = 0;
604 return access;
608 /* Allocate necessary structures. */
610 static void
611 sra_initialize (void)
613 candidate_bitmap = BITMAP_ALLOC (NULL);
614 candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
615 uid_decl_map_hash, uid_decl_map_eq, NULL);
616 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
617 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
618 gcc_obstack_init (&name_obstack);
619 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
620 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
621 base_access_vec = pointer_map_create ();
622 memset (&sra_stats, 0, sizeof (sra_stats));
623 encountered_apply_args = false;
624 encountered_recursive_call = false;
625 encountered_unchangable_recursive_call = false;
628 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
630 static bool
631 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
632 void *data ATTRIBUTE_UNUSED)
634 vec<access_p> *access_vec = (vec<access_p> *) *value;
635 vec_free (access_vec);
636 return true;
639 /* Deallocate all general structures. */
641 static void
642 sra_deinitialize (void)
644 BITMAP_FREE (candidate_bitmap);
645 htab_delete (candidates);
646 BITMAP_FREE (should_scalarize_away_bitmap);
647 BITMAP_FREE (cannot_scalarize_away_bitmap);
648 free_alloc_pool (access_pool);
649 free_alloc_pool (link_pool);
650 obstack_free (&name_obstack, NULL);
652 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
653 pointer_map_destroy (base_access_vec);
656 /* Remove DECL from candidates for SRA and write REASON to the dump file if
657 there is one. */
658 static void
659 disqualify_candidate (tree decl, const char *reason)
661 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
662 htab_clear_slot (candidates,
663 htab_find_slot_with_hash (candidates, decl,
664 DECL_UID (decl), NO_INSERT));
666 if (dump_file && (dump_flags & TDF_DETAILS))
668 fprintf (dump_file, "! Disqualifying ");
669 print_generic_expr (dump_file, decl, 0);
670 fprintf (dump_file, " - %s\n", reason);
674 /* Return true iff the type contains a field or an element which does not allow
675 scalarization. */
677 static bool
678 type_internals_preclude_sra_p (tree type, const char **msg)
680 tree fld;
681 tree et;
683 switch (TREE_CODE (type))
685 case RECORD_TYPE:
686 case UNION_TYPE:
687 case QUAL_UNION_TYPE:
688 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
689 if (TREE_CODE (fld) == FIELD_DECL)
691 tree ft = TREE_TYPE (fld);
693 if (TREE_THIS_VOLATILE (fld))
695 *msg = "volatile structure field";
696 return true;
698 if (!DECL_FIELD_OFFSET (fld))
700 *msg = "no structure field offset";
701 return true;
703 if (!DECL_SIZE (fld))
705 *msg = "zero structure field size";
706 return true;
708 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
710 *msg = "structure field offset not fixed";
711 return true;
713 if (!host_integerp (DECL_SIZE (fld), 1))
715 *msg = "structure field size not fixed";
716 return true;
718 if (!host_integerp (bit_position (fld), 0))
720 *msg = "structure field size too big";
721 return true;
723 if (AGGREGATE_TYPE_P (ft)
724 && int_bit_position (fld) % BITS_PER_UNIT != 0)
726 *msg = "structure field is bit field";
727 return true;
730 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
731 return true;
734 return false;
736 case ARRAY_TYPE:
737 et = TREE_TYPE (type);
739 if (TYPE_VOLATILE (et))
741 *msg = "element type is volatile";
742 return true;
745 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
746 return true;
748 return false;
750 default:
751 return false;
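/* For instance (an illustrative sketch), a variable of the following type
   would be disqualified with the "volatile structure field" message:

     struct s
     {
       int a;
       volatile int b;
     };

   whereas a plain record of ints and floats passes all of the checks
   above.  */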
755 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
756 base variable if it is. Return T if it is not an SSA_NAME. */
758 static tree
759 get_ssa_base_param (tree t)
761 if (TREE_CODE (t) == SSA_NAME)
763 if (SSA_NAME_IS_DEFAULT_DEF (t))
764 return SSA_NAME_VAR (t);
765 else
766 return NULL_TREE;
768 return t;
771 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
772 belongs to, unless the BB has already been marked as potentially
773 final. */
775 static void
776 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
778 basic_block bb = gimple_bb (stmt);
779 int idx, parm_index = 0;
780 tree parm;
782 if (bitmap_bit_p (final_bbs, bb->index))
783 return;
785 for (parm = DECL_ARGUMENTS (current_function_decl);
786 parm && parm != base;
787 parm = DECL_CHAIN (parm))
788 parm_index++;
790 gcc_assert (parm_index < func_param_count);
792 idx = bb->index * func_param_count + parm_index;
793 if (bb_dereferences[idx] < dist)
794 bb_dereferences[idx] = dist;
797 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
798 the three fields. Also add it to the vector of accesses corresponding to
799 the base. Finally, return the new access. */
801 static struct access *
802 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
804 vec<access_p> *v;
805 struct access *access;
806 void **slot;
808 access = (struct access *) pool_alloc (access_pool);
809 memset (access, 0, sizeof (struct access));
810 access->base = base;
811 access->offset = offset;
812 access->size = size;
814 slot = pointer_map_contains (base_access_vec, base);
815 if (slot)
816 v = (vec<access_p> *) *slot;
817 else
818 vec_alloc (v, 32);
820 v->safe_push (access);
822 *((vec<access_p> **)
823 pointer_map_insert (base_access_vec, base)) = v;
825 return access;
828 /* Create and insert access for EXPR. Return created access, or NULL if it is
829 not possible. */
831 static struct access *
832 create_access (tree expr, gimple stmt, bool write)
834 struct access *access;
835 HOST_WIDE_INT offset, size, max_size;
836 tree base = expr;
837 bool ptr, unscalarizable_region = false;
839 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
841 if (sra_mode == SRA_MODE_EARLY_IPA
842 && TREE_CODE (base) == MEM_REF)
844 base = get_ssa_base_param (TREE_OPERAND (base, 0));
845 if (!base)
846 return NULL;
847 ptr = true;
849 else
850 ptr = false;
852 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
853 return NULL;
855 if (sra_mode == SRA_MODE_EARLY_IPA)
857 if (size < 0 || size != max_size)
859 disqualify_candidate (base, "Encountered a variable sized access.");
860 return NULL;
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
865 disqualify_candidate (base, "Encountered a bit-field access.");
866 return NULL;
868 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
870 if (ptr)
871 mark_parm_dereference (base, offset + size, stmt);
873 else
875 if (size != max_size)
877 size = max_size;
878 unscalarizable_region = true;
880 if (size < 0)
882 disqualify_candidate (base, "Encountered an unconstrained access.");
883 return NULL;
887 access = create_access_1 (base, offset, size);
888 access->expr = expr;
889 access->type = TREE_TYPE (expr);
890 access->write = write;
891 access->grp_unscalarizable_region = unscalarizable_region;
892 access->stmt = stmt;
894 if (TREE_CODE (expr) == COMPONENT_REF
895 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
896 access->non_addressable = 1;
898 return access;
902 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
903 register types or (recursively) records with only these two kinds of fields.
904 It also returns false if any of these records contains a bit-field. */
906 static bool
907 type_consists_of_records_p (tree type)
909 tree fld;
911 if (TREE_CODE (type) != RECORD_TYPE)
912 return false;
914 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
915 if (TREE_CODE (fld) == FIELD_DECL)
917 tree ft = TREE_TYPE (fld);
919 if (DECL_BIT_FIELD (fld))
920 return false;
922 if (!is_gimple_reg_type (ft)
923 && !type_consists_of_records_p (ft))
924 return false;
927 return true;
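/* Illustrative sketch: the first type below satisfies this predicate, the
   second does not because of the bit-field:

     struct ok { int i; struct { float f; } nested; };
     struct not_ok { int i : 3; int j; };
*/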
930 /* Create total_scalarization accesses for all scalar type fields in DECL,
931 which must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
932 must be the top-most VAR_DECL representing the variable, OFFSET must be the
933 offset of DECL within BASE. REF must be the memory reference expression for
934 the given decl. */
936 static void
937 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
938 tree ref)
940 tree fld, decl_type = TREE_TYPE (decl);
942 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
943 if (TREE_CODE (fld) == FIELD_DECL)
945 HOST_WIDE_INT pos = offset + int_bit_position (fld);
946 tree ft = TREE_TYPE (fld);
947 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
948 NULL_TREE);
950 if (is_gimple_reg_type (ft))
952 struct access *access;
953 HOST_WIDE_INT size;
955 size = tree_low_cst (DECL_SIZE (fld), 1);
956 access = create_access_1 (base, pos, size);
957 access->expr = nref;
958 access->type = ft;
959 access->grp_total_scalarization = 1;
960 /* Accesses for intraprocedural SRA can have their stmt NULL. */
962 else
963 completely_scalarize_record (base, fld, pos, nref);
967 /* Create total_scalarization accesses for all scalar type fields in VAR and
968 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
969 type_consists_of_records_p. */
971 static void
972 completely_scalarize_var (tree var)
974 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
975 struct access *access;
977 access = create_access_1 (var, 0, size);
978 access->expr = var;
979 access->type = TREE_TYPE (var);
980 access->grp_total_scalarization = 1;
982 completely_scalarize_record (var, var, 0, var);
985 /* Search the given tree for a declaration by skipping handled components and
986 exclude it from the candidates. */
988 static void
989 disqualify_base_of_expr (tree t, const char *reason)
991 t = get_base_address (t);
992 if (sra_mode == SRA_MODE_EARLY_IPA
993 && TREE_CODE (t) == MEM_REF)
994 t = get_ssa_base_param (TREE_OPERAND (t, 0));
996 if (t && DECL_P (t))
997 disqualify_candidate (t, reason);
1000 /* Scan expression EXPR and create access structures for all accesses to
1001 candidates for scalarization. Return the created access or NULL if none is
1002 created. */
1004 static struct access *
1005 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1007 struct access *ret = NULL;
1008 bool partial_ref;
1010 if (TREE_CODE (expr) == BIT_FIELD_REF
1011 || TREE_CODE (expr) == IMAGPART_EXPR
1012 || TREE_CODE (expr) == REALPART_EXPR)
1014 expr = TREE_OPERAND (expr, 0);
1015 partial_ref = true;
1017 else
1018 partial_ref = false;
1020 /* We need to dive through V_C_Es in order to get the size of the operand
1021 and not the result type. Ada produces such statements. We are also
1022 capable of handling the topmost V_C_E but not any of those buried in other
1023 handled components. */
1024 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1025 expr = TREE_OPERAND (expr, 0);
1027 if (contains_view_convert_expr_p (expr))
1029 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1030 "component.");
1031 return NULL;
1033 if (TREE_THIS_VOLATILE (expr))
1035 disqualify_base_of_expr (expr, "part of a volatile reference.");
1036 return NULL;
1039 switch (TREE_CODE (expr))
1041 case MEM_REF:
1042 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1043 && sra_mode != SRA_MODE_EARLY_IPA)
1044 return NULL;
1045 /* fall through */
1046 case VAR_DECL:
1047 case PARM_DECL:
1048 case RESULT_DECL:
1049 case COMPONENT_REF:
1050 case ARRAY_REF:
1051 case ARRAY_RANGE_REF:
1052 ret = create_access (expr, stmt, write);
1053 break;
1055 default:
1056 break;
1059 if (write && partial_ref && ret)
1060 ret->grp_partial_lhs = 1;
1062 return ret;
1065 /* Scan expression EXPR and create access structures for all accesses to
1066 candidates for scalarization. Return true if any access has been inserted.
1067 STMT must be the statement from which the expression is taken, WRITE must be
1068 true if the expression is a store and false otherwise. */
1070 static bool
1071 build_access_from_expr (tree expr, gimple stmt, bool write)
1073 struct access *access;
1075 access = build_access_from_expr_1 (expr, stmt, write);
1076 if (access)
1078 /* This means the aggregate is accessed as a whole in a way other than an
1079 assign statement and thus cannot be removed even if we had a scalar
1080 replacement for everything. */
1081 if (cannot_scalarize_away_bitmap)
1082 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1083 return true;
1085 return false;
1088 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1089 modes in which it matters, return true iff they have been disqualified. RHS
1090 may be NULL, in which case it is ignored. If we scalarize an aggregate in
1091 intra-SRA we may need to add statements after each statement. This is not
1092 possible if a statement unconditionally has to end the basic block. */
1093 static bool
1094 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1096 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1097 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1099 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1100 if (rhs)
1101 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1102 return true;
1104 return false;
1107 /* Scan expressions occurring in STMT, create access structures for all accesses
1108 to candidates for scalarization and remove those candidates which occur in
1109 statements or expressions that prevent them from being split apart. Return
1110 true if any access has been inserted. */
1112 static bool
1113 build_accesses_from_assign (gimple stmt)
1115 tree lhs, rhs;
1116 struct access *lacc, *racc;
1118 if (!gimple_assign_single_p (stmt)
1119 /* Scope clobbers don't influence scalarization. */
1120 || gimple_clobber_p (stmt))
1121 return false;
1123 lhs = gimple_assign_lhs (stmt);
1124 rhs = gimple_assign_rhs1 (stmt);
1126 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1127 return false;
1129 racc = build_access_from_expr_1 (rhs, stmt, false);
1130 lacc = build_access_from_expr_1 (lhs, stmt, true);
1132 if (lacc)
1133 lacc->grp_assignment_write = 1;
1135 if (racc)
1137 racc->grp_assignment_read = 1;
1138 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1139 && !is_gimple_reg_type (racc->type))
1140 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1143 if (lacc && racc
1144 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1145 && !lacc->grp_unscalarizable_region
1146 && !racc->grp_unscalarizable_region
1147 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1148 && lacc->size == racc->size
1149 && useless_type_conversion_p (lacc->type, racc->type))
1151 struct assign_link *link;
1153 link = (struct assign_link *) pool_alloc (link_pool);
1154 memset (link, 0, sizeof (struct assign_link));
1156 link->lacc = lacc;
1157 link->racc = racc;
1159 add_link_to_rhs (racc, link);
1162 return lacc || racc;
1165 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1166 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1168 static bool
1169 asm_visit_addr (gimple, tree op, tree, void *)
1171 op = get_base_address (op);
1172 if (op
1173 && DECL_P (op))
1174 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1176 return false;
1179 /* Return true iff callsite CALL has at least as many actual arguments as there
1180 are formal parameters of the function currently processed by IPA-SRA. */
1182 static inline bool
1183 callsite_has_enough_arguments_p (gimple call)
1185 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1188 /* Scan the function and look for interesting expressions and create access
1189 structures for them. Return true iff any access is created. */
1191 static bool
1192 scan_function (void)
1194 basic_block bb;
1195 bool ret = false;
1197 FOR_EACH_BB (bb)
1199 gimple_stmt_iterator gsi;
1200 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1202 gimple stmt = gsi_stmt (gsi);
1203 tree t;
1204 unsigned i;
1206 if (final_bbs && stmt_can_throw_external (stmt))
1207 bitmap_set_bit (final_bbs, bb->index);
1208 switch (gimple_code (stmt))
1210 case GIMPLE_RETURN:
1211 t = gimple_return_retval (stmt);
1212 if (t != NULL_TREE)
1213 ret |= build_access_from_expr (t, stmt, false);
1214 if (final_bbs)
1215 bitmap_set_bit (final_bbs, bb->index);
1216 break;
1218 case GIMPLE_ASSIGN:
1219 ret |= build_accesses_from_assign (stmt);
1220 break;
1222 case GIMPLE_CALL:
1223 for (i = 0; i < gimple_call_num_args (stmt); i++)
1224 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1225 stmt, false);
1227 if (sra_mode == SRA_MODE_EARLY_IPA)
1229 tree dest = gimple_call_fndecl (stmt);
1230 int flags = gimple_call_flags (stmt);
1232 if (dest)
1234 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1235 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1236 encountered_apply_args = true;
1237 if (cgraph_get_node (dest)
1238 == cgraph_get_node (current_function_decl))
1240 encountered_recursive_call = true;
1241 if (!callsite_has_enough_arguments_p (stmt))
1242 encountered_unchangable_recursive_call = true;
1246 if (final_bbs
1247 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1248 bitmap_set_bit (final_bbs, bb->index);
1251 t = gimple_call_lhs (stmt);
1252 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1253 ret |= build_access_from_expr (t, stmt, true);
1254 break;
1256 case GIMPLE_ASM:
1257 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1258 asm_visit_addr);
1259 if (final_bbs)
1260 bitmap_set_bit (final_bbs, bb->index);
1262 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1264 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1265 ret |= build_access_from_expr (t, stmt, false);
1267 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1269 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1270 ret |= build_access_from_expr (t, stmt, true);
1272 break;
1274 default:
1275 break;
1280 return ret;
1283 /* Helper of the QSORT function. There are pointers to accesses in the array. An
1284 access is considered smaller than another if it has a smaller offset or if the
1285 offsets are the same but its size is bigger. */
1287 static int
1288 compare_access_positions (const void *a, const void *b)
1290 const access_p *fp1 = (const access_p *) a;
1291 const access_p *fp2 = (const access_p *) b;
1292 const access_p f1 = *fp1;
1293 const access_p f2 = *fp2;
1295 if (f1->offset != f2->offset)
1296 return f1->offset < f2->offset ? -1 : 1;
1298 if (f1->size == f2->size)
1300 if (f1->type == f2->type)
1301 return 0;
1302 /* Put any non-aggregate type before any aggregate type. */
1303 else if (!is_gimple_reg_type (f1->type)
1304 && is_gimple_reg_type (f2->type))
1305 return 1;
1306 else if (is_gimple_reg_type (f1->type)
1307 && !is_gimple_reg_type (f2->type))
1308 return -1;
1309 /* Put any complex or vector type before any other scalar type. */
1310 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1311 && TREE_CODE (f1->type) != VECTOR_TYPE
1312 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1313 || TREE_CODE (f2->type) == VECTOR_TYPE))
1314 return 1;
1315 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1316 || TREE_CODE (f1->type) == VECTOR_TYPE)
1317 && TREE_CODE (f2->type) != COMPLEX_TYPE
1318 && TREE_CODE (f2->type) != VECTOR_TYPE)
1319 return -1;
1320 /* Put the integral type with the bigger precision first. */
1321 else if (INTEGRAL_TYPE_P (f1->type)
1322 && INTEGRAL_TYPE_P (f2->type))
1323 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1324 /* Put any integral type with non-full precision last. */
1325 else if (INTEGRAL_TYPE_P (f1->type)
1326 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1327 != TYPE_PRECISION (f1->type)))
1328 return 1;
1329 else if (INTEGRAL_TYPE_P (f2->type)
1330 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1331 != TYPE_PRECISION (f2->type)))
1332 return -1;
1333 /* Stabilize the sort. */
1334 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1337 /* We want the bigger accesses first, thus the opposite operator in the next
1338 line: */
1339 return f1->size > f2->size ? -1 : 1;
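/* As a sketch of the resulting order (offsets and sizes in bits): accesses
   <offset 0, size 8>, <0, 4> and <4, 4> sort as <0, 8>, <0, 4>, <4, 4>,
   i.e. smaller offsets first and, for equal offsets, the bigger access
   first, so that a potential group representative always precedes the
   accesses it covers.  */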
1343 /* Append a name of the declaration to the name obstack. A helper function for
1344 make_fancy_name. */
1346 static void
1347 make_fancy_decl_name (tree decl)
1349 char buffer[32];
1351 tree name = DECL_NAME (decl);
1352 if (name)
1353 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1354 IDENTIFIER_LENGTH (name));
1355 else
1357 sprintf (buffer, "D%u", DECL_UID (decl));
1358 obstack_grow (&name_obstack, buffer, strlen (buffer));
1362 /* Helper for make_fancy_name. */
1364 static void
1365 make_fancy_name_1 (tree expr)
1367 char buffer[32];
1368 tree index;
1370 if (DECL_P (expr))
1372 make_fancy_decl_name (expr);
1373 return;
1376 switch (TREE_CODE (expr))
1378 case COMPONENT_REF:
1379 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1380 obstack_1grow (&name_obstack, '$');
1381 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1382 break;
1384 case ARRAY_REF:
1385 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1386 obstack_1grow (&name_obstack, '$');
1387 /* Arrays with only one element may not have a constant as their
1388 index. */
1389 index = TREE_OPERAND (expr, 1);
1390 if (TREE_CODE (index) != INTEGER_CST)
1391 break;
1392 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1393 obstack_grow (&name_obstack, buffer, strlen (buffer));
1394 break;
1396 case ADDR_EXPR:
1397 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1398 break;
1400 case MEM_REF:
1401 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1402 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1404 obstack_1grow (&name_obstack, '$');
1405 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1406 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1407 obstack_grow (&name_obstack, buffer, strlen (buffer));
1409 break;
1411 case BIT_FIELD_REF:
1412 case REALPART_EXPR:
1413 case IMAGPART_EXPR:
1414 gcc_unreachable (); /* we treat these as scalars. */
1415 break;
1416 default:
1417 break;
1421 /* Create a human readable name for a replacement variable based on EXPR. */
1423 static char *
1424 make_fancy_name (tree expr)
1426 make_fancy_name_1 (expr);
1427 obstack_1grow (&name_obstack, '\0');
1428 return XOBFINISH (&name_obstack, char *);
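/* For example (a sketch), for the expression foo.bar[3].baz this produces
   the name "foo$bar$3$baz", which create_access_replacement then uses to
   name the replacement variable.  */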
1431 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1432 EXP_TYPE at the given OFFSET. If BASE is something for which
1433 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1434 to insert new statements either before or below the current one as specified
1435 by INSERT_AFTER. This function is not capable of handling bitfields.
1437 BASE must be either a declaration or a memory reference that has correct
1438 alignment information embedded in it (e.g. a pre-existing one in SRA). */
1440 tree
1441 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1442 tree exp_type, gimple_stmt_iterator *gsi,
1443 bool insert_after)
1445 tree prev_base = base;
1446 tree off;
1447 tree mem_ref;
1448 HOST_WIDE_INT base_offset;
1449 unsigned HOST_WIDE_INT misalign;
1450 unsigned int align;
1452 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1453 get_object_alignment_1 (base, &align, &misalign);
1454 base = get_addr_base_and_unit_offset (base, &base_offset);
1456 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1457 offset such as array[var_index]. */
1458 if (!base)
1460 gimple stmt;
1461 tree tmp, addr;
1463 gcc_checking_assert (gsi);
1464 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1465 addr = build_fold_addr_expr (unshare_expr (prev_base));
1466 STRIP_USELESS_TYPE_CONVERSION (addr);
1467 stmt = gimple_build_assign (tmp, addr);
1468 gimple_set_location (stmt, loc);
1469 if (insert_after)
1470 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1471 else
1472 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1474 off = build_int_cst (reference_alias_ptr_type (prev_base),
1475 offset / BITS_PER_UNIT);
1476 base = tmp;
1478 else if (TREE_CODE (base) == MEM_REF)
1480 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1481 base_offset + offset / BITS_PER_UNIT);
1482 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1483 base = unshare_expr (TREE_OPERAND (base, 0));
1485 else
1487 off = build_int_cst (reference_alias_ptr_type (base),
1488 base_offset + offset / BITS_PER_UNIT);
1489 base = build_fold_addr_expr (unshare_expr (base));
1492 misalign = (misalign + offset) & (align - 1);
1493 if (misalign != 0)
1494 align = (misalign & -misalign);
1495 if (align < TYPE_ALIGN (exp_type))
1496 exp_type = build_aligned_type (exp_type, align);
1498 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1499 if (TREE_THIS_VOLATILE (prev_base))
1500 TREE_THIS_VOLATILE (mem_ref) = 1;
1501 if (TREE_SIDE_EFFECTS (prev_base))
1502 TREE_SIDE_EFFECTS (mem_ref) = 1;
1503 return mem_ref;
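/* E.g. (a sketch assuming a 32-bit int), for BASE being a VAR_DECL VAR of
   type

     struct s { int a; int b; };

   OFFSET 32 and EXP_TYPE int, this function builds the equivalent of

     MEM[(int *) &var + 4B]

   i.e. a MEM_REF of an ADDR_EXPR of the variable with a byte offset of
   4.  */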
1506 /* Construct a memory reference to a part of an aggregate BASE at the given
1507 OFFSET and of the same type as MODEL. In case this is a reference to a
1508 bit-field, the function will replicate the last component_ref of model's
1509 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1510 build_ref_for_offset. */
1512 static tree
1513 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1514 struct access *model, gimple_stmt_iterator *gsi,
1515 bool insert_after)
1517 if (TREE_CODE (model->expr) == COMPONENT_REF
1518 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1520 /* This access represents a bit-field. */
1521 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1523 offset -= int_bit_position (fld);
1524 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1525 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1526 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1527 NULL_TREE);
1529 else
1530 return build_ref_for_offset (loc, base, offset, model->type,
1531 gsi, insert_after);
1534 /* Attempt to build a memory reference that we could put into a gimple
1535 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1536 create statements and returns NULL instead. This function also ignores
1537 alignment issues and so its results should never end up in non-debug
1538 statements. */
1540 static tree
1541 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1542 struct access *model)
1544 HOST_WIDE_INT base_offset;
1545 tree off;
1547 if (TREE_CODE (model->expr) == COMPONENT_REF
1548 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1549 return NULL_TREE;
1551 base = get_addr_base_and_unit_offset (base, &base_offset);
1552 if (!base)
1553 return NULL_TREE;
1554 if (TREE_CODE (base) == MEM_REF)
1556 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1557 base_offset + offset / BITS_PER_UNIT);
1558 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1559 base = unshare_expr (TREE_OPERAND (base, 0));
1561 else
1563 off = build_int_cst (reference_alias_ptr_type (base),
1564 base_offset + offset / BITS_PER_UNIT);
1565 base = build_fold_addr_expr (unshare_expr (base));
1568 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1571 /* Construct a memory reference consisting of component_refs and array_refs to
1572 a part of an aggregate *RES (which is of type TYPE). The requested part
1573 should have type EXP_TYPE at the given OFFSET. This function might not
1574 succeed, it returns true when it does and only then *RES points to something
1575 meaningful. This function should be used only to build expressions that we
1576 might need to present to the user (e.g. in warnings). In all other situations,
1577 build_ref_for_model or build_ref_for_offset should be used instead. */
1579 static bool
1580 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1581 tree exp_type)
1583 while (1)
1585 tree fld;
1586 tree tr_size, index, minidx;
1587 HOST_WIDE_INT el_size;
1589 if (offset == 0 && exp_type
1590 && types_compatible_p (exp_type, type))
1591 return true;
1593 switch (TREE_CODE (type))
1595 case UNION_TYPE:
1596 case QUAL_UNION_TYPE:
1597 case RECORD_TYPE:
1598 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1600 HOST_WIDE_INT pos, size;
1601 tree tr_pos, expr, *expr_ptr;
1603 if (TREE_CODE (fld) != FIELD_DECL)
1604 continue;
1606 tr_pos = bit_position (fld);
1607 if (!tr_pos || !host_integerp (tr_pos, 1))
1608 continue;
1609 pos = TREE_INT_CST_LOW (tr_pos);
1610 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1611 tr_size = DECL_SIZE (fld);
1612 if (!tr_size || !host_integerp (tr_size, 1))
1613 continue;
1614 size = TREE_INT_CST_LOW (tr_size);
1615 if (size == 0)
1617 if (pos != offset)
1618 continue;
1620 else if (pos > offset || (pos + size) <= offset)
1621 continue;
1623 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1624 NULL_TREE);
1625 expr_ptr = &expr;
1626 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1627 offset - pos, exp_type))
1629 *res = expr;
1630 return true;
1633 return false;
1635 case ARRAY_TYPE:
1636 tr_size = TYPE_SIZE (TREE_TYPE (type));
1637 if (!tr_size || !host_integerp (tr_size, 1))
1638 return false;
1639 el_size = tree_low_cst (tr_size, 1);
1641 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1642 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1643 return false;
1644 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1645 if (!integer_zerop (minidx))
1646 index = int_const_binop (PLUS_EXPR, index, minidx);
1647 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1648 NULL_TREE, NULL_TREE);
1649 offset = offset % el_size;
1650 type = TREE_TYPE (type);
1651 break;
1653 default:
1654 if (offset != 0)
1655 return false;
1657 if (exp_type)
1658 return false;
1659 else
1660 return true;
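/* A sketch of the recursion (assuming a 32-bit int): for *RES of type

     struct s { int a[4]; };

   OFFSET 64 and EXP_TYPE int, the RECORD_TYPE case selects field A, the
   ARRAY_TYPE case computes index 64 / 32 = 2, and *RES ends up as the
   equivalent of res.a[2].  */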
1665 /* Return true iff TYPE is a stdarg va_list type. */
1667 static inline bool
1668 is_va_list_type (tree type)
1670 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1673 /* Print a message to the dump file explaining why a variable was rejected. */
1675 static void
1676 reject (tree var, const char *msg)
1678 if (dump_file && (dump_flags & TDF_DETAILS))
1680 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1681 print_generic_expr (dump_file, var, 0);
1682 fprintf (dump_file, "\n");
1686 /* Return true if VAR is a candidate for SRA. */
1688 static bool
1689 maybe_add_sra_candidate (tree var)
1691 tree type = TREE_TYPE (var);
1692 const char *msg;
1693 void **slot;
1695 if (!AGGREGATE_TYPE_P (type))
1697 reject (var, "not aggregate");
1698 return false;
1700 if (needs_to_live_in_memory (var))
1702 reject (var, "needs to live in memory");
1703 return false;
1705 if (TREE_THIS_VOLATILE (var))
1707 reject (var, "is volatile");
1708 return false;
1710 if (!COMPLETE_TYPE_P (type))
1712 reject (var, "has incomplete type");
1713 return false;
1715 if (!host_integerp (TYPE_SIZE (type), 1))
1717 reject (var, "type size not fixed");
1718 return false;
1720 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1722 reject (var, "type size is zero");
1723 return false;
1725 if (type_internals_preclude_sra_p (type, &msg))
1727 reject (var, msg);
1728 return false;
1730 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1731 we also want to schedule it rather late. Thus we ignore it in
1732 the early pass. */
1733 (sra_mode == SRA_MODE_EARLY_INTRA
1734 && is_va_list_type (type)))
1736 reject (var, "is va_list");
1737 return false;
1740 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1741 slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
1742 *slot = (void *) var;
1744 if (dump_file && (dump_flags & TDF_DETAILS))
1746 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1747 print_generic_expr (dump_file, var, 0);
1748 fprintf (dump_file, "\n");
1751 return true;
1754 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1755 those variables whose type is suitable for scalarization. */
1757 static bool
1758 find_var_candidates (void)
1760 tree var, parm;
1761 unsigned int i;
1762 bool ret = false;
1764 for (parm = DECL_ARGUMENTS (current_function_decl);
1765 parm;
1766 parm = DECL_CHAIN (parm))
1767 ret |= maybe_add_sra_candidate (parm);
1769 FOR_EACH_LOCAL_DECL (cfun, i, var)
1771 if (TREE_CODE (var) != VAR_DECL)
1772 continue;
1774 ret |= maybe_add_sra_candidate (var);
1777 return ret;
1780 /* Sort all accesses for the given variable, check for partial overlaps and
1781 return NULL if there are any. If there are none, pick a representative for
1782 each combination of offset and size and create a linked list out of them.
1783 Return the pointer to the first representative and make sure it is the first
1784 one in the vector of accesses. */
1786 static struct access *
1787 sort_and_splice_var_accesses (tree var)
1789 int i, j, access_count;
1790 struct access *res, **prev_acc_ptr = &res;
1791 vec<access_p> *access_vec;
1792 bool first = true;
1793 HOST_WIDE_INT low = -1, high = 0;
1795 access_vec = get_base_access_vector (var);
1796 if (!access_vec)
1797 return NULL;
1798 access_count = access_vec->length ();
1800 /* Sort by <OFFSET, SIZE>. */
1801 access_vec->qsort (compare_access_positions);
1803 i = 0;
1804 while (i < access_count)
1806 struct access *access = (*access_vec)[i];
1807 bool grp_write = access->write;
1808 bool grp_read = !access->write;
1809 bool grp_scalar_write = access->write
1810 && is_gimple_reg_type (access->type);
1811 bool grp_scalar_read = !access->write
1812 && is_gimple_reg_type (access->type);
1813 bool grp_assignment_read = access->grp_assignment_read;
1814 bool grp_assignment_write = access->grp_assignment_write;
1815 bool multiple_scalar_reads = false;
1816 bool total_scalarization = access->grp_total_scalarization;
1817 bool grp_partial_lhs = access->grp_partial_lhs;
1818 bool first_scalar = is_gimple_reg_type (access->type);
1819 bool unscalarizable_region = access->grp_unscalarizable_region;
1821 if (first || access->offset >= high)
1823 first = false;
1824 low = access->offset;
1825 high = access->offset + access->size;
1827 else if (access->offset > low && access->offset + access->size > high)
1828 return NULL;
1829 else
1830 gcc_assert (access->offset >= low
1831 && access->offset + access->size <= high);
1833 j = i + 1;
1834 while (j < access_count)
1836 struct access *ac2 = (*access_vec)[j];
1837 if (ac2->offset != access->offset || ac2->size != access->size)
1838 break;
1839 if (ac2->write)
1841 grp_write = true;
1842 grp_scalar_write = (grp_scalar_write
1843 || is_gimple_reg_type (ac2->type));
1845 else
1847 grp_read = true;
1848 if (is_gimple_reg_type (ac2->type))
1850 if (grp_scalar_read)
1851 multiple_scalar_reads = true;
1852 else
1853 grp_scalar_read = true;
1856 grp_assignment_read |= ac2->grp_assignment_read;
1857 grp_assignment_write |= ac2->grp_assignment_write;
1858 grp_partial_lhs |= ac2->grp_partial_lhs;
1859 unscalarizable_region |= ac2->grp_unscalarizable_region;
1860 total_scalarization |= ac2->grp_total_scalarization;
1861 relink_to_new_repr (access, ac2);
1863 /* If there are both aggregate-type and scalar-type accesses with
1864 this combination of size and offset, the comparison function
1865 should have put the scalars first. */
1866 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1867 ac2->group_representative = access;
1868 j++;
1871 i = j;
1873 access->group_representative = access;
1874 access->grp_write = grp_write;
1875 access->grp_read = grp_read;
1876 access->grp_scalar_read = grp_scalar_read;
1877 access->grp_scalar_write = grp_scalar_write;
1878 access->grp_assignment_read = grp_assignment_read;
1879 access->grp_assignment_write = grp_assignment_write;
1880 access->grp_hint = multiple_scalar_reads || total_scalarization;
1881 access->grp_total_scalarization = total_scalarization;
1882 access->grp_partial_lhs = grp_partial_lhs;
1883 access->grp_unscalarizable_region = unscalarizable_region;
1884 if (access->first_link)
1885 add_access_to_work_queue (access);
1887 *prev_acc_ptr = access;
1888 prev_acc_ptr = &access->next_grp;
1891 gcc_assert (res == (*access_vec)[0]);
1892 return res;
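/* For example (a sketch, offsets and sizes in bits): accesses <0, 32> and
   <32, 32> form two valid groups, whereas accesses <0, 32> and <16, 32>
   partially overlap, so this function returns NULL and the whole variable
   is not scalarized.  */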
1895 /* Create a variable for the given ACCESS, which determines its type, name and
1896 a few other properties. Return the variable declaration; the callers store
1897 it in ACCESS->replacement_decl. */
1899 static tree
1900 create_access_replacement (struct access *access)
1902 tree repl;
1904 if (access->grp_to_be_debug_replaced)
1906 repl = create_tmp_var_raw (access->type, NULL);
1907 DECL_CONTEXT (repl) = current_function_decl;
1909 else
1910 repl = create_tmp_var (access->type, "SR");
1911 if (TREE_CODE (access->type) == COMPLEX_TYPE
1912 || TREE_CODE (access->type) == VECTOR_TYPE)
1914 if (!access->grp_partial_lhs)
1915 DECL_GIMPLE_REG_P (repl) = 1;
1917 else if (access->grp_partial_lhs
1918 && is_gimple_reg_type (access->type))
1919 TREE_ADDRESSABLE (repl) = 1;
1921 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1922 DECL_ARTIFICIAL (repl) = 1;
1923 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1925 if (DECL_NAME (access->base)
1926 && !DECL_IGNORED_P (access->base)
1927 && !DECL_ARTIFICIAL (access->base))
1929 char *pretty_name = make_fancy_name (access->expr);
1930 tree debug_expr = unshare_expr_without_location (access->expr), d;
1931 bool fail = false;
1933 DECL_NAME (repl) = get_identifier (pretty_name);
1934 obstack_free (&name_obstack, pretty_name);
1936 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1937 as DECL_DEBUG_EXPR isn't considered when looking for still
1938 used SSA_NAMEs and thus they could be freed. All debug info
1939 generation cares about is whether something is constant or variable
1940 and that get_ref_base_and_extent works properly on the
1941 expression. It cannot handle accesses at a non-constant offset
1942 though, so just give up in those cases. */
1943 for (d = debug_expr;
1944 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1945 d = TREE_OPERAND (d, 0))
1946 switch (TREE_CODE (d))
1948 case ARRAY_REF:
1949 case ARRAY_RANGE_REF:
1950 if (TREE_OPERAND (d, 1)
1951 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1952 fail = true;
1953 if (TREE_OPERAND (d, 3)
1954 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1955 fail = true;
1956 /* FALLTHRU */
1957 case COMPONENT_REF:
1958 if (TREE_OPERAND (d, 2)
1959 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
1960 fail = true;
1961 break;
1962 case MEM_REF:
1963 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
1964 fail = true;
1965 else
1966 d = TREE_OPERAND (d, 0);
1967 break;
1968 default:
1969 break;
1971 if (!fail)
1973 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1974 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1976 if (access->grp_no_warning)
1977 TREE_NO_WARNING (repl) = 1;
1978 else
1979 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1981 else
1982 TREE_NO_WARNING (repl) = 1;
1984 if (dump_file)
1986 if (access->grp_to_be_debug_replaced)
1988 fprintf (dump_file, "Created a debug-only replacement for ");
1989 print_generic_expr (dump_file, access->base, 0);
1990 fprintf (dump_file, " offset: %u, size: %u\n",
1991 (unsigned) access->offset, (unsigned) access->size);
1993 else
1995 fprintf (dump_file, "Created a replacement for ");
1996 print_generic_expr (dump_file, access->base, 0);
1997 fprintf (dump_file, " offset: %u, size: %u: ",
1998 (unsigned) access->offset, (unsigned) access->size);
1999 print_generic_expr (dump_file, repl, 0);
2000 fprintf (dump_file, "\n");
2003 sra_stats.replacements++;
2005 return repl;
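/* For illustration: with dumping enabled, the branch above emits lines of
   the following shape (the values and the s$b pretty name are made up):

     Created a replacement for s offset: 32, size: 32: s$b

   where s$b is the name produced by make_fancy_name for the replacement
   of field b of variable s.  */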
2008 /* Return the scalar replacement of ACCESS, which must already have been created. */
2010 static inline tree
2011 get_access_replacement (struct access *access)
2013 gcc_checking_assert (access->replacement_decl);
2014 return access->replacement_decl;
2018 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2019 linked list along the way. Stop when *ACCESS is NULL or the access pointed
2020 to by it is not "within" the root. Return false iff some accesses partially
2021 overlap. */
2023 static bool
2024 build_access_subtree (struct access **access)
2026 struct access *root = *access, *last_child = NULL;
2027 HOST_WIDE_INT limit = root->offset + root->size;
2029 *access = (*access)->next_grp;
2030 while (*access && (*access)->offset + (*access)->size <= limit)
2032 if (!last_child)
2033 root->first_child = *access;
2034 else
2035 last_child->next_sibling = *access;
2036 last_child = *access;
2038 if (!build_access_subtree (access))
2039 return false;
2042 if (*access && (*access)->offset < limit)
2043 return false;
2045 return true;
2048 /* Build a tree of access representatives, ACCESS is the pointer to the first
2049 one, others are linked in a list by the next_grp field. Return false iff
2050 some accesses partially overlap. */
2052 static bool
2053 build_access_trees (struct access *access)
2055 while (access)
2057 struct access *root = access;
2059 if (!build_access_subtree (&access))
2060 return false;
2061 root->next_grp = access;
2063 return true;
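/* An illustration with a hypothetical variable (assuming 32-bit int):

     struct { struct { int x; int y; } in; int z; } s;

   with representatives sorted as <0, 96> s, <0, 64> s.in, <0, 32> s.in.x,
   <32, 32> s.in.y and <64, 32> s.z, the functions above link s.in and s.z
   as children of s, and s.in.x and s.in.y as children of s.in, because
   each child lies entirely within its parent.  */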
2066 /* Return true if EXPR contains some ARRAY_REFs into an array with a variable
2067 lower bound. */
2069 static bool
2070 expr_with_var_bounded_array_refs_p (tree expr)
2072 while (handled_component_p (expr))
2074 if (TREE_CODE (expr) == ARRAY_REF
2075 && !host_integerp (array_ref_low_bound (expr), 0))
2076 return true;
2077 expr = TREE_OPERAND (expr, 0);
2079 return false;
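/* For illustration (a sketch; the front-end attribution is an assumption):
   a C access like a[0] has the constant lower bound 0 and does not trigger
   here.  The function fires for arrays whose *lower* bound is a run-time
   value, e.g. Ada- or Fortran-style arrays where array_ref_low_bound (expr)
   is an SSA_NAME rather than an INTEGER_CST, which makes any access through
   such an ARRAY_REF unsuitable for a replacement.  */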
2082 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2083 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. Also set all
2084 sorts of access flags appropriately along the way, notably propagating
2085 grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2086 grp_total_scalarization from PARENT down to ROOT.
2088 Creating a replacement for a scalar access is considered beneficial if its
2089 grp_hint is set (this means we are either attempting total scalarization or
2090 there is more than one direct read access) or according to the following
2091 table:
2093 Access written to through a scalar type (once or more times)
2095 | Written to in an assignment statement
2097 | | Access read as scalar _once_
2098 | | |
2099 | | | Read in an assignment statement
2100 | | | |
2101 | | | | Scalarize Comment
2102 -----------------------------------------------------------------------------
2103 0 0 0 0 No access for the scalar
2104 0 0 0 1 No access for the scalar
2105 0 0 1 0 No Single read - won't help
2106 0 0 1 1 No The same case
2107 0 1 0 0 No access for the scalar
2108 0 1 0 1 No access for the scalar
2109 0 1 1 0 Yes s = *g; return s.i;
2110 0 1 1 1 Yes The same case as above
2111 1 0 0 0 No Won't help
2112 1 0 0 1 Yes s.i = 1; *g = s;
2113 1 0 1 0 Yes s.i = 5; g = s.i;
2114 1 0 1 1 Yes The same case as above
2115 1 1 0 0 No Won't help.
2116 1 1 0 1 Yes s.i = 1; *g = s;
2117 1 1 1 0 Yes s = *g; return s.i;
2118 1 1 1 1 Yes Any of the above yeses */
2120 static bool
2121 analyze_access_subtree (struct access *root, struct access *parent,
2122 bool allow_replacements)
2124 struct access *child;
2125 HOST_WIDE_INT limit = root->offset + root->size;
2126 HOST_WIDE_INT covered_to = root->offset;
2127 bool scalar = is_gimple_reg_type (root->type);
2128 bool hole = false, sth_created = false;
2130 if (parent)
2132 if (parent->grp_read)
2133 root->grp_read = 1;
2134 if (parent->grp_assignment_read)
2135 root->grp_assignment_read = 1;
2136 if (parent->grp_write)
2137 root->grp_write = 1;
2138 if (parent->grp_assignment_write)
2139 root->grp_assignment_write = 1;
2140 if (parent->grp_total_scalarization)
2141 root->grp_total_scalarization = 1;
2144 if (root->grp_unscalarizable_region)
2145 allow_replacements = false;
2147 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2148 allow_replacements = false;
2150 for (child = root->first_child; child; child = child->next_sibling)
2152 hole |= covered_to < child->offset;
2153 sth_created |= analyze_access_subtree (child, root,
2154 allow_replacements && !scalar);
2156 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2157 root->grp_total_scalarization &= child->grp_total_scalarization;
2158 if (child->grp_covered)
2159 covered_to += child->size;
2160 else
2161 hole = true;
2164 if (allow_replacements && scalar && !root->first_child
2165 && (root->grp_hint
2166 || ((root->grp_scalar_read || root->grp_assignment_read)
2167 && (root->grp_scalar_write || root->grp_assignment_write))))
2169 /* Always create access replacements that cover the whole access.
2170 For integral types this means the precision has to match.
2171 Avoid assumptions based on the integral type kind, too. */
2172 if (INTEGRAL_TYPE_P (root->type)
2173 && (TREE_CODE (root->type) != INTEGER_TYPE
2174 || TYPE_PRECISION (root->type) != root->size)
2175 /* But leave bitfield accesses alone. */
2176 && (TREE_CODE (root->expr) != COMPONENT_REF
2177 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2179 tree rt = root->type;
2180 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2181 && (root->size % BITS_PER_UNIT) == 0);
2182 root->type = build_nonstandard_integer_type (root->size,
2183 TYPE_UNSIGNED (rt));
2184 root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2185 root->base, root->offset,
2186 root->type, NULL, false);
2188 if (dump_file && (dump_flags & TDF_DETAILS))
2190 fprintf (dump_file, "Changing the type of a replacement for ");
2191 print_generic_expr (dump_file, root->base, 0);
2192 fprintf (dump_file, " offset: %u, size: %u ",
2193 (unsigned) root->offset, (unsigned) root->size);
2194 fprintf (dump_file, " to an integer.\n");
2198 root->grp_to_be_replaced = 1;
2199 root->replacement_decl = create_access_replacement (root);
2200 sth_created = true;
2201 hole = false;
2203 else
2205 if (allow_replacements
2206 && scalar && !root->first_child
2207 && (root->grp_scalar_write || root->grp_assignment_write)
2208 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2209 DECL_UID (root->base)))
2211 gcc_checking_assert (!root->grp_scalar_read
2212 && !root->grp_assignment_read);
2213 sth_created = true;
2214 if (MAY_HAVE_DEBUG_STMTS)
2216 root->grp_to_be_debug_replaced = 1;
2217 root->replacement_decl = create_access_replacement (root);
2221 if (covered_to < limit)
2222 hole = true;
2223 if (scalar)
2224 root->grp_total_scalarization = 0;
2227 if (!hole || root->grp_total_scalarization)
2228 root->grp_covered = 1;
2229 else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2230 root->grp_unscalarized_data = 1; /* not covered and written to */
2231 return sth_created;
2234 /* Analyze all access trees linked by next_grp by the means of
2235 analyze_access_subtree. */
2236 static bool
2237 analyze_access_trees (struct access *access)
2239 bool ret = false;
2241 while (access)
2243 if (analyze_access_subtree (access, NULL, true))
2244 ret = true;
2245 access = access->next_grp;
2248 return ret;
2251 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2252 SIZE would conflict with an already existing one. If exactly such a child
2253 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2255 static bool
2256 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2257 HOST_WIDE_INT size, struct access **exact_match)
2259 struct access *child;
2261 for (child = lacc->first_child; child; child = child->next_sibling)
2263 if (child->offset == norm_offset && child->size == size)
2265 *exact_match = child;
2266 return true;
2269 if (child->offset < norm_offset + size
2270 && child->offset + child->size > norm_offset)
2271 return true;
2274 return false;
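/* A worked illustration with made-up numbers: if LACC already has a child
   at <32, 32>, then a candidate child at <32, 32> is an exact match, a
   candidate at <48, 32> conflicts because the intervals overlap without
   being identical, and a candidate at <64, 32> does not conflict at all.  */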
2277 /* Create a new child access of PARENT, with all properties just like MODEL
2278 except for its offset and with its grp_write true and grp_read false.
2279 Return the new access or NULL if it cannot be created. Note that this access
2280 is created long after all splicing and sorting; it is not located in any
2281 access vector and is automatically a representative of its group. */
2283 static struct access *
2284 create_artificial_child_access (struct access *parent, struct access *model,
2285 HOST_WIDE_INT new_offset)
2287 struct access *access;
2288 struct access **child;
2289 tree expr = parent->base;
2291 gcc_assert (!model->grp_unscalarizable_region);
2293 access = (struct access *) pool_alloc (access_pool);
2294 memset (access, 0, sizeof (struct access));
2295 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2296 model->type))
2298 access->grp_no_warning = true;
2299 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2300 new_offset, model, NULL, false);
2303 access->base = parent->base;
2304 access->expr = expr;
2305 access->offset = new_offset;
2306 access->size = model->size;
2307 access->type = model->type;
2308 access->grp_write = true;
2309 access->grp_read = false;
2311 child = &parent->first_child;
2312 while (*child && (*child)->offset < new_offset)
2313 child = &(*child)->next_sibling;
2315 access->next_sibling = *child;
2316 *child = access;
2318 return access;
2322 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2323 true if any new subaccess was created. Additionally, if RACC is a scalar
2324 access but LACC is not, change the type of the latter, if possible. */
2326 static bool
2327 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2329 struct access *rchild;
2330 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2331 bool ret = false;
2333 if (is_gimple_reg_type (lacc->type)
2334 || lacc->grp_unscalarizable_region
2335 || racc->grp_unscalarizable_region)
2336 return false;
2338 if (is_gimple_reg_type (racc->type))
2340 if (!lacc->first_child && !racc->first_child)
2342 tree t = lacc->base;
2344 lacc->type = racc->type;
2345 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2346 lacc->offset, racc->type))
2347 lacc->expr = t;
2348 else
2350 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2351 lacc->base, lacc->offset,
2352 racc, NULL, false);
2353 lacc->grp_no_warning = true;
2356 return false;
2359 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2361 struct access *new_acc = NULL;
2362 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2364 if (rchild->grp_unscalarizable_region)
2365 continue;
2367 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2368 &new_acc))
2370 if (new_acc)
2372 rchild->grp_hint = 1;
2373 new_acc->grp_hint |= new_acc->grp_read;
2374 if (rchild->first_child)
2375 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2377 continue;
2380 rchild->grp_hint = 1;
2381 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2382 if (new_acc)
2384 ret = true;
2385 if (racc->first_child)
2386 propagate_subaccesses_across_link (new_acc, rchild);
2390 return ret;
2393 /* Propagate all subaccesses across assignment links. */
2395 static void
2396 propagate_all_subaccesses (void)
2398 while (work_queue_head)
2400 struct access *racc = pop_access_from_work_queue ();
2401 struct assign_link *link;
2403 gcc_assert (racc->first_link);
2405 for (link = racc->first_link; link; link = link->next)
2407 struct access *lacc = link->lacc;
2409 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2410 continue;
2411 lacc = lacc->group_representative;
2412 if (propagate_subaccesses_across_link (lacc, racc)
2413 && lacc->first_link)
2414 add_access_to_work_queue (lacc);
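/* An illustration with hypothetical user code: given

     dst = src;
     ... = src.f;

   the access of src.f on the right hand side is propagated across the
   assign link of the first statement, giving dst an artificial child
   access at the same offset and size, so that the aggregate copy can
   later be replaced by copies of scalar replacements.  */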
2419 /* Go through all accesses collected throughout the (intraprocedural) analysis
2420 stage, exclude overlapping ones, identify representatives and build trees
2421 out of them, making decisions about scalarization on the way. Return true
2422 iff there are any to-be-scalarized variables after this stage. */
2424 static bool
2425 analyze_all_variable_accesses (void)
2427 int res = 0;
2428 bitmap tmp = BITMAP_ALLOC (NULL);
2429 bitmap_iterator bi;
2430 unsigned i, max_total_scalarization_size;
2432 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2433 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2435 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2436 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2437 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2439 tree var = candidate (i);
2441 if (TREE_CODE (var) == VAR_DECL
2442 && type_consists_of_records_p (TREE_TYPE (var)))
2444 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2445 <= max_total_scalarization_size)
2447 completely_scalarize_var (var);
2448 if (dump_file && (dump_flags & TDF_DETAILS))
2450 fprintf (dump_file, "Will attempt to totally scalarize ");
2451 print_generic_expr (dump_file, var, 0);
2452 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2455 else if (dump_file && (dump_flags & TDF_DETAILS))
2457 fprintf (dump_file, "Too big to totally scalarize: ");
2458 print_generic_expr (dump_file, var, 0);
2459 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2464 bitmap_copy (tmp, candidate_bitmap);
2465 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2467 tree var = candidate (i);
2468 struct access *access;
2470 access = sort_and_splice_var_accesses (var);
2471 if (!access || !build_access_trees (access))
2472 disqualify_candidate (var,
2473 "No or inhibitingly overlapping accesses.");
2476 propagate_all_subaccesses ();
2478 bitmap_copy (tmp, candidate_bitmap);
2479 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2481 tree var = candidate (i);
2482 struct access *access = get_first_repr_for_decl (var);
2484 if (analyze_access_trees (access))
2486 res++;
2487 if (dump_file && (dump_flags & TDF_DETAILS))
2489 fprintf (dump_file, "\nAccess trees for ");
2490 print_generic_expr (dump_file, var, 0);
2491 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2492 dump_access_tree (dump_file, access);
2493 fprintf (dump_file, "\n");
2496 else
2497 disqualify_candidate (var, "No scalar replacements to be created.");
2500 BITMAP_FREE (tmp);
2502 if (res)
2504 statistics_counter_event (cfun, "Scalarized aggregates", res);
2505 return true;
2507 else
2508 return false;
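/* A worked example with made-up target parameters: if UNITS_PER_WORD is 8
   and MOVE_RATIO yields 4 when optimizing for speed, the cap computed
   above is 8 * 8 * 4 == 256 bits, so a record-only variable of up to 32
   bytes may be totally scalarized while a bigger one is only reported as
   "Too big to totally scalarize" in the detailed dump.  */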
2511 /* Generate statements copying scalar replacements of accesses within a subtree
2512 into or out of AGG. ACCESS, all its children, siblings and their children
2513 are to be processed. AGG is an aggregate type expression (can be a
2514 declaration but does not have to be, it can for example also be a mem_ref or
2515 a series of handled components). TOP_OFFSET is the offset of the processed
2516 subtree which has to be subtracted from offsets of individual accesses to
2517 get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
2518 replacements in the interval <start_offset, start_offset + chunk_size>,
2519 otherwise copy all. GSI is a statement iterator used to place the new
2520 statements. WRITE should be true when the statements should write from AGG
2521 to the replacement and false if vice versa. If INSERT_AFTER is true, new
2522 statements will be added after the current statement in GSI, they will be
2523 added before the statement otherwise. */
2525 static void
2526 generate_subtree_copies (struct access *access, tree agg,
2527 HOST_WIDE_INT top_offset,
2528 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2529 gimple_stmt_iterator *gsi, bool write,
2530 bool insert_after, location_t loc)
2534 if (chunk_size && access->offset >= start_offset + chunk_size)
2535 return;
2537 if (access->grp_to_be_replaced
2538 && (chunk_size == 0
2539 || access->offset + access->size > start_offset))
2541 tree expr, repl = get_access_replacement (access);
2542 gimple stmt;
2544 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2545 access, gsi, insert_after);
2547 if (write)
2549 if (access->grp_partial_lhs)
2550 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2551 !insert_after,
2552 insert_after ? GSI_NEW_STMT
2553 : GSI_SAME_STMT);
2554 stmt = gimple_build_assign (repl, expr);
2556 else
2558 TREE_NO_WARNING (repl) = 1;
2559 if (access->grp_partial_lhs)
2560 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2561 !insert_after,
2562 insert_after ? GSI_NEW_STMT
2563 : GSI_SAME_STMT);
2564 stmt = gimple_build_assign (expr, repl);
2566 gimple_set_location (stmt, loc);
2568 if (insert_after)
2569 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2570 else
2571 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2572 update_stmt (stmt);
2573 sra_stats.subtree_copies++;
2575 else if (write
2576 && access->grp_to_be_debug_replaced
2577 && (chunk_size == 0
2578 || access->offset + access->size > start_offset))
2580 gimple ds;
2581 tree drhs = build_debug_ref_for_model (loc, agg,
2582 access->offset - top_offset,
2583 access);
2584 ds = gimple_build_debug_bind (get_access_replacement (access),
2585 drhs, gsi_stmt (*gsi));
2586 if (insert_after)
2587 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2588 else
2589 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2592 if (access->first_child)
2593 generate_subtree_copies (access->first_child, agg, top_offset,
2594 start_offset, chunk_size, gsi,
2595 write, insert_after, loc);
2597 access = access->next_sibling;
2599 while (access);
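/* An illustration with hypothetical names: for a variable with scalar
   replacements s$x and s$y, a call with WRITE false emits stores

     AGG.x = s$x;  AGG.y = s$y;

   while WRITE true emits the corresponding loads s$x = AGG.x; and
   s$y = AGG.y;, visiting ACCESS, its siblings and all their children.  */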
2602 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2603 the root of the subtree to be processed. GSI is the statement iterator used
2604 for inserting statements which are added after the current statement if
2605 INSERT_AFTER is true or before it otherwise. */
2607 static void
2608 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2609 bool insert_after, location_t loc)
2612 struct access *child;
2614 if (access->grp_to_be_replaced)
2616 gimple stmt;
2618 stmt = gimple_build_assign (get_access_replacement (access),
2619 build_zero_cst (access->type));
2620 if (insert_after)
2621 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2622 else
2623 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2624 update_stmt (stmt);
2625 gimple_set_location (stmt, loc);
2627 else if (access->grp_to_be_debug_replaced)
2629 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2630 build_zero_cst (access->type),
2631 gsi_stmt (*gsi));
2632 if (insert_after)
2633 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2634 else
2635 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2638 for (child = access->first_child; child; child = child->next_sibling)
2639 init_subtree_with_zero (child, gsi, insert_after, loc);
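/* An illustration with a hypothetical variable: for a fully scalarized

     struct S { int i; double d; } s;

   this emits s$i = 0; and s$d = 0.0;, i.e. one assignment of
   build_zero_cst (access->type) per to-be-replaced access in the
   subtree.  */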
2642 /* Search for an access representative for the given expression EXPR and
2643 return it or NULL if it cannot be found. */
2645 static struct access *
2646 get_access_for_expr (tree expr)
2648 HOST_WIDE_INT offset, size, max_size;
2649 tree base;
2651 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2652 a different size than the size of its argument and we need the latter
2653 one. */
2654 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2655 expr = TREE_OPERAND (expr, 0);
2657 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2658 if (max_size == -1 || !DECL_P (base))
2659 return NULL;
2661 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2662 return NULL;
2664 return get_var_base_offset_size_access (base, offset, max_size);
2667 /* Replace the expression EXPR with a scalar replacement if there is one and
2668 generate other statements to do type conversion or subtree copying if
2669 necessary. GSI is used to place newly created statements, WRITE is true if
2670 the expression is being written to (it is on an LHS of a statement or output
2671 in an assembly statement). */
2673 static bool
2674 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2676 location_t loc;
2677 struct access *access;
2678 tree type, bfr;
2680 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2682 bfr = *expr;
2683 expr = &TREE_OPERAND (*expr, 0);
2685 else
2686 bfr = NULL_TREE;
2688 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2689 expr = &TREE_OPERAND (*expr, 0);
2690 access = get_access_for_expr (*expr);
2691 if (!access)
2692 return false;
2693 type = TREE_TYPE (*expr);
2695 loc = gimple_location (gsi_stmt (*gsi));
2696 if (access->grp_to_be_replaced)
2698 tree repl = get_access_replacement (access);
2699 /* If we replace a non-register typed access simply use the original
2700 access expression to extract the scalar component afterwards.
2701 This happens if scalarizing a function return value or parameter
2702 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2703 gcc.c-torture/compile/20011217-1.c.
2705 We also want to use this when accessing a complex or vector which can
2706 be accessed as a different type too, potentially creating a need for
2707 type conversion (see PR42196) and when scalarized unions are involved
2708 in assembler statements (see PR42398). */
2709 if (!useless_type_conversion_p (type, access->type))
2711 tree ref;
2713 ref = build_ref_for_model (loc, access->base, access->offset, access,
2714 NULL, false);
2716 if (write)
2718 gimple stmt;
2720 if (access->grp_partial_lhs)
2721 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2722 false, GSI_NEW_STMT);
2723 stmt = gimple_build_assign (repl, ref);
2724 gimple_set_location (stmt, loc);
2725 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2727 else
2729 gimple stmt;
2731 if (access->grp_partial_lhs)
2732 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2733 true, GSI_SAME_STMT);
2734 stmt = gimple_build_assign (ref, repl);
2735 gimple_set_location (stmt, loc);
2736 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2739 else
2740 *expr = repl;
2741 sra_stats.exprs++;
2743 else if (write && access->grp_to_be_debug_replaced)
2745 gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2746 NULL_TREE,
2747 gsi_stmt (*gsi));
2748 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2751 if (access->first_child)
2753 HOST_WIDE_INT start_offset, chunk_size;
2754 if (bfr
2755 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2756 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2758 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2759 start_offset = access->offset
2760 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2762 else
2763 start_offset = chunk_size = 0;
2765 generate_subtree_copies (access->first_child, access->base, 0,
2766 start_offset, chunk_size, gsi, write, write,
2767 loc);
2769 return true;
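/* An illustration with hypothetical names: if s.i has the replacement s$i
   and the types agree, a use like tmp = s.i; is rewritten in place to
   tmp = s$i;.  When the expression type does not match the access type,
   the original reference is kept and an extra assignment between it and
   s$i is inserted: after the statement when the expression is written to,
   before it when it is read.  */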
2772 /* Where scalar replacements of the RHS have been written to when a replacement
2773 of an LHS of an assignment cannot be directly loaded from a replacement of
2774 the RHS. */
2775 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2776 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2777 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2779 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2780 base aggregate if there are unscalarized data or directly to the LHS of the
2781 statement that is pointed to by GSI otherwise. */
2783 static enum unscalarized_data_handling
2784 handle_unscalarized_data_in_subtree (struct access *top_racc,
2785 gimple_stmt_iterator *gsi)
2787 if (top_racc->grp_unscalarized_data)
2789 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2790 gsi, false, false,
2791 gimple_location (gsi_stmt (*gsi)));
2792 return SRA_UDH_RIGHT;
2794 else
2796 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2797 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2798 0, 0, gsi, false, false,
2799 gimple_location (gsi_stmt (*gsi)));
2800 return SRA_UDH_LEFT;
2805 /* Try to generate statements to load all sub-replacements in an access subtree
2806 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2807 If that is not possible, refresh the TOP_RACC base aggregate and load the
2808 accesses from it. LEFT_OFFSET is the offset of the whole left subtree being
2809 copied. NEW_GSI is a stmt iterator used for statement insertions after the
2810 original assignment, OLD_GSI is used to insert statements before the
2811 assignment. *REFRESHED keeps track of whether we have needed to
2812 refresh replacements of the LHS and from which side of the assignment this
2813 takes place. */
2815 static void
2816 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2817 HOST_WIDE_INT left_offset,
2818 gimple_stmt_iterator *old_gsi,
2819 gimple_stmt_iterator *new_gsi,
2820 enum unscalarized_data_handling *refreshed)
2822 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2823 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2825 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2827 if (lacc->grp_to_be_replaced)
2829 struct access *racc;
2830 gimple stmt;
2831 tree rhs;
2833 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2834 if (racc && racc->grp_to_be_replaced)
2836 rhs = get_access_replacement (racc);
2837 if (!useless_type_conversion_p (lacc->type, racc->type))
2838 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2840 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2841 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2842 true, GSI_SAME_STMT);
2844 else
2846 /* No suitable access on the right hand side, need to load from
2847 the aggregate. See if we have to update it first... */
2848 if (*refreshed == SRA_UDH_NONE)
2849 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2850 old_gsi);
2852 if (*refreshed == SRA_UDH_LEFT)
2853 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2854 new_gsi, true);
2855 else
2856 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2857 new_gsi, true);
2858 if (lacc->grp_partial_lhs)
2859 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2860 false, GSI_NEW_STMT);
2863 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2864 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2865 gimple_set_location (stmt, loc);
2866 update_stmt (stmt);
2867 sra_stats.subreplacements++;
2869 else
2871 if (*refreshed == SRA_UDH_NONE
2872 && lacc->grp_read && !lacc->grp_covered)
2873 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2874 old_gsi);
2875 if (lacc && lacc->grp_to_be_debug_replaced)
2877 gimple ds;
2878 tree drhs;
2879 struct access *racc = find_access_in_subtree (top_racc, offset,
2880 lacc->size);
2882 if (racc && racc->grp_to_be_replaced)
2884 if (racc->grp_write)
2885 drhs = get_access_replacement (racc);
2886 else
2887 drhs = NULL;
2889 else if (*refreshed == SRA_UDH_LEFT)
2890 drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2891 lacc);
2892 else if (*refreshed == SRA_UDH_RIGHT)
2893 drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2894 lacc);
2895 else
2896 drhs = NULL_TREE;
2897 if (drhs
2898 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
2899 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2900 lacc->type, drhs);
2901 ds = gimple_build_debug_bind (get_access_replacement (lacc),
2902 drhs, gsi_stmt (*old_gsi));
2903 gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2907 if (lacc->first_child)
2908 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2909 old_gsi, new_gsi, refreshed);
2913 /* Result code for SRA assignment modification. */
2914 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2915 SRA_AM_MODIFIED, /* stmt changed but not
2916 removed */
2917 SRA_AM_REMOVED }; /* stmt eliminated */
2919 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2920 to the assignment and GSI is the statement iterator pointing at it. Returns
2921 the same values as sra_modify_assign. */
2923 static enum assignment_mod_result
2924 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2926 tree lhs = gimple_assign_lhs (*stmt);
2927 struct access *acc;
2928 location_t loc;
2930 acc = get_access_for_expr (lhs);
2931 if (!acc)
2932 return SRA_AM_NONE;
2934 if (gimple_clobber_p (*stmt))
2936 /* Remove clobbers of fully scalarized variables, otherwise
2937 do nothing. */
2938 if (acc->grp_covered)
2940 unlink_stmt_vdef (*stmt);
2941 gsi_remove (gsi, true);
2942 release_defs (*stmt);
2943 return SRA_AM_REMOVED;
2945 else
2946 return SRA_AM_NONE;
2949 loc = gimple_location (*stmt);
2950 if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2952 /* I have never seen this code path trigger but if it can happen the
2953 following should handle it gracefully. */
2954 if (access_has_children_p (acc))
2955 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2956 true, true, loc);
2957 return SRA_AM_MODIFIED;
2960 if (acc->grp_covered)
2962 init_subtree_with_zero (acc, gsi, false, loc);
2963 unlink_stmt_vdef (*stmt);
2964 gsi_remove (gsi, true);
2965 release_defs (*stmt);
2966 return SRA_AM_REMOVED;
2968 else
2970 init_subtree_with_zero (acc, gsi, true, loc);
2971 return SRA_AM_MODIFIED;
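/* An illustration with hypothetical user code: if the access tree of s is
   fully covered,

     s = (struct S) {};

   is removed and replaced by zero-initialization of all replacements
   (SRA_AM_REMOVED); if parts of s are not scalarized, the statement is
   kept and the zeroing of the replacements is inserted after it
   (SRA_AM_MODIFIED).  */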
2975 /* Create and return a new suitable default definition SSA_NAME for RACC which
2976 is an access describing an uninitialized part of an aggregate that is being
2977 loaded. */
2979 static tree
2980 get_repl_default_def_ssa_name (struct access *racc)
2982 gcc_checking_assert (!racc->grp_to_be_replaced &&
2983 !racc->grp_to_be_debug_replaced);
2984 if (!racc->replacement_decl)
2985 racc->replacement_decl = create_access_replacement (racc);
2986 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
2989 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2990 bit-field field declaration somewhere in it. */
2992 static inline bool
2993 contains_vce_or_bfcref_p (const_tree ref)
2995 while (handled_component_p (ref))
2997 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2998 || (TREE_CODE (ref) == COMPONENT_REF
2999 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3000 return true;
3001 ref = TREE_OPERAND (ref, 0);
3004 return false;
3007 /* Examine both sides of the assignment statement pointed to by STMT, replace
3008 them with a scalar replacement if there is one and generate copying of
3009 replacements if scalarized aggregates have been used in the assignment. GSI
3010 is used to hold generated statements for type conversions and subtree
3011 copying. */
3013 static enum assignment_mod_result
3014 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3016 struct access *lacc, *racc;
3017 tree lhs, rhs;
3018 bool modify_this_stmt = false;
3019 bool force_gimple_rhs = false;
3020 location_t loc;
3021 gimple_stmt_iterator orig_gsi = *gsi;
3023 if (!gimple_assign_single_p (*stmt))
3024 return SRA_AM_NONE;
3025 lhs = gimple_assign_lhs (*stmt);
3026 rhs = gimple_assign_rhs1 (*stmt);
3028 if (TREE_CODE (rhs) == CONSTRUCTOR)
3029 return sra_modify_constructor_assign (stmt, gsi);
3031 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3032 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3033 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3035 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3036 gsi, false);
3037 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3038 gsi, true);
3039 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3042 lacc = get_access_for_expr (lhs);
3043 racc = get_access_for_expr (rhs);
3044 if (!lacc && !racc)
3045 return SRA_AM_NONE;
3047 loc = gimple_location (*stmt);
3048 if (lacc && lacc->grp_to_be_replaced)
3050 lhs = get_access_replacement (lacc);
3051 gimple_assign_set_lhs (*stmt, lhs);
3052 modify_this_stmt = true;
3053 if (lacc->grp_partial_lhs)
3054 force_gimple_rhs = true;
3055 sra_stats.exprs++;
3058 if (racc && racc->grp_to_be_replaced)
3060 rhs = get_access_replacement (racc);
3061 modify_this_stmt = true;
3062 if (racc->grp_partial_lhs)
3063 force_gimple_rhs = true;
3064 sra_stats.exprs++;
3066 else if (racc
3067 && !racc->grp_unscalarized_data
3068 && TREE_CODE (lhs) == SSA_NAME
3069 && !access_has_replacements_p (racc))
3071 rhs = get_repl_default_def_ssa_name (racc);
3072 modify_this_stmt = true;
3073 sra_stats.exprs++;
3076 if (modify_this_stmt)
3078 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3080 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3081 ??? This should move to fold_stmt which we simply should
3082 call after building a VIEW_CONVERT_EXPR here. */
3083 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3084 && !contains_bitfld_component_ref_p (lhs))
3086 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3087 gimple_assign_set_lhs (*stmt, lhs);
3089 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3090 && !contains_vce_or_bfcref_p (rhs))
3091 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3093 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3095 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3096 rhs);
3097 if (is_gimple_reg_type (TREE_TYPE (lhs))
3098 && TREE_CODE (lhs) != SSA_NAME)
3099 force_gimple_rhs = true;
3104 if (lacc && lacc->grp_to_be_debug_replaced)
3106 tree dlhs = get_access_replacement (lacc);
3107 tree drhs = unshare_expr (rhs);
3108 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3110 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3111 && !contains_vce_or_bfcref_p (drhs))
3112 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3113 if (drhs
3114 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3115 TREE_TYPE (drhs)))
3116 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3117 TREE_TYPE (dlhs), drhs);
3119 gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3120 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3123 /* From this point on, the function deals with assignments in between
3124 aggregates when at least one has scalar reductions of some of its
3125 components. There are three possible scenarios: 1) both the LHS and RHS have
3126 to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3128 In the first case, we would like to load the LHS components from RHS
3129 components whenever possible. If that is not possible, we would like to
3130 read it directly from the RHS (after updating it by storing in it its own
3131 components). If there are some necessary unscalarized data in the LHS,
3132 those will be loaded by the original assignment too. If neither of these
3133 cases happen, the original statement can be removed. Most of this is done
3134 by load_assign_lhs_subreplacements.
3136 In the second case, we would like to store all RHS scalarized components
3137 directly into LHS and if they cover the aggregate completely, remove the
3138 statement too. In the third case, we want the LHS components to be loaded
3139 directly from the RHS (DSE will remove the original statement if it
3140 becomes redundant).
3142 This is a bit complex but manageable when types match and when unions do
3143 not cause confusion in a way that we cannot really load a component of LHS
3144 from the RHS or vice versa (the access representing this level can have
3145 subaccesses that are accessible only through a different union field at a
3146 higher level - different from the one used in the examined expression).
3147 Unions are fun.
3149 Therefore, I specially handle a fourth case, happening when there is a
3150 specific type cast or it is impossible to locate a scalarized subaccess on
3151 the other side of the expression. If that happens, I simply "refresh" the
3152 RHS by storing in it its scalarized components, leave the original statement
3153 there to do the copying and then load the scalar replacements of the LHS.
3154 This is what the first branch does. */
3156 if (modify_this_stmt
3157 || gimple_has_volatile_ops (*stmt)
3158 || contains_vce_or_bfcref_p (rhs)
3159 || contains_vce_or_bfcref_p (lhs))
3161 if (access_has_children_p (racc))
3162 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3163 gsi, false, false, loc);
3164 if (access_has_children_p (lacc))
3165 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3166 gsi, true, true, loc);
3167 sra_stats.separate_lhs_rhs_handling++;
3169 /* This gimplification must be done after generate_subtree_copies,
3170 lest we insert the subtree copies in the middle of the gimplified
3171 sequence. */
3172 if (force_gimple_rhs)
3173 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3174 true, GSI_SAME_STMT);
3175 if (gimple_assign_rhs1 (*stmt) != rhs)
3177 modify_this_stmt = true;
3178 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3179 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3182 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3184 else
3186 if (access_has_children_p (lacc)
3187 && access_has_children_p (racc)
3188 /* When an access represents an unscalarizable region, it usually
3189 represents accesses with variable offset and thus must not be used
3190 to generate new memory accesses. */
3191 && !lacc->grp_unscalarizable_region
3192 && !racc->grp_unscalarizable_region)
3194 gimple_stmt_iterator orig_gsi = *gsi;
3195 enum unscalarized_data_handling refreshed;
3197 if (lacc->grp_read && !lacc->grp_covered)
3198 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3199 else
3200 refreshed = SRA_UDH_NONE;
3202 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3203 &orig_gsi, gsi, &refreshed);
3204 if (refreshed != SRA_UDH_RIGHT)
3206 gsi_next (gsi);
3207 unlink_stmt_vdef (*stmt);
3208 gsi_remove (&orig_gsi, true);
3209 release_defs (*stmt);
3210 sra_stats.deleted++;
3211 return SRA_AM_REMOVED;
3214 else
3216 if (access_has_children_p (racc)
3217 && !racc->grp_unscalarized_data)
3219 if (dump_file)
3221 fprintf (dump_file, "Removing load: ");
3222 print_gimple_stmt (dump_file, *stmt, 0, 0);
3224 generate_subtree_copies (racc->first_child, lhs,
3225 racc->offset, 0, 0, gsi,
3226 false, false, loc);
3227 gcc_assert (*stmt == gsi_stmt (*gsi));
3228 unlink_stmt_vdef (*stmt);
3229 gsi_remove (gsi, true);
3230 release_defs (*stmt);
3231 sra_stats.deleted++;
3232 return SRA_AM_REMOVED;
3234 /* Restore the aggregate RHS from its components so the
3235 prevailing aggregate copy does the right thing. */
3236 if (access_has_children_p (racc))
3237 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3238 gsi, false, false, loc);
3239 /* Re-load the components of the aggregate copy destination.
3240 But use the RHS aggregate to load from to expose more
3241 optimization opportunities. */
3242 if (access_has_children_p (lacc))
3243 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3244 0, 0, gsi, true, true, loc);
3247 return SRA_AM_NONE;
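/* An illustration of the second scenario above with hypothetical code:
   when all of src is covered by to-be-replaced accesses and dst has no
   scalarized components of its own,

     dst = src;

   is rewritten into a series of stores dst.f = src$f; ... and the
   original aggregate copy is deleted, showing up as "Removing load" in
   the dump file.  */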
3251 /* Traverse the function body and make all modifications as decided in
3252 analyze_all_variable_accesses. Return true iff the CFG has been
3253 changed. */
3255 static bool
3256 sra_modify_function_body (void)
3258 bool cfg_changed = false;
3259 basic_block bb;
3261 FOR_EACH_BB (bb)
3263 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3264 while (!gsi_end_p (gsi))
3266 gimple stmt = gsi_stmt (gsi);
3267 enum assignment_mod_result assign_result;
3268 bool modified = false, deleted = false;
3269 tree *t;
3270 unsigned i;
3272 switch (gimple_code (stmt))
3274 case GIMPLE_RETURN:
3275 t = gimple_return_retval_ptr (stmt);
3276 if (*t != NULL_TREE)
3277 modified |= sra_modify_expr (t, &gsi, false);
3278 break;
3280 case GIMPLE_ASSIGN:
3281 assign_result = sra_modify_assign (&stmt, &gsi);
3282 modified |= assign_result == SRA_AM_MODIFIED;
3283 deleted = assign_result == SRA_AM_REMOVED;
3284 break;
3286 case GIMPLE_CALL:
3287 /* Operands must be processed before the lhs. */
3288 for (i = 0; i < gimple_call_num_args (stmt); i++)
3290 t = gimple_call_arg_ptr (stmt, i);
3291 modified |= sra_modify_expr (t, &gsi, false);
3294 if (gimple_call_lhs (stmt))
3296 t = gimple_call_lhs_ptr (stmt);
3297 modified |= sra_modify_expr (t, &gsi, true);
3299 break;
3301 case GIMPLE_ASM:
3302 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3304 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3305 modified |= sra_modify_expr (t, &gsi, false);
3307 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3309 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3310 modified |= sra_modify_expr (t, &gsi, true);
3312 break;
3314 default:
3315 break;
3318 if (modified)
3320 update_stmt (stmt);
3321 if (maybe_clean_eh_stmt (stmt)
3322 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3323 cfg_changed = true;
3325 if (!deleted)
3326 gsi_next (&gsi);
3330 return cfg_changed;
3333 /* Generate statements initializing scalar replacements of parts of function
3334 parameters. */
3336 static void
3337 initialize_parameter_reductions (void)
3339 gimple_stmt_iterator gsi;
3340 gimple_seq seq = NULL;
3341 tree parm;
3343 gsi = gsi_start (seq);
3344 for (parm = DECL_ARGUMENTS (current_function_decl);
3345 parm;
3346 parm = DECL_CHAIN (parm))
3348 vec<access_p> *access_vec;
3349 struct access *access;
3351 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3352 continue;
3353 access_vec = get_base_access_vector (parm);
3354 if (!access_vec)
3355 continue;
3357 for (access = (*access_vec)[0];
3358 access;
3359 access = access->next_grp)
3360 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3361 EXPR_LOCATION (parm));
3364 seq = gsi_seq (gsi);
3365 if (seq)
3366 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3369 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3370 it reveals there are components of some aggregates to be scalarized, it runs
3371 the required transformations. */
3372 static unsigned int
3373 perform_intra_sra (void)
3375 int ret = 0;
3376 sra_initialize ();
3378 if (!find_var_candidates ())
3379 goto out;
3381 if (!scan_function ())
3382 goto out;
3384 if (!analyze_all_variable_accesses ())
3385 goto out;
3387 if (sra_modify_function_body ())
3388 ret = TODO_update_ssa | TODO_cleanup_cfg;
3389 else
3390 ret = TODO_update_ssa;
3391 initialize_parameter_reductions ();
3393 statistics_counter_event (cfun, "Scalar replacements created",
3394 sra_stats.replacements);
3395 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3396 statistics_counter_event (cfun, "Subtree copy stmts",
3397 sra_stats.subtree_copies);
3398 statistics_counter_event (cfun, "Subreplacement stmts",
3399 sra_stats.subreplacements);
3400 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3401 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3402 sra_stats.separate_lhs_rhs_handling);
3404 out:
3405 sra_deinitialize ();
3406 return ret;
3409 /* Perform early intraprocedural SRA. */
3410 static unsigned int
3411 early_intra_sra (void)
3413 sra_mode = SRA_MODE_EARLY_INTRA;
3414 return perform_intra_sra ();
3417 /* Perform "late" intraprocedural SRA. */
3418 static unsigned int
3419 late_intra_sra (void)
3421 sra_mode = SRA_MODE_INTRA;
3422 return perform_intra_sra ();
3426 static bool
3427 gate_intra_sra (void)
3429 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3433 struct gimple_opt_pass pass_sra_early =
3436 GIMPLE_PASS,
3437 "esra", /* name */
3438 OPTGROUP_NONE, /* optinfo_flags */
3439 gate_intra_sra, /* gate */
3440 early_intra_sra, /* execute */
3441 NULL, /* sub */
3442 NULL, /* next */
3443 0, /* static_pass_number */
3444 TV_TREE_SRA, /* tv_id */
3445 PROP_cfg | PROP_ssa, /* properties_required */
3446 0, /* properties_provided */
3447 0, /* properties_destroyed */
3448 0, /* todo_flags_start */
3449 TODO_update_ssa
3450 | TODO_ggc_collect
3451 | TODO_verify_ssa /* todo_flags_finish */
3455 struct gimple_opt_pass pass_sra =
3458 GIMPLE_PASS,
3459 "sra", /* name */
3460 OPTGROUP_NONE, /* optinfo_flags */
3461 gate_intra_sra, /* gate */
3462 late_intra_sra, /* execute */
3463 NULL, /* sub */
3464 NULL, /* next */
3465 0, /* static_pass_number */
3466 TV_TREE_SRA, /* tv_id */
3467 PROP_cfg | PROP_ssa, /* properties_required */
3468 0, /* properties_provided */
3469 0, /* properties_destroyed */
3470 TODO_update_address_taken, /* todo_flags_start */
3471 TODO_update_ssa
3472 | TODO_ggc_collect
3473 | TODO_verify_ssa /* todo_flags_finish */
3478 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3479 parameter. */
3481 static bool
3482 is_unused_scalar_param (tree parm)
3484 tree name;
3485 return (is_gimple_reg (parm)
3486 && (!(name = ssa_default_def (cfun, parm))
3487 || has_zero_uses (name)));
3490 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3491 examine whether there are any direct or otherwise infeasible ones. If so,
3492 return true, otherwise return false. PARM must be a gimple register with a
3493 non-NULL default definition. */
3495 static bool
3496 ptr_parm_has_direct_uses (tree parm)
3498 imm_use_iterator ui;
3499 gimple stmt;
3500 tree name = ssa_default_def (cfun, parm);
3501 bool ret = false;
3503 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3505 int uses_ok = 0;
3506 use_operand_p use_p;
3508 if (is_gimple_debug (stmt))
3509 continue;
3511 /* Valid uses include dereferences on the lhs and the rhs. */
3512 if (gimple_has_lhs (stmt))
3514 tree lhs = gimple_get_lhs (stmt);
3515 while (handled_component_p (lhs))
3516 lhs = TREE_OPERAND (lhs, 0);
3517 if (TREE_CODE (lhs) == MEM_REF
3518 && TREE_OPERAND (lhs, 0) == name
3519 && integer_zerop (TREE_OPERAND (lhs, 1))
3520 && types_compatible_p (TREE_TYPE (lhs),
3521 TREE_TYPE (TREE_TYPE (name)))
3522 && !TREE_THIS_VOLATILE (lhs))
3523 uses_ok++;
3525 if (gimple_assign_single_p (stmt))
3527 tree rhs = gimple_assign_rhs1 (stmt);
3528 while (handled_component_p (rhs))
3529 rhs = TREE_OPERAND (rhs, 0);
3530 if (TREE_CODE (rhs) == MEM_REF
3531 && TREE_OPERAND (rhs, 0) == name
3532 && integer_zerop (TREE_OPERAND (rhs, 1))
3533 && types_compatible_p (TREE_TYPE (rhs),
3534 TREE_TYPE (TREE_TYPE (name)))
3535 && !TREE_THIS_VOLATILE (rhs))
3536 uses_ok++;
3538 else if (is_gimple_call (stmt))
3540 unsigned i;
3541 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3543 tree arg = gimple_call_arg (stmt, i);
3544 while (handled_component_p (arg))
3545 arg = TREE_OPERAND (arg, 0);
3546 if (TREE_CODE (arg) == MEM_REF
3547 && TREE_OPERAND (arg, 0) == name
3548 && integer_zerop (TREE_OPERAND (arg, 1))
3549 && types_compatible_p (TREE_TYPE (arg),
3550 TREE_TYPE (TREE_TYPE (name)))
3551 && !TREE_THIS_VOLATILE (arg))
3552 uses_ok++;
3556 /* If the number of valid uses does not match the number of
3557 uses in this stmt there is an unhandled use. */
3558 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3559 --uses_ok;
3561 if (uses_ok != 0)
3562 ret = true;
3564 if (ret)
3565 BREAK_FROM_IMM_USE_STMT (ui);
3568 return ret;
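/* An illustration with hypothetical functions: in

     int f (int *p) { return *p; }

   the only use of the default definition of p is a dereference matching
   the MEM_REF pattern above, so the function returns false; in

     int g (int *p) { return p == 0; }

   p itself is used directly, the use is not counted as OK and the
   function returns true, disqualifying p from IPA-SRA.  */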
3571 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3572 them in candidate_bitmap. Note that these do not necessarily include
3573 parameters which are unused and thus can be removed. Return true iff any
3574 such candidate has been found. */
3576 static bool
3577 find_param_candidates (void)
3579 tree parm;
3580 int count = 0;
3581 bool ret = false;
3582 const char *msg;
3584 for (parm = DECL_ARGUMENTS (current_function_decl);
3585 parm;
3586 parm = DECL_CHAIN (parm))
3588 tree type = TREE_TYPE (parm);
3589 void **slot;
3591 count++;
3593 if (TREE_THIS_VOLATILE (parm)
3594 || TREE_ADDRESSABLE (parm)
3595 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3596 continue;
3598 if (is_unused_scalar_param (parm))
3600 ret = true;
3601 continue;
3604 if (POINTER_TYPE_P (type))
3606 type = TREE_TYPE (type);
3608 if (TREE_CODE (type) == FUNCTION_TYPE
3609 || TYPE_VOLATILE (type)
3610 || (TREE_CODE (type) == ARRAY_TYPE
3611 && TYPE_NONALIASED_COMPONENT (type))
3612 || !is_gimple_reg (parm)
3613 || is_va_list_type (type)
3614 || ptr_parm_has_direct_uses (parm))
3615 continue;
3617 else if (!AGGREGATE_TYPE_P (type))
3618 continue;
3620 if (!COMPLETE_TYPE_P (type)
3621 || !host_integerp (TYPE_SIZE (type), 1)
3622 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3623 || (AGGREGATE_TYPE_P (type)
3624 && type_internals_preclude_sra_p (type, &msg)))
3625 continue;
3627 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3628 slot = htab_find_slot_with_hash (candidates, parm,
3629 DECL_UID (parm), INSERT);
3630 *slot = (void *) parm;
3632 ret = true;
3633 if (dump_file && (dump_flags & TDF_DETAILS))
3635 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3636 print_generic_expr (dump_file, parm, 0);
3637 fprintf (dump_file, "\n");
3641 func_param_count = count;
3642 return ret;
3645 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3646 maybe_modified. */
3648 static bool
3649 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3650 void *data)
3652 struct access *repr = (struct access *) data;
3654 repr->grp_maybe_modified = 1;
3655 return true;
3658 /* Analyze what representatives (in linked lists accessible from
3659 REPRESENTATIVES) can be modified by side effects of statements in the
3660 current function. */
3662 static void
3663 analyze_modified_params (vec<access_p> representatives)
3665 int i;
3667 for (i = 0; i < func_param_count; i++)
3669 struct access *repr;
3671 for (repr = representatives[i];
3672 repr;
3673 repr = repr->next_grp)
3675 struct access *access;
3676 bitmap visited;
3677 ao_ref ar;
3679 if (no_accesses_p (repr))
3680 continue;
3681 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3682 || repr->grp_maybe_modified)
3683 continue;
3685 ao_ref_init (&ar, repr->expr);
3686 visited = BITMAP_ALLOC (NULL);
3687 for (access = repr; access; access = access->next_sibling)
3689 /* All accesses are read ones, otherwise grp_maybe_modified would
3690 be trivially set. */
3691 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3692 mark_maybe_modified, repr, &visited);
3693 if (repr->grp_maybe_modified)
3694 break;
3696 BITMAP_FREE (visited);
3701 /* Propagate distances in bb_dereferences in the direction opposite to that of
3702 the control flow edges, in each step storing the maximum of the current value
3703 and the minimum of all successors. These steps are repeated until the table
3704 stabilizes. Note that BBs which might terminate the function (according to
3705 the final_bbs bitmap) are never updated in this way. */
3707 static void
3708 propagate_dereference_distances (void)
3710 vec<basic_block> queue;
3711 basic_block bb;
3713 queue.create (last_basic_block_for_function (cfun));
3714 queue.quick_push (ENTRY_BLOCK_PTR);
3715 FOR_EACH_BB (bb)
3717 queue.quick_push (bb);
3718 bb->aux = bb;
3721 while (!queue.is_empty ())
3723 edge_iterator ei;
3724 edge e;
3725 bool change = false;
3726 int i;
3728 bb = queue.pop ();
3729 bb->aux = NULL;
3731 if (bitmap_bit_p (final_bbs, bb->index))
3732 continue;
3734 for (i = 0; i < func_param_count; i++)
3736 int idx = bb->index * func_param_count + i;
3737 bool first = true;
3738 HOST_WIDE_INT inh = 0;
3740 FOR_EACH_EDGE (e, ei, bb->succs)
3742 int succ_idx = e->dest->index * func_param_count + i;
3744 if (e->dest == EXIT_BLOCK_PTR)
3745 continue;
3747 if (first)
3749 first = false;
3750 inh = bb_dereferences [succ_idx];
3752 else if (bb_dereferences [succ_idx] < inh)
3753 inh = bb_dereferences [succ_idx];
3756 if (!first && bb_dereferences[idx] < inh)
3758 bb_dereferences[idx] = inh;
3759 change = true;
3763 if (change && !bitmap_bit_p (final_bbs, bb->index))
3764 FOR_EACH_EDGE (e, ei, bb->preds)
3766 if (e->src->aux)
3767 continue;
3769 e->src->aux = e->src;
3770 queue.quick_push (e->src);
3774 queue.release ();
3777 /* Dump a dereferences TABLE with heading STR to file F. */
3779 static void
3780 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3782 basic_block bb;
3784 fprintf (f, "%s", str);
3785 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3787 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3788 if (bb != EXIT_BLOCK_PTR)
3790 int i;
3791 for (i = 0; i < func_param_count; i++)
3793 int idx = bb->index * func_param_count + i;
3794 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3797 fprintf (f, "\n");
3799 fprintf (f, "\n");
3802 /* Determine which (parts of) parameters passed by reference and not
3803 assigned to are not certainly dereferenced in this function and for which
3804 the dereferencing thus cannot be safely moved to the caller without potentially
3805 introducing a segfault. Mark such REPRESENTATIVES as
3806 grp_not_necessarilly_dereferenced.
3808 The maximum dereferenced "distance," i.e. the offset + size of the accessed
3809 part, is calculated for each pointer parameter rather than a simple
3810 boolean, to handle cases when only a fraction of the whole
3811 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3812 an example).
3814 The maximum dereference distances for each pointer parameter and BB are
3815 already stored in bb_dereferences. This routine simply propagates these
3816 values upwards by propagate_dereference_distances and then compares the
3817 distances of individual parameters in the ENTRY BB to the equivalent
3818 distances of each representative of a (fraction of a) parameter. */
3820 static void
3821 analyze_caller_dereference_legality (vec<access_p> representatives)
3823 int i;
3825 if (dump_file && (dump_flags & TDF_DETAILS))
3826 dump_dereferences_table (dump_file,
3827 "Dereference table before propagation:\n",
3828 bb_dereferences);
3830 propagate_dereference_distances ();
3832 if (dump_file && (dump_flags & TDF_DETAILS))
3833 dump_dereferences_table (dump_file,
3834 "Dereference table after propagation:\n",
3835 bb_dereferences);
3837 for (i = 0; i < func_param_count; i++)
3839 struct access *repr = representatives[i];
3840 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3842 if (!repr || no_accesses_p (repr))
3843 continue;
3845 do
3847 if ((repr->offset + repr->size) > bb_dereferences[idx])
3848 repr->grp_not_necessarilly_dereferenced = 1;
3849 repr = repr->next_grp;
3851 while (repr);
3855 /* Return the representative access for the parameter declaration PARM if it is
3856 a scalar passed by reference which is not written to and the pointer value
3857 is not used directly. Thus, if it is legal to dereference it in the caller
3858 and we can rule out modifications through aliases, such a parameter should
3859 be turned into one passed by value. Return NULL otherwise. */
3861 static struct access *
3862 unmodified_by_ref_scalar_representative (tree parm)
3864 int i, access_count;
3865 struct access *repr;
3866 vec<access_p> *access_vec;
3868 access_vec = get_base_access_vector (parm);
3869 gcc_assert (access_vec);
3870 repr = (*access_vec)[0];
3871 if (repr->write)
3872 return NULL;
3873 repr->group_representative = repr;
3875 access_count = access_vec->length ();
3876 for (i = 1; i < access_count; i++)
3878 struct access *access = (*access_vec)[i];
3879 if (access->write)
3880 return NULL;
3881 access->group_representative = repr;
3882 access->next_sibling = repr->next_sibling;
3883 repr->next_sibling = access;
3886 repr->grp_read = 1;
3887 repr->grp_scalar_ptr = 1;
3888 return repr;
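/* For illustration only (a made-up example with invented names), the
   kind of parameter handled above is a read-only scalar passed by
   reference:

     static int
     get_count (const int *count_p)
     {
       return *count_p + 1;
     }

   If the later analyses also rule out indirect modification and prove
   the dereference legal in callers, IPA-SRA turns this into something
   roughly equivalent to

     static int
     get_count (int count)
     {
       return count + 1;
     }

   with every call site adjusted to pass the pointed-to value.  */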
3891 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3892 associated with. REQ_ALIGN is the minimum required alignment. */
3894 static bool
3895 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3897 unsigned int exp_align;
3898 /* Avoid issues such as the second simple testcase in PR 42025. The problem
3899 is an incompatible assignment in a call statement (and possibly even in asm
3900 statements). This can be relaxed by using a new temporary but only for
3901 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3902 intraprocedural SRA we deal with this by keeping the old aggregate around,
3903 something we cannot do in IPA-SRA.) */
3904 if (access->write
3905 && (is_gimple_call (access->stmt)
3906 || gimple_code (access->stmt) == GIMPLE_ASM))
3907 return true;
3909 exp_align = get_object_alignment (access->expr);
3910 if (exp_align < req_align)
3911 return true;
3913 return false;
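/* A hypothetical example of the alignment check above: given a packed
   structure

     struct __attribute__ ((packed)) msg
     {
       char tag;
       int payload;
     };

   a read of the payload member may have an object alignment of only
   8 bits while TYPE_ALIGN of int is typically 32 bits, so such an
   access would preclude IPA-SRA of the corresponding parameter.  */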
3917 /* Sort collected accesses for parameter PARM, identify representatives for
3918 each accessed region and link them together. Return NULL if there are
3919 different but overlapping accesses, return the special pointer value
3920 &no_accesses_representant if there are no accesses for this parameter at
3921 all, and return the first representative otherwise. Set *RO_GRP if there
3922 is a group containing only read (i.e. no write) accesses. */
3924 static struct access *
3925 splice_param_accesses (tree parm, bool *ro_grp)
3927 int i, j, access_count, group_count;
3928 int agg_size, total_size = 0;
3929 struct access *access, *res, **prev_acc_ptr = &res;
3930 vec<access_p> *access_vec;
3932 access_vec = get_base_access_vector (parm);
3933 if (!access_vec)
3934 return &no_accesses_representant;
3935 access_count = access_vec->length ();
3937 access_vec->qsort (compare_access_positions);
3939 i = 0;
3940 total_size = 0;
3941 group_count = 0;
3942 while (i < access_count)
3944 bool modification;
3945 tree a1_alias_type;
3946 access = (*access_vec)[i];
3947 modification = access->write;
3948 if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
3949 return NULL;
3950 a1_alias_type = reference_alias_ptr_type (access->expr);
3952 /* Access is about to become group representative unless we find some
3953 nasty overlap which would preclude us from breaking this parameter
3954 apart. */
3956 j = i + 1;
3957 while (j < access_count)
3959 struct access *ac2 = (*access_vec)[j];
3960 if (ac2->offset != access->offset)
3962 /* All or nothing law for parameters. */
3963 if (access->offset + access->size > ac2->offset)
3964 return NULL;
3965 else
3966 break;
3968 else if (ac2->size != access->size)
3969 return NULL;
3971 if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
3972 || (ac2->type != access->type
3973 && (TREE_ADDRESSABLE (ac2->type)
3974 || TREE_ADDRESSABLE (access->type)))
3975 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3976 return NULL;
3978 modification |= ac2->write;
3979 ac2->group_representative = access;
3980 ac2->next_sibling = access->next_sibling;
3981 access->next_sibling = ac2;
3982 j++;
3985 group_count++;
3986 access->grp_maybe_modified = modification;
3987 if (!modification)
3988 *ro_grp = true;
3989 *prev_acc_ptr = access;
3990 prev_acc_ptr = &access->next_grp;
3991 total_size += access->size;
3992 i = j;
3995 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3996 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3997 else
3998 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3999 if (total_size >= agg_size)
4000 return NULL;
4002 gcc_assert (group_count > 0);
4003 return res;
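/* A made-up example of the "all or nothing law" applied above: if a
   parameter is read both as a whole aggregate and as one of its fields,

     struct pair { int a; int b; };

     static int
     use (struct pair *p, struct pair *q)
     {
       *q = *p;
       return p->a;
     }

   then *p is an access at offset 0 covering the whole structure while
   p->a is an access at offset 0 covering only the first field.  The two
   accesses share an offset but differ in size, so the scan above
   returns NULL and the parameter is left untouched.  */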
4006 /* Decide whether the parameter with representative accesses given by REPR
4007 should be reduced into components; return their number, or zero. */
4009 static int
4010 decide_one_param_reduction (struct access *repr)
4012 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4013 bool by_ref;
4014 tree parm;
4016 parm = repr->base;
4017 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4018 gcc_assert (cur_parm_size > 0);
4020 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4022 by_ref = true;
4023 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4025 else
4027 by_ref = false;
4028 agg_size = cur_parm_size;
4031 if (dump_file)
4033 struct access *acc;
4034 fprintf (dump_file, "Evaluating PARAM group sizes for ");
4035 print_generic_expr (dump_file, parm, 0);
4036 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4037 for (acc = repr; acc; acc = acc->next_grp)
4038 dump_access (dump_file, acc, true);
4041 total_size = 0;
4042 new_param_count = 0;
4044 for (; repr; repr = repr->next_grp)
4046 gcc_assert (parm == repr->base);
4048 /* Taking the address of a non-addressable field is verboten. */
4049 if (by_ref && repr->non_addressable)
4050 return 0;
4052 /* Do not decompose a non-BLKmode param in a way that would
4053 create BLKmode params. Especially for by-reference passing
4054 (thus, pointer-type param) this is hardly worthwhile. */
4055 if (DECL_MODE (parm) != BLKmode
4056 && TYPE_MODE (repr->type) == BLKmode)
4057 return 0;
4059 if (!by_ref || (!repr->grp_maybe_modified
4060 && !repr->grp_not_necessarilly_dereferenced))
4061 total_size += repr->size;
4062 else
4063 total_size += cur_parm_size;
4065 new_param_count++;
4068 gcc_assert (new_param_count > 0);
4070 if (optimize_function_for_size_p (cfun))
4071 parm_size_limit = cur_parm_size;
4072 else
4073 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4074 * cur_parm_size);
4076 if (total_size < agg_size
4077 && total_size <= parm_size_limit)
4079 if (dump_file)
4080 fprintf (dump_file, " ....will be split into %i components\n",
4081 new_param_count);
4082 return new_param_count;
4084 else
4085 return 0;
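/* A sketch of the sizing rule above, assuming 64-bit pointers and the
   default --param ipa-sra-ptr-growth-factor=2 (the example itself is
   made up; the code measures sizes in bits, bytes are used here for
   brevity):

     struct big { long x; long y; long z; long w; };

     static long
     pick (struct big *p)
     {
       return p->x + p->y;
     }

   Here cur_parm_size is the 8-byte pointer, agg_size the 32-byte
   aggregate and total_size the 16 bytes actually read.  Since 16 < 32
   and 16 <= 2 * 8, the parameter may be split into two by-value
   components.  */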
4088 /* The order of the following enums is important; we need to do extra work for
4089 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
4090 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4091 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4093 /* Identify representatives of all accesses to all candidate parameters for
4094 IPA-SRA. Return a result based on what representatives have been found. */
4096 static enum ipa_splicing_result
4097 splice_all_param_accesses (vec<access_p> &representatives)
4099 enum ipa_splicing_result result = NO_GOOD_ACCESS;
4100 tree parm;
4101 struct access *repr;
4103 representatives.create (func_param_count);
4105 for (parm = DECL_ARGUMENTS (current_function_decl);
4106 parm;
4107 parm = DECL_CHAIN (parm))
4109 if (is_unused_scalar_param (parm))
4111 representatives.quick_push (&no_accesses_representant);
4112 if (result == NO_GOOD_ACCESS)
4113 result = UNUSED_PARAMS;
4115 else if (POINTER_TYPE_P (TREE_TYPE (parm))
4116 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4117 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4119 repr = unmodified_by_ref_scalar_representative (parm);
4120 representatives.quick_push (repr);
4121 if (repr)
4122 result = UNMODIF_BY_REF_ACCESSES;
4124 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4126 bool ro_grp = false;
4127 repr = splice_param_accesses (parm, &ro_grp);
4128 representatives.quick_push (repr);
4130 if (repr && !no_accesses_p (repr))
4132 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4134 if (ro_grp)
4135 result = UNMODIF_BY_REF_ACCESSES;
4136 else if (result < MODIF_BY_REF_ACCESSES)
4137 result = MODIF_BY_REF_ACCESSES;
4139 else if (result < BY_VAL_ACCESSES)
4140 result = BY_VAL_ACCESSES;
4142 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4143 result = UNUSED_PARAMS;
4145 else
4146 representatives.quick_push (NULL);
4149 if (result == NO_GOOD_ACCESS)
4151 representatives.release ();
4152 return NO_GOOD_ACCESS;
4155 return result;
4158 /* Return the index of BASE in PARMS. Abort if it is not found. */
4160 static inline int
4161 get_param_index (tree base, vec<tree> parms)
4163 int i, len;
4165 len = parms.length ();
4166 for (i = 0; i < len; i++)
4167 if (parms[i] == base)
4168 return i;
4169 gcc_unreachable ();
4172 /* Convert the decisions made at the representative level into compact
4173 parameter adjustments. REPRESENTATIVES are pointers to the first
4174 representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the
4175 expected final number of adjustments. */
4177 static ipa_parm_adjustment_vec
4178 turn_representatives_into_adjustments (vec<access_p> representatives,
4179 int adjustments_count)
4181 vec<tree> parms;
4182 ipa_parm_adjustment_vec adjustments;
4183 tree parm;
4184 int i;
4186 gcc_assert (adjustments_count > 0);
4187 parms = ipa_get_vector_of_formal_parms (current_function_decl);
4188 adjustments.create (adjustments_count);
4189 parm = DECL_ARGUMENTS (current_function_decl);
4190 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4192 struct access *repr = representatives[i];
4194 if (!repr || no_accesses_p (repr))
4196 struct ipa_parm_adjustment adj;
4198 memset (&adj, 0, sizeof (adj));
4199 adj.base_index = get_param_index (parm, parms);
4200 adj.base = parm;
4201 if (!repr)
4202 adj.copy_param = 1;
4203 else
4204 adj.remove_param = 1;
4205 adjustments.quick_push (adj);
4207 else
4209 struct ipa_parm_adjustment adj;
4210 int index = get_param_index (parm, parms);
4212 for (; repr; repr = repr->next_grp)
4214 memset (&adj, 0, sizeof (adj));
4215 gcc_assert (repr->base == parm);
4216 adj.base_index = index;
4217 adj.base = repr->base;
4218 adj.type = repr->type;
4219 adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4220 adj.offset = repr->offset;
4221 adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4222 && (repr->grp_maybe_modified
4223 || repr->grp_not_necessarilly_dereferenced));
4224 adjustments.quick_push (adj);
4228 parms.release ();
4229 return adjustments;
4232 /* Analyze the collected accesses and produce a plan for what to do with the
4233 parameters in the form of adjustments; an empty vector means no changes. */
4235 static ipa_parm_adjustment_vec
4236 analyze_all_param_acesses (void)
4238 enum ipa_splicing_result repr_state;
4239 bool proceed = false;
4240 int i, adjustments_count = 0;
4241 vec<access_p> representatives;
4242 ipa_parm_adjustment_vec adjustments;
4244 repr_state = splice_all_param_accesses (representatives);
4245 if (repr_state == NO_GOOD_ACCESS)
4246 return ipa_parm_adjustment_vec ();
4248 /* If there are any parameters passed by reference which are not modified
4249 directly, we need to check whether they can be modified indirectly. */
4250 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4252 analyze_caller_dereference_legality (representatives);
4253 analyze_modified_params (representatives);
4256 for (i = 0; i < func_param_count; i++)
4258 struct access *repr = representatives[i];
4260 if (repr && !no_accesses_p (repr))
4262 if (repr->grp_scalar_ptr)
4264 adjustments_count++;
4265 if (repr->grp_not_necessarilly_dereferenced
4266 || repr->grp_maybe_modified)
4267 representatives[i] = NULL;
4268 else
4270 proceed = true;
4271 sra_stats.scalar_by_ref_to_by_val++;
4274 else
4276 int new_components = decide_one_param_reduction (repr);
4278 if (new_components == 0)
4280 representatives[i] = NULL;
4281 adjustments_count++;
4283 else
4285 adjustments_count += new_components;
4286 sra_stats.aggregate_params_reduced++;
4287 sra_stats.param_reductions_created += new_components;
4288 proceed = true;
4292 else
4294 if (no_accesses_p (repr))
4296 proceed = true;
4297 sra_stats.deleted_unused_parameters++;
4299 adjustments_count++;
4303 if (!proceed && dump_file)
4304 fprintf (dump_file, "NOT proceeding to change params.\n");
4306 if (proceed)
4307 adjustments = turn_representatives_into_adjustments (representatives,
4308 adjustments_count);
4309 else
4310 adjustments = ipa_parm_adjustment_vec ();
4312 representatives.release ();
4313 return adjustments;
4316 /* If a parameter replacement identified by ADJ does not yet exist in the form
4317 of a declaration, create and record it; otherwise return the previously
4318 created one. */
4320 static tree
4321 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4323 tree repl;
4324 if (!adj->new_ssa_base)
4326 char *pretty_name = make_fancy_name (adj->base);
4328 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4329 DECL_NAME (repl) = get_identifier (pretty_name);
4330 obstack_free (&name_obstack, pretty_name);
4332 adj->new_ssa_base = repl;
4334 else
4335 repl = adj->new_ssa_base;
4336 return repl;
4339 /* Find the first adjustment for a particular parameter BASE in a vector of
4340 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4341 adjustment. */
4343 static struct ipa_parm_adjustment *
4344 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4346 int i, len;
4348 len = adjustments.length ();
4349 for (i = 0; i < len; i++)
4351 struct ipa_parm_adjustment *adj;
4353 adj = &adjustments[i];
4354 if (!adj->copy_param && adj->base == base)
4355 return adj;
4358 return NULL;
4361 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4362 removed because its value is not used, replace the SSA_NAME, together with
4363 all of its uses, with one relating to a newly created VAR_DECL and return
4364 true. ADJUSTMENTS is the vector of adjustments. */
4366 static bool
4367 replace_removed_params_ssa_names (gimple stmt,
4368 ipa_parm_adjustment_vec adjustments)
4370 struct ipa_parm_adjustment *adj;
4371 tree lhs, decl, repl, name;
4373 if (gimple_code (stmt) == GIMPLE_PHI)
4374 lhs = gimple_phi_result (stmt);
4375 else if (is_gimple_assign (stmt))
4376 lhs = gimple_assign_lhs (stmt);
4377 else if (is_gimple_call (stmt))
4378 lhs = gimple_call_lhs (stmt);
4379 else
4380 gcc_unreachable ();
4382 if (TREE_CODE (lhs) != SSA_NAME)
4383 return false;
4385 decl = SSA_NAME_VAR (lhs);
4386 if (decl == NULL_TREE
4387 || TREE_CODE (decl) != PARM_DECL)
4388 return false;
4390 adj = get_adjustment_for_base (adjustments, decl);
4391 if (!adj)
4392 return false;
4394 repl = get_replaced_param_substitute (adj);
4395 name = make_ssa_name (repl, stmt);
4397 if (dump_file)
4399 fprintf (dump_file, "replacing an SSA name of a removed param ");
4400 print_generic_expr (dump_file, lhs, 0);
4401 fprintf (dump_file, " with ");
4402 print_generic_expr (dump_file, name, 0);
4403 fprintf (dump_file, "\n");
4406 if (is_gimple_assign (stmt))
4407 gimple_assign_set_lhs (stmt, name);
4408 else if (is_gimple_call (stmt))
4409 gimple_call_set_lhs (stmt, name);
4410 else
4411 gimple_phi_set_result (stmt, name);
4413 replace_uses_by (lhs, name);
4414 release_ssa_name (lhs);
4415 return true;
4418 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4419 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether
4420 the function should care about type incompatibility between the current
4421 and new expressions. If it is false, the function will leave
4422 incompatibility issues to the caller. Return true iff the expression
4423 was modified. */
4425 static bool
4426 sra_ipa_modify_expr (tree *expr, bool convert,
4427 ipa_parm_adjustment_vec adjustments)
4429 int i, len;
4430 struct ipa_parm_adjustment *adj, *cand = NULL;
4431 HOST_WIDE_INT offset, size, max_size;
4432 tree base, src;
4434 len = adjustments.length ();
4436 if (TREE_CODE (*expr) == BIT_FIELD_REF
4437 || TREE_CODE (*expr) == IMAGPART_EXPR
4438 || TREE_CODE (*expr) == REALPART_EXPR)
4440 expr = &TREE_OPERAND (*expr, 0);
4441 convert = true;
4444 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4445 if (!base || size == -1 || max_size == -1)
4446 return false;
4448 if (TREE_CODE (base) == MEM_REF)
4450 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4451 base = TREE_OPERAND (base, 0);
4454 base = get_ssa_base_param (base);
4455 if (!base || TREE_CODE (base) != PARM_DECL)
4456 return false;
4458 for (i = 0; i < len; i++)
4460 adj = &adjustments[i];
4462 if (adj->base == base &&
4463 (adj->offset == offset || adj->remove_param))
4465 cand = adj;
4466 break;
4469 if (!cand || cand->copy_param || cand->remove_param)
4470 return false;
4472 if (cand->by_ref)
4473 src = build_simple_mem_ref (cand->reduction);
4474 else
4475 src = cand->reduction;
4477 if (dump_file && (dump_flags & TDF_DETAILS))
4479 fprintf (dump_file, "About to replace expr ");
4480 print_generic_expr (dump_file, *expr, 0);
4481 fprintf (dump_file, " with ");
4482 print_generic_expr (dump_file, src, 0);
4483 fprintf (dump_file, "\n");
4486 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4488 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4489 *expr = vce;
4491 else
4492 *expr = src;
4493 return true;
4496 /* If the statement pointed to by STMT_PTR contains any expressions that need
4497 to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle
4498 any potential type incompatibilities (GSI is used to accommodate conversion
4499 statements and must point to the statement). Return true iff the statement
4500 was modified. */
4502 static bool
4503 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4504 ipa_parm_adjustment_vec adjustments)
4506 gimple stmt = *stmt_ptr;
4507 tree *lhs_p, *rhs_p;
4508 bool any;
4510 if (!gimple_assign_single_p (stmt))
4511 return false;
4513 rhs_p = gimple_assign_rhs1_ptr (stmt);
4514 lhs_p = gimple_assign_lhs_ptr (stmt);
4516 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4517 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4518 if (any)
4520 tree new_rhs = NULL_TREE;
4522 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4524 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4526 /* V_C_Es of constructors can cause trouble (PR 42714). */
4527 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4528 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4529 else
4530 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4531 NULL);
4533 else
4534 new_rhs = fold_build1_loc (gimple_location (stmt),
4535 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4536 *rhs_p);
4538 else if (REFERENCE_CLASS_P (*rhs_p)
4539 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4540 && !is_gimple_reg (*lhs_p))
4541 /* This can happen when an assignment between two single-field
4542 structures is turned into an assignment between two pointers to
4543 scalars (PR 42237). */
4544 new_rhs = *rhs_p;
4546 if (new_rhs)
4548 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4549 true, GSI_SAME_STMT);
4551 gimple_assign_set_rhs_from_tree (gsi, tmp);
4554 return true;
4557 return false;
4560 /* Traverse the function body and perform all modifications described in
4561 ADJUSTMENTS. Return true iff the CFG has been changed. */
4563 static bool
4564 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4566 bool cfg_changed = false;
4567 basic_block bb;
4569 FOR_EACH_BB (bb)
4571 gimple_stmt_iterator gsi;
4573 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4574 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4576 gsi = gsi_start_bb (bb);
4577 while (!gsi_end_p (gsi))
4579 gimple stmt = gsi_stmt (gsi);
4580 bool modified = false;
4581 tree *t;
4582 unsigned i;
4584 switch (gimple_code (stmt))
4586 case GIMPLE_RETURN:
4587 t = gimple_return_retval_ptr (stmt);
4588 if (*t != NULL_TREE)
4589 modified |= sra_ipa_modify_expr (t, true, adjustments);
4590 break;
4592 case GIMPLE_ASSIGN:
4593 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4594 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4595 break;
4597 case GIMPLE_CALL:
4598 /* Operands must be processed before the lhs. */
4599 for (i = 0; i < gimple_call_num_args (stmt); i++)
4601 t = gimple_call_arg_ptr (stmt, i);
4602 modified |= sra_ipa_modify_expr (t, true, adjustments);
4605 if (gimple_call_lhs (stmt))
4607 t = gimple_call_lhs_ptr (stmt);
4608 modified |= sra_ipa_modify_expr (t, false, adjustments);
4609 modified |= replace_removed_params_ssa_names (stmt,
4610 adjustments);
4612 break;
4614 case GIMPLE_ASM:
4615 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4617 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4618 modified |= sra_ipa_modify_expr (t, true, adjustments);
4620 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4622 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4623 modified |= sra_ipa_modify_expr (t, false, adjustments);
4625 break;
4627 default:
4628 break;
4631 if (modified)
4633 update_stmt (stmt);
4634 if (maybe_clean_eh_stmt (stmt)
4635 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4636 cfg_changed = true;
4638 gsi_next (&gsi);
4642 return cfg_changed;
4645 /* Call gimple_debug_bind_reset_value on all debug statements describing
4646 gimple register parameters that are being removed or replaced. */
4648 static void
4649 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4651 int i, len;
4652 gimple_stmt_iterator *gsip = NULL, gsi;
4654 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4656 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4657 gsip = &gsi;
4659 len = adjustments.length ();
4660 for (i = 0; i < len; i++)
4662 struct ipa_parm_adjustment *adj;
4663 imm_use_iterator ui;
4664 gimple stmt, def_temp;
4665 tree name, vexpr, copy = NULL_TREE;
4666 use_operand_p use_p;
4668 adj = &adjustments[i];
4669 if (adj->copy_param || !is_gimple_reg (adj->base))
4670 continue;
4671 name = ssa_default_def (cfun, adj->base);
4672 vexpr = NULL;
4673 if (name)
4674 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4676 /* All other users must have been removed by
4677 ipa_sra_modify_function_body. */
4678 gcc_assert (is_gimple_debug (stmt));
4679 if (vexpr == NULL && gsip != NULL)
4681 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4682 vexpr = make_node (DEBUG_EXPR_DECL);
4683 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4684 NULL);
4685 DECL_ARTIFICIAL (vexpr) = 1;
4686 TREE_TYPE (vexpr) = TREE_TYPE (name);
4687 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4688 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4690 if (vexpr)
4692 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4693 SET_USE (use_p, vexpr);
4695 else
4696 gimple_debug_bind_reset_value (stmt);
4697 update_stmt (stmt);
4699 /* Create a VAR_DECL for debug info purposes. */
4700 if (!DECL_IGNORED_P (adj->base))
4702 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4703 VAR_DECL, DECL_NAME (adj->base),
4704 TREE_TYPE (adj->base));
4705 if (DECL_PT_UID_SET_P (adj->base))
4706 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4707 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4708 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4709 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4710 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4711 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4712 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4713 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4714 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4715 SET_DECL_RTL (copy, 0);
4716 TREE_USED (copy) = 1;
4717 DECL_CONTEXT (copy) = current_function_decl;
4718 add_local_decl (cfun, copy);
4719 DECL_CHAIN (copy) =
4720 BLOCK_VARS (DECL_INITIAL (current_function_decl));
4721 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4723 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4725 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4726 if (vexpr)
4727 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4728 else
4729 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4730 NULL);
4731 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4736 /* Return true iff some caller of NODE passes fewer actual arguments than
4737 there are formal parameters in the current function. */
4739 static bool
4740 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4741 void *data ATTRIBUTE_UNUSED)
4743 struct cgraph_edge *cs;
4744 for (cs = node->callers; cs; cs = cs->next_caller)
4745 if (!callsite_has_enough_arguments_p (cs->call_stmt))
4746 return true;
4748 return false;
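/* A contrived example of how a caller can pass fewer actual arguments
   than there are formal parameters: the callee is visible only through
   an unprototyped declaration,

     int callee ();

     int
     caller (void)
     {
       return callee ();
     }

   while its definition elsewhere takes parameters.  IPA-SRA must notice
   such call sites and leave the signature alone.  */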
4751 /* Convert all callers of NODE. */
4753 static bool
4754 convert_callers_for_node (struct cgraph_node *node,
4755 void *data)
4757 ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4758 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4759 struct cgraph_edge *cs;
4761 for (cs = node->callers; cs; cs = cs->next_caller)
4763 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4765 if (dump_file)
4766 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4767 cs->caller->uid, cs->callee->uid,
4768 xstrdup (cgraph_node_name (cs->caller)),
4769 xstrdup (cgraph_node_name (cs->callee)));
4771 ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4773 pop_cfun ();
4776 for (cs = node->callers; cs; cs = cs->next_caller)
4777 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4778 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4779 compute_inline_parameters (cs->caller, true);
4780 BITMAP_FREE (recomputed_callers);
4782 return true;
4785 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4787 static void
4788 convert_callers (struct cgraph_node *node, tree old_decl,
4789 ipa_parm_adjustment_vec adjustments)
4791 basic_block this_block;
4793 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4794 &adjustments, false);
4796 if (!encountered_recursive_call)
4797 return;
4799 FOR_EACH_BB (this_block)
4801 gimple_stmt_iterator gsi;
4803 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4805 gimple stmt = gsi_stmt (gsi);
4806 tree call_fndecl;
4807 if (gimple_code (stmt) != GIMPLE_CALL)
4808 continue;
4809 call_fndecl = gimple_call_fndecl (stmt);
4810 if (call_fndecl == old_decl)
4812 if (dump_file)
4813 fprintf (dump_file, "Adjusting recursive call");
4814 gimple_call_set_fndecl (stmt, node->symbol.decl);
4815 ipa_modify_call_arguments (NULL, stmt, adjustments);
4820 return;
4823 /* Perform all the modification required in IPA-SRA for NODE to have parameters
4824 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4826 static bool
4827 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4829 struct cgraph_node *new_node;
4830 bool cfg_changed;
4831 vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4833 rebuild_cgraph_edges ();
4834 free_dominance_info (CDI_DOMINATORS);
4835 pop_cfun ();
4837 new_node = cgraph_function_versioning (node, redirect_callers,
4838 NULL,
4839 NULL, false, NULL, NULL, "isra");
4840 redirect_callers.release ();
4842 push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4843 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4844 cfg_changed = ipa_sra_modify_function_body (adjustments);
4845 sra_ipa_reset_debug_stmts (adjustments);
4846 convert_callers (new_node, node->symbol.decl, adjustments);
4847 cgraph_make_node_local (new_node);
4848 return cfg_changed;
4851 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4852 its attributes; return true otherwise. NODE is the cgraph node of the
4853 current function. */
4855 static bool
4856 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4858 if (!cgraph_node_can_be_local_p (node))
4860 if (dump_file)
4861 fprintf (dump_file, "Function not local to this compilation unit.\n");
4862 return false;
4865 if (!node->local.can_change_signature)
4867 if (dump_file)
4868 fprintf (dump_file, "Function can not change signature.\n");
4869 return false;
4872 if (!tree_versionable_function_p (node->symbol.decl))
4874 if (dump_file)
4875 fprintf (dump_file, "Function is not versionable.\n");
4876 return false;
4879 if (!opt_for_fn (node->symbol.decl, optimize)
4880 || !opt_for_fn (node->symbol.decl, flag_ipa_sra))
4882 if (dump_file)
4883 fprintf (dump_file, "Function not optimized.\n");
4884 return false;
4887 if (DECL_VIRTUAL_P (current_function_decl))
4889 if (dump_file)
4890 fprintf (dump_file, "Function is a virtual method.\n");
4891 return false;
4894 if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4895 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4897 if (dump_file)
4898 fprintf (dump_file, "Function too big to be made truly local.\n");
4899 return false;
4902 if (!node->callers)
4904 if (dump_file)
4905 fprintf (dump_file,
4906 "Function has no callers in this compilation unit.\n");
4907 return false;
4910 if (cfun->stdarg)
4912 if (dump_file)
4913 fprintf (dump_file, "Function uses stdarg. \n");
4914 return false;
4917 if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4918 return false;
4920 return true;
4923 /* Perform early interprocedural SRA. */
4925 static unsigned int
4926 ipa_early_sra (void)
4928 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4929 ipa_parm_adjustment_vec adjustments;
4930 int ret = 0;
4932 if (!ipa_sra_preliminary_function_checks (node))
4933 return 0;
4935 sra_initialize ();
4936 sra_mode = SRA_MODE_EARLY_IPA;
4938 if (!find_param_candidates ())
4940 if (dump_file)
4941 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4942 goto simple_out;
4945 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4946 NULL, true))
4948 if (dump_file)
4949 fprintf (dump_file, "There are callers with insufficient number of "
4950 "arguments.\n");
4951 goto simple_out;
4954 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4955 func_param_count
4956 * last_basic_block_for_function (cfun));
4957 final_bbs = BITMAP_ALLOC (NULL);
4959 scan_function ();
4960 if (encountered_apply_args)
4962 if (dump_file)
4963 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4964 goto out;
4967 if (encountered_unchangable_recursive_call)
4969 if (dump_file)
4970 fprintf (dump_file, "Function calls itself with insufficient "
4971 "number of arguments.\n");
4972 goto out;
4975 adjustments = analyze_all_param_acesses ();
4976 if (!adjustments.exists ())
4977 goto out;
4978 if (dump_file)
4979 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4981 if (modify_function (node, adjustments))
4982 ret = TODO_update_ssa | TODO_cleanup_cfg;
4983 else
4984 ret = TODO_update_ssa;
4985 adjustments.release ();
4987 statistics_counter_event (cfun, "Unused parameters deleted",
4988 sra_stats.deleted_unused_parameters);
4989 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4990 sra_stats.scalar_by_ref_to_by_val);
4991 statistics_counter_event (cfun, "Aggregate parameters broken up",
4992 sra_stats.aggregate_params_reduced);
4993 statistics_counter_event (cfun, "Aggregate parameter components created",
4994 sra_stats.param_reductions_created);
4996 out:
4997 BITMAP_FREE (final_bbs);
4998 free (bb_dereferences);
4999 simple_out:
5000 sra_deinitialize ();
5001 return ret;
5004 /* Return true if early IPA-SRA shall be performed. */
5005 static bool
5006 ipa_early_sra_gate (void)
5008 return flag_ipa_sra && dbg_cnt (eipa_sra);
5011 struct gimple_opt_pass pass_early_ipa_sra =
5014 GIMPLE_PASS,
5015 "eipa_sra", /* name */
5016 OPTGROUP_NONE, /* optinfo_flags */
5017 ipa_early_sra_gate, /* gate */
5018 ipa_early_sra, /* execute */
5019 NULL, /* sub */
5020 NULL, /* next */
5021 0, /* static_pass_number */
5022 TV_IPA_SRA, /* tv_id */
5023 0, /* properties_required */
5024 0, /* properties_provided */
5025 0, /* properties_destroyed */
5026 0, /* todo_flags_start */
5027 TODO_dump_symtab /* todo_flags_finish */