/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "tree-ssa-address.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt) && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the
   ambiguity that arises from the same user variable being in multiple
   partitions (this is less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
class stack_var
{
public:
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static class stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;
/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
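
/* A worked example (illustrative values, not from the original source):
   with PREFERRED_STACK_BOUNDARY of 128 bits, the alignment used in
   expand_used_vars is 16 bytes; if targetm.starting_frame_offset ()
   yields 8, then off = 8 and frame_phase = 16 - 8 = 8, preserving the
   invariant (frame_offset + frame_phase) % 16 == 0 for every offset
   handed out by alloc_stack_frame_space below.  */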
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl, bool really_expand)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
         That is done before IPA and could bump alignment based on host
         backend even for offloaded code which wants different
         LOCAL_DECL_ALIGNMENT.  */
      if (really_expand)
        SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   round down otherwise.  Return the aligned BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
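
/* For illustration, assuming ALIGN is a power of two as required:
   align_base (13, 8, true) rounds up to 16, while
   align_base (13, 8, false) truncates down to 8.  Masking with -ALIGN
   clears the low bits; adding ALIGN - 1 first is what turns the
   truncation into rounding up.  */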
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
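
/* For illustration (values invented): on a FRAME_GROWS_DOWNWARD target
   with frame_phase == 0 and frame_offset == -4, requesting 8 bytes at
   8-byte alignment computes aligned_lower_bound (-4 - 8, 8) == -16, so
   the variable occupies [-16, -8) relative to virtual_stack_vars_rtx
   and frame_offset becomes -16.  */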
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl, bool really_expand)
{
  class stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl, really_expand);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return;
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  class stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */
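
  /* For illustration (hypothetical GIMPLE, not from the source):

       a = 1;            <- first mention: a becomes live
       b = a + 1;        <- first mention: b becomes live
       a = {CLOBBER};    <- end-of-scope clobber: a dies

     a and b are simultaneously live at the second statement, so their
     partitions are marked as conflicting and will not share a slot.  */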
  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
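
/* For illustration (invented values): on a target whose
   MAX_SUPPORTED_STACK_ALIGNMENT is 256 bits, a 512-bit-aligned object
   sorts before everything else ("large" first); among the rest, a
   64-byte object precedes a 16-byte one (size decreasing), and ties
   fall back to alignment and finally to SSA version / DECL_UID so the
   qsort result is stable.  */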
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  class stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
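
/* For illustration (invented sizes): given objects of 32, 16 and 8
   bytes where only the 16- and 8-byte objects conflict, the loop
   starts from the 32-byte representative and unions each
   non-conflicting follower into it.  Merging the 16-byte object pulls
   its conflict with the 8-byte object into the representative, so the
   result is the partitions {32, 16} and {8}; with no conflicts at all
   it would be a single partition {32, 16, 8}.  */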
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
class stack_vars_data
{
public:
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              if (data->asan_vec.is_empty ())
                {
                  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
                  prev_offset = frame_offset.to_constant ();
                }
              prev_offset = align_base (prev_offset,
                                        ASAN_MIN_RED_ZONE_SIZE,
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              unsigned HOST_WIDE_INT size
                = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
              if (data->asan_vec.is_empty ())
                size = MAX (size, ASAN_RED_ZONE_SIZE);

              unsigned HOST_WIDE_INT alignment = MAX (alignb,
                                                      ASAN_MIN_RED_ZONE_SIZE);
              offset = alloc_stack_frame_space (size, alignment);

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);

              /* Make sure a representative is unpoisoned if another
                 variable in the partition is handled by
                 use-after-scope sanitization.  */
              if (asan_handled_variables != NULL
                  && !asan_handled_variables->contains (repr_decl))
                {
                  for (j = i; j != EOC; j = stack_vars[j].next)
                    if (asan_handled_variables->contains (stack_vars[j].decl))
                      break;
                  if (j != EOC)
                    asan_handled_variables->add (repr_decl);
                }

              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (known_eq (large_alloc, large_size));
}
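
/* For illustration: each ASAN-protected partition contributes two
   entries to asan_vec, the aligned offset where the previous
   allocation ended (prev_offset) and the offset just past the variable
   itself (offset + size); the gaps between consecutive pairs are the
   red zones that the runtime poisons, and asan_decl_vec records the
   partition representative used in the description string.  */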
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that the in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when Pmode alignment also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var, true);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that the in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var, true);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
           < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
           || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          if (DECL_NONLOCAL_FRAME (var))
            error_at (DECL_SOURCE_LOCATION (current_function_decl),
                      "total size of local objects is too large");
          else
            error_at (DECL_SOURCE_LOCATION (var),
                      "size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar, really_expand);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function",
                   var);

          expand_one_stack_var (origvar);
        }
      return size;
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
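
/* For illustration, with the default --param ssp-buffer-size=8:
     char buf[4]  -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char buf[64] -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int v[4]     -> SPCT_HAS_ARRAY
     struct { char c[16]; } -> SPCT_HAS_AGGREGATE
                               | SPCT_HAS_LARGE_CHAR_ARRAY
                               | SPCT_HAS_ARRAY.  */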
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
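
/* For illustration: under -fstack-protector-strong, a plain char
   buf[64] is phase 1 (allocated closest to the guard), int v[4] is
   phase 2, and a scalar int is phase 0 (not segregated); under plain
   -fstack-protector only large character arrays return nonzero.  */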
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  poly_int64 size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return estimated_poly_value (size);
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (VAR_P (var)
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
2064 /* Expand all variables used in the function. */
2067 expand_used_vars (void)
2069 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
2070 auto_vec
<tree
> maybe_local_decls
;
2071 rtx_insn
*var_end_seq
= NULL
;
2074 bool gen_stack_protect_signal
= false;
2076 /* Compute the phase of the stack frame for this function. */
2078 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2079 int off
= targetm
.starting_frame_offset () % align
;
2080 frame_phase
= off
? align
- off
: 0;
2083 /* Set TREE_USED on all variables in the local_decls. */
2084 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2085 TREE_USED (var
) = 1;
2086 /* Clear TREE_USED on all variables associated with a block scope. */
2087 clear_tree_used (DECL_INITIAL (current_function_decl
));
2089 init_vars_expansion ();
2091 if (targetm
.use_pseudo_pic_reg ())
2092 pic_offset_table_rtx
= gen_reg_rtx (Pmode
);
2094 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
2096 if (bitmap_bit_p (SA
.partitions_for_parm_default_defs
, i
))
2099 tree var
= partition_to_var (SA
.map
, i
);
2101 gcc_assert (!virtual_operand_p (var
));
2103 expand_one_ssa_partition (var
);
2106 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
2107 gen_stack_protect_signal
2108 = stack_protect_decl_p () || stack_protect_return_slot_p ();
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      break;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      class stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (asan_sanitize_stack_p ())
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      /* ASAN description strings don't yet have a syntax for expressing
         polynomial offsets.  */
      HOST_WIDE_INT prev_offset;
      if (!data.asan_vec.is_empty ()
          && frame_offset.is_constant (&prev_offset))
        {
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
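
          /* Worked example (hypothetical values): with ASAN_RED_ZONE_SIZE
             of 32, data.asan_alignb == 64 and sz == 40, the guard above
             holds (40 + 32 >= 64), so redzonesz becomes
             ((40 + 32 + 63) & ~63) - 40 == 128 - 40 == 88, which makes
             the protected region end on a 64-byte boundary.  */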
          /* Allocating a constant amount of space from a constant
             starting offset must give a constant result.  */
          offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
                    .to_constant ());
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);
    }

  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
                                              virtual_stack_vars_rtx,
                                              var_end_seq);

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (FRAME_GROWS_DOWNWARD)
        frame_offset = aligned_lower_bound (frame_offset, align);
      else
        frame_offset = aligned_upper_bound (frame_offset, align);
    }

  return var_end_seq;
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gcond *stmt)
{
  basic_block new_bb, dest;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
           && integer_zerop (op1))
          || (gimple_cond_code (stmt) == EQ_EXPR
              && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple *second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap and the target does not support conditional
             compare, turn some more codes into jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
                   && targetm.gen_ccmp_first == NULL)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 <= C4.  */
  if ((code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (op1) == INTEGER_CST)
    code = maybe_optimize_mod_cmp (code, &op0, &op1);
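
  /* For example, with unsigned 32-bit x, "x % 3 == 0" can be rewritten
     as "x * 0xAAAAAAAB <= 0x55555555", where 0xAAAAAAAB is the
     multiplicative inverse of 3 modulo 2^32 and 0x55555555 is
     (2^32 - 1) / 3, so a multiply and compare replace the division
     (illustrative values; the exact rewrite is chosen by
     maybe_optimize_mod_cmp).  */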
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count ();
  loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
  add_bb_to_loop (new_bb, loop);
  if (loop->latch == bb
      && loop->header == dest)
    loop->latch = new_bb;
  make_single_succ_edge (new_bb, dest, 0);
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gcall *stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  /* If this is a call to a built-in function and it has no effect other
     than setting the lhs, try to implement it using an internal function
     instead.  */
  decl = gimple_call_fndecl (stmt);
  if (gimple_call_lhs (stmt)
      && !gimple_has_side_effects (stmt)
      && (optimize || (decl && called_as_built_in (decl))))
    {
      internal_fn ifn = replacement_internal_fn (stmt);
      if (ifn != IFN_LAST)
        {
          expand_internal_call (ifn, stmt);
          return;
        }
    }
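
  /* For example, a call "x = __builtin_sqrt (y)" whose lhs is used but
     whose errno side effect is not needed can expand through IFN_SQRT
     when the target implements the corresponding optab directly
     (illustrative; the actual mapping is computed by
     replacement_internal_fn).  */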
  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  builtin_p = decl && fndecl_built_in_p (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple *def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  if (gimple_no_warning_p (stmt))
    TREE_NO_WARNING (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);

  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  rtx_insn *before_call = get_last_insn ();
  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If the gimple call is an indirect call and has 'nocf_check'
     attribute find a generated CALL insn to mark it as no
     control-flow verification is needed.  */
  if (gimple_call_nocf_check_p (stmt)
      && !gimple_call_fndecl (stmt))
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last)
             && last != before_call)
        last = PREV_INSN (last);

      if (last != before_call)
        add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
    }

  mark_transaction_restart_calls (stmt);
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
        targetm.md_asm_adjust (output_rvec, input_rvec,
                               constraints, clobber_rvec,
                               clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
        XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      for (unsigned i = 1; i < len; ++i)
        if (n_occurrences (',', constraints[i]) != nalternatives)
          {
            error ("operand constraints for %<asm%> differ "
                   "in number of alternatives");
            return false;
          }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return TRUE for
   overlap, FALSE for OK.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("%<asm%> specifier for variable %qE conflicts with "
             "%<asm%> clobber list",
             DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
         variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
/* Check that the given REGNO spanning NREGS is a valid
   asm clobber operand.  Some HW registers cannot be
   saved/restored, hence they should not be clobbered by
   asm statements.  */

static bool
asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
{
  bool is_valid = true;
  HARD_REG_SET regset;

  CLEAR_HARD_REG_SET (regset);

  add_range_to_hard_reg_set (&regset, regno, nregs);

  /* Clobbering the PIC register is an error.  */
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
    {
      /* ??? Diagnose during gimplification?  */
      error ("PIC register clobbered by %qs in %<asm%>", regname);
      is_valid = false;
    }
  else if (!in_hard_reg_set_p
           (accessible_reg_set, reg_raw_mode[regno], regno))
    {
      /* ??? Diagnose during gimplification?  */
      error ("the register %qs cannot be clobbered in %<asm%>"
             " for the current target", regname);
      is_valid = false;
    }

  /* Clobbering the stack pointer register is deprecated.  GCC expects
     the value of the stack pointer after an asm statement to be the same
     as it was before, so no asm can validly clobber the stack pointer in
     the usual sense.  Adding the stack pointer to the clobber list has
     traditionally had some undocumented and somewhat obscure side-effects.  */
  if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
      && warning (OPT_Wdeprecated, "listing the stack pointer register"
                  " %qs in a clobber list is deprecated", regname))
    inform (input_location, "the value of the stack pointer after an %<asm%>"
            " statement must be the same as it was before the statement");

  return is_valid;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.

   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_stmt (gasm *stmt)
{
  class save_input_location
  {
    location_t old;

  public:
    explicit save_input_location(location_t where)
    {
      old = input_location;
      input_location = where;
    }

    ~save_input_location()
    {
      input_location = old;
    }
  };

  location_t locus = gimple_location (stmt);

  if (gimple_asm_input_p (stmt))
    {
      const char *s = gimple_asm_string (stmt);
      tree string = build_string (strlen (s), s);
      expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  /* There are some legacy diagnostics in here, and also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l(locus);

  unsigned noutputs = gimple_asm_noutputs (stmt);
  unsigned ninputs = gimple_asm_ninputs (stmt);
  unsigned nlabels = gimple_asm_nlabels (stmt);
  unsigned i;

  /* ??? Diagnose during gimplification?  */
  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }
  auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
  auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
  auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;

  /* Copy the gimple vectors into new vectors that we can manipulate.  */

  output_tvec.safe_grow (noutputs);
  input_tvec.safe_grow (ninputs);
  constraints.safe_grow (noutputs + ninputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (stmt, i);
      output_tvec[i] = TREE_VALUE (t);
      constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }
  for (i = 0; i < ninputs; i++)
    {
      tree t = gimple_asm_input_op (stmt, i);
      input_tvec[i] = TREE_VALUE (t);
      constraints[i + noutputs]
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }

  /* ??? Diagnose during gimplification?  */
  if (! check_operand_nalternatives (constraints))
    return;
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  auto_vec<rtx> clobber_rvec;
  HARD_REG_SET clobbered_regs;
  CLEAR_HARD_REG_SET (clobbered_regs);

  if (unsigned n = gimple_asm_nclobbers (stmt))
    {
      clobber_rvec.reserve (n);
      for (i = 0; i < n; i++)
        {
          tree t = gimple_asm_clobber_op (stmt, i);
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
          int nregs, j;

          j = decode_reg_name_and_count (regname, &nregs);
          if (j < 0)
            {
              if (j == -2)
                {
                  /* ??? Diagnose during gimplification?  */
                  error ("unknown register name %qs in %<asm%>", regname);
                }
              else if (j == -4)
                {
                  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
                  clobber_rvec.safe_push (x);
                }
              else
                {
                  /* Otherwise we should have -1 == empty string
                     or -3 == cc, which is not a register.  */
                  gcc_assert (j == -1 || j == -3);
                }
            }
          else
            for (int reg = j; reg < j + nregs; reg++)
              {
                if (!asm_clobber_reg_is_valid (reg, nregs, regname))
                  return;

                SET_HARD_REG_BIT (clobbered_regs, reg);
                rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
                clobber_rvec.safe_push (x);
              }
        }
    }
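
  /* For reference, decode_reg_name_and_count returns a hard register
     number for a recognized name, -1 for an empty string, -2 for an
     unknown name, -3 for "cc" and -4 for "memory"; only the "memory"
     case pushes the (mem:BLK (scratch)) wildcard clobber above.  */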
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */
  /* ??? Diagnose during gimplification?  */

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      /* If the output is a hard register, verify it doesn't conflict with
         any other operand's possible hard register use.  */
      if (DECL_P (val)
          && REG_P (DECL_RTL (val))
          && HARD_REGISTER_P (DECL_RTL (val)))
        {
          unsigned j, output_hregno = REGNO (DECL_RTL (val));
          bool early_clobber_p = strchr (constraints[i], '&') != NULL;
          unsigned long match;

          /* Verify the other outputs do not use the same hard register.  */
          for (j = i + 1; j < noutputs; ++j)
            if (DECL_P (output_tvec[j])
                && REG_P (DECL_RTL (output_tvec[j]))
                && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
                && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
              error ("invalid hard register usage between output operands");

          /* Verify matching constraint operands use the same hard register
             and that the non-matching constraint operands do not use the same
             hard register if the output is an early clobber operand.  */
          for (j = 0; j < ninputs; ++j)
            if (DECL_P (input_tvec[j])
                && REG_P (DECL_RTL (input_tvec[j]))
                && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
              {
                unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
                switch (*constraints[j + noutputs])
                  {
                  case '0': case '1': case '2': case '3': case '4':
                  case '5': case '6': case '7': case '8': case '9':
                    match = strtoul (constraints[j + noutputs], NULL, 10);
                    break;
                  default:
                    match = ULONG_MAX;
                    break;
                  }
                if (i == match
                    && output_hregno != input_hregno)
                  error ("invalid hard register usage between output operand "
                         "and matching constraint operand");
                else if (early_clobber_p
                         && i != match
                         && output_hregno == input_hregno)
                  error ("invalid hard register usage between earlyclobber "
                         "operand and input operand");
              }
        }

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        mark_addressable (val);
    }
  for (i = 0; i < ninputs; ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                    constraints.address (),
                                    &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        mark_addressable (input_tvec[i]);
    }
  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();
  int old_generating_concat_p = generating_concat_p;

  /* Vector of RTX's of evaluated output operands.  */
  auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
  auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
  rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;

  output_rvec.safe_grow (noutputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      bool is_inout, allows_reg, allows_mem, ok;
      rtx op;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then we will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout
          || TREE_ADDRESSABLE (type))
        {
          op = expand_expr (val, NULL_RTX, VOIDmode,
                            !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
          if (MEM_P (op))
            op = validize_mem (op);

          if (! allows_reg && !MEM_P (op))
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
              || GET_CODE (op) == CONCAT)
            {
              rtx old_op = op;
              op = gen_reg_rtx (GET_MODE (op));

              generating_concat_p = old_generating_concat_p;

              if (is_inout)
                emit_move_insn (op, old_op);

              push_to_sequence2 (after_rtl_seq, after_rtl_end);
              emit_move_insn (old_op, op);
              after_rtl_seq = get_insns ();
              after_rtl_end = get_last_insn ();
              end_sequence ();
            }
        }
      else
        {
          op = assign_temp (type, 0, 1);
          op = validize_mem (op);
          if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);

          generating_concat_p = old_generating_concat_p;

          push_to_sequence2 (after_rtl_seq, after_rtl_end);
          expand_assignment (val, make_tree (type, op), false);
          after_rtl_seq = get_insns ();
          after_rtl_end = get_last_insn ();
          end_sequence ();
        }
      output_rvec[i] = op;

      if (is_inout)
        inout_opnum.safe_push (i);
    }
  auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
  auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;

  input_rvec.safe_grow (ninputs);
  input_mode.safe_grow (ninputs);

  generating_concat_p = 0;

  for (i = 0; i < ninputs; ++i)
    {
      tree val = input_tvec[i];
      tree type = TREE_TYPE (val);
      bool allows_reg, allows_mem, ok;
      const char *constraint;
      rtx op;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                   constraints.address (),
                                   &allows_mem, &allows_reg);
      gcc_assert (ok);

      /* EXPAND_INITIALIZER will not generate code for valid initializer
         constants, but will still generate code for other types of operand.
         This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        allows_reg ? EXPAND_NORMAL
                        : allows_mem ? EXPAND_MEMORY
                        : EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
        {
          if (allows_reg && TYPE_MODE (type) != BLKmode)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning (0, "%<asm%> operand %d probably does not match "
                     "constraints", i + noutputs);
          else if (MEM_P (op))
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as available a memory_operand
                 at this point.  Ignore it: clearly this *is* a memory.  */
            }
          else
            gcc_unreachable ();
        }
      input_rvec[i] = op;
      input_mode[i] = TYPE_MODE (type);
    }
  /* For in-out operands, copy output rtx to input rtx.  */
  unsigned ninout = inout_opnum.length();
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      rtx o = output_rvec[j];

      input_rvec.safe_push (o);
      input_mode.safe_push (GET_MODE (o));

      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
  ninputs += ninout;
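
  /* For example, a "+r" output operand at position 0 is represented at
     this point as output 0 plus an extra input whose constraint is the
     digit string "0", i.e. a matching constraint tying the input to
     output 0.  */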
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  rtx_insn *after_md_seq = NULL;
  if (targetm.md_asm_adjust)
    after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
                                          constraints, clobber_rvec,
                                          clobbered_regs);

  /* Do not allow the hook to change the output and input count,
     lest it mess up the operand numbering.  */
  gcc_assert (output_rvec.length() == noutputs);
  gcc_assert (input_rvec.length() == ninputs);
  gcc_assert (constraints.length() == noutputs + ninputs);

  /* But it certainly can adjust the clobbers.  */
  unsigned nclobbers = clobber_rvec.length ();

  /* Third pass checks for easy conflicts.  */
  /* ??? Why are we doing this on trees instead of rtx.  */

  bool clobber_conflict_found = 0;
  for (i = 0; i < noutputs; ++i)
    if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  for (i = 0; i < ninputs - ninout; ++i)
    if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  rtvec argvec = rtvec_alloc (ninputs);
  rtvec constraintvec = rtvec_alloc (ninputs);
  rtvec labelvec = rtvec_alloc (nlabels);

  rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                    : GET_MODE (output_rvec[0])),
                                   ggc_strdup (gimple_asm_string (stmt)),
                                   "", 0, argvec, constraintvec,
                                   labelvec, locus);
  MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);

  for (i = 0; i < ninputs; ++i)
    {
      ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT_loc (input_mode[i],
                                 constraints[i + noutputs],
                                 locus);
    }

  /* Copy labels to the vector.  */
  rtx_code_label *fallthru_label = NULL;
  if (nlabels > 0)
    {
      basic_block fallthru_bb = NULL;
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
        fallthru_bb = fallthru->dest;

      for (i = 0; i < nlabels; ++i)
        {
          tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
          rtx_insn *r;
          /* If asm goto has any labels in the fallthru basic block, use
             a label that we emit immediately after the asm goto.  Expansion
             may insert further instructions into the same basic block after
             asm goto and if we don't do this, insertion of instructions on
             the fallthru edge might misbehave.  See PR58670.  */
          if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
            {
              if (fallthru_label == NULL_RTX)
                fallthru_label = gen_label_rtx ();
              r = fallthru_label;
            }
          else
            r = label_rtx (label);
          ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
        }
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
                               ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (output_rvec[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0; i < noutputs; ++i)
        {
          rtx src, o = output_rvec[i];
          if (i == 0)
            {
              ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
              src = obody;
            }
          else
            {
              src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
                                          ASM_OPERANDS_TEMPLATE (obody),
                                          constraints[i], i, argvec,
                                          constraintvec, labelvec, locus);
              MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
            }
          XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */
      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */
      for (unsigned j = 0; j < nclobbers; ++j)
        {
          rtx clobbered_reg = clobber_rvec[j];

          /* Do sanity check for overlap between clobbers and respectively
             input and outputs that hasn't been handled.  Such overlap
             should have been detected and reported above.  */
          if (!clobber_conflict_found && REG_P (clobbered_reg))
            {
              /* We test the old body (obody) contents to avoid
                 tripping over the under-construction body.  */
              for (unsigned k = 0; k < noutputs; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
                  internal_error ("%<asm%> clobber conflict with "
                                  "output operand");
              for (unsigned k = 0; k < ninputs - ninout; ++k)
                if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
                  internal_error ("%<asm%> clobber conflict with "
                                  "input operand");
            }

          XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
        }

      if (nlabels > 0)
        emit_jump_insn (body);
      else
        emit_insn (body);
    }

  generating_concat_p = old_generating_concat_p;

  if (fallthru_label)
    emit_label (fallthru_label);

  if (after_md_seq)
    emit_insn (after_md_seq);
  if (after_rtl_seq)
    emit_insn (after_rtl_seq);

  crtl->has_asm_statement = 1;
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
         gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
        val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
        emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
        emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
         returned a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */
  else if (retval_rhs != 0
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
        {
          /* Use the mode of the result value on the return register.  */
          PUT_MODE (result_rtl, GET_MODE (val));
          expand_value_return (val);
        }
      else
        expand_null_return ();
    }
  else if (retval_rhs != 0
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
           && (REG_P (result_rtl)
               || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
        = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* Expand a clobber of LHS.  If LHS is stored in a multi-part
   register, tell the rtl optimizers that its value is no longer
   needed.  */

static void
expand_clobber (tree lhs)
{
  if (DECL_P (lhs))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (lhs);
      if (decl_rtl && REG_P (decl_rtl))
        {
          machine_mode decl_mode = GET_MODE (decl_rtl);
          if (maybe_gt (GET_MODE_SIZE (decl_mode),
                        REGMODE_NATURAL_SIZE (decl_mode)))
            emit_clobber (decl_rtl);
        }
    }
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple *stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      {
        gswitch *swtch = as_a <gswitch *> (stmt);
        if (gimple_switch_num_labels (swtch) == 1)
          expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
        else
          expand_case (swtch);
      }
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (as_a <gasm *> (stmt));
      break;
    case GIMPLE_CALL:
      expand_call_stmt (as_a <gcall *> (stmt));
      break;

    case GIMPLE_RETURN:
      {
        op0 = gimple_return_retval (as_a <greturn *> (stmt));

        /* If a return doesn't have a location, it very likely represents
           multiple user returns so we cannot let it inherit the location
           of the last statement of the previous basic block in RTL.  */
        if (!gimple_has_location (stmt))
          set_curr_insn_location (cfun->function_end_locus);

        if (op0 && op0 != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (op0 != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (op0) != RESULT_DECL);

                /* ??? We'd like to use simply expand_assignment here,
                   but this fails if the value is of BLKmode but the return
                   decl is a register.  expand_return has special handling
                   for this combination, which eventually should move
                   to common code.  See comments there.  Until then, let's
                   build a modify expression :-/  */
                op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                              result, op0);
              }
          }

        if (!op0)
          expand_null_return ();
        else
          expand_return (op0);
      }
      break;

    case GIMPLE_ASSIGN:
      {
        gassign *assign_stmt = as_a <gassign *> (stmt);
        tree lhs = gimple_assign_lhs (assign_stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (assign_stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
                /* Do not put locations on possibly shared trees.  */
                && !is_gimple_min_invariant (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            if (TREE_CLOBBER_P (rhs))
              /* This is a clobber to mark the going out of scope for
                 this LHS.  */
              expand_clobber (lhs);
            else
              expand_assignment (lhs, rhs,
                                 gimple_assign_nontemporal_move_p (
                                   assign_stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (assign_stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (ops.code))
              {
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (assign_stmt);
                /* Fallthru */
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (assign_stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (assign_stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_SIGN (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  profile_probability probability;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = profile_probability::never ();

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            e->dest->count -= e->count ();
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
         sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
                 | EDGE_SIBCALL);
  e->probability = probability;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
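
/* Worked example: -7 / 2 truncates to -3 with remainder MOD == -1;
   OP1 / MOD == 2 / -1 == -2 is negative, so the adjustment is -1 and
   the floor result is -3 + -1 == -4.  When MOD == 0 the adjustment
   is 0.  */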
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
      ? (op1 / mod > 0 ? 1 : -1)
      : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
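
/* Worked example: 7 / 2 truncates to 3 with MOD == 1; since
   MOD >= OP1 - MOD (1 >= 1), the adjustment is 1 and the rounded
   result is 4.  Likewise 5 / 3 has MOD == 2 >= 1, giving 1 + 1 == 2;
   halfway and above rounds away from zero.  */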
/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (scalar_int_mode mode, rtx x,
			      addr_space_t as)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  /* X must have some form of address mode already.  */
  scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as an operation in debug
	 info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
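/* As an illustration (register numbers made up): on a target with
   32-bit ptr_mode, 64-bit Pmode and POINTERS_EXTEND_UNSIGNED > 0,
   converting (reg:SI 100) to DImode just wraps it as
   (zero_extend:DI (reg:SI 100)).  On a target where the extension is
   target-specific (POINTERS_EXTEND_UNSIGNED < 0), a known-safe rtx
   such as (symbol_ref:SI "x") is instead shallow-copied with its mode
   rewritten to DImode.  No insns are emitted either way.  */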
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
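/* Sketch of the effect (GIMPLE spelled informally): given a long chain
   of single-use definitions

       a_1 = x_0 + 1;  a_2 = a_1 * 2;  ...  a_9 = a_8 - 3;

   that would all be TERed into one huge debug expression, once the
   recursion passes depth 6 a debug bind such as

       # DEBUG D#1 => <definition of the deep use>

   is inserted right after the offending defining statement, and the
   walk restarts from that bind, so no single debug expression grows
   without bound.  */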
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;
  scalar_int_mode op0_mode, op1_mode, addr_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case SAD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
      if (mode == BLKmode)
	return NULL_RTX;
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      switch (TREE_CODE (exp))
	{
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	case WIDEN_LSHIFT_EXPR:
	  /* Ensure second operand isn't wider than the first one.  */
	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
	      && (GET_MODE_UNIT_PRECISION (mode)
		  < GET_MODE_PRECISION (op1_mode)))
	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
	  break;
	default:
	  break;
	}
      /* Fall through.  */

    unary:
    case tcc_unary:
      if (mode == BLKmode)
	return NULL_RTX;
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binary;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through.  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0
	  /* At least label RTXen are sometimes replaced by
	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
	     handled by copy_rtx.  */
	  || NOTE_P (op0))
	{
	  if (!VAR_P (exp)
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but mode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_UNIT_BITSIZE (mode)
		== GET_MODE_UNIT_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_UNIT_BITSIZE (mode)
		     < GET_MODE_UNIT_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (GET_MODE_UNIT_PRECISION (mode)
		 == GET_MODE_UNIT_PRECISION (inner_mode))
	  op0 = lowpart_subreg (mode, op0, inner_mode);
	else if (GET_MODE_UNIT_PRECISION (mode)
		 < GET_MODE_UNIT_PRECISION (inner_mode))
	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
	else if (UNARY_CLASS_P (exp)
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* Fall through.  */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  poly_int64 offset;
	  if (!op1 || !poly_int_rtx_p (op1, &offset))
	    return NULL;

	  op0 = plus_constant (inner_mode, op0, offset);
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
	return NULL;

      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
	return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	machine_mode mode1;
	poly_int64 bitsize, bitpos;
	tree offset;
	int reversep, volatilep = 0;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0;

	if (known_eq (bitsize, 0))
	  return NULL;

	orig_op0 = op0 = expand_debug_expr (tem);

	if (!op0)
	  return NULL;

	if (offset)
	  {
	    machine_mode addrmode, offmode;

	    if (!MEM_P (op0))
	      return NULL;

	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	      addrmode = Pmode;

	    op1 = expand_debug_expr (offset);
	    if (!op1)
	      return NULL;

	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));

	    if (addrmode != offmode)
	      op1 = lowpart_subreg (addrmode, op1, offmode);

	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       code.  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
							  op0, op1));
	  }

	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      {
		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
		  return NULL;
		/* Bitfield.  */
		mode1 = smallest_int_mode_for_size (bitsize);
	      }
	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
	    if (maybe_ne (bytepos, 0))
	      {
		op0 = adjust_address_nv (op0, mode1, bytepos);
		bitpos = num_trailing_bits (bitpos);
	      }
	    else if (known_eq (bitpos, 0)
		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
	  return op0;

	if (maybe_lt (bitpos, 0))
	  return NULL;

	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
	  return NULL;

	poly_int64 bytepos;
	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
	  {
	    machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
	      return NULL;

	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, gen_int_mode (bitsize, word_mode),
				     gen_int_mode (bitpos, word_mode));
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
	  && op0_mode != op1_mode)
	{
	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
	      /* If OP0 is a partial mode, then we must truncate, even
		 if it has the same bitsize as OP1 as GCC's
		 representation of partial modes is opaque.  */
	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
		  && (GET_MODE_BITSIZE (op0_mode)
		      == GET_MODE_BITSIZE (op1_mode))))
	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
    case POINTER_DIFF_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case FLOOR_MOD_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UMOD, mode, op0, op1);
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case CEIL_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case CEIL_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
				      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
				      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
				      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
				      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   XEXP (op0, 1),
						   GET_MODE_INNER (mode)));
      else
	{
	  scalar_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      scalar_int_mode ifmode;
	      scalar_int_mode ihmode;
	      rtx halfsize;
	      if (!int_mode_for_mode (mode).exists (&ifmode)
		  || !int_mode_for_mode (imode).exists (&ihmode))
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	{
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	    {
	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
	      bool reverse;
	      tree decl
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
					   &bitsize, &maxsize, &reverse);
	      if ((VAR_P (decl)
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && (!TREE_ADDRESSABLE (decl)
		      || target_for_debug_bind (decl))
		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
		  && known_gt (bitsize, 0)
		  && known_eq (bitsize, maxsize))
		{
		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
		  return plus_constant (mode, base, byteoffset);
		}
	    }

	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		 == ADDR_EXPR)
	    {
	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0));
	      if (op0 != NULL
		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
		      || (GET_CODE (op0) == PLUS
			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
			  && CONST_INT_P (XEXP (op0, 1)))))
		{
		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
							 1));
		  poly_int64 offset;
		  if (!op1 || !poly_int_rtx_p (op1, &offset))
		    return NULL;

		  return plus_constant (mode, op0, offset);
		}
	    }

	  return NULL;
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
      op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);

      return op0;

    case VECTOR_CST:
      {
	unsigned HOST_WIDE_INT i, nelts;

	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
	  return NULL;

	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

	for (i = 0; i < nelts; ++i)
	  {
	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
	    if (!op1)
	      return NULL;
	    XVECEXP (op0, 0, i) = op1;
	  }

	return op0;
      }

    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
	return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  unsigned HOST_WIDE_INT nelts;
	  tree val;

	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
	    goto flag_unsupported;

	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < nelts)
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < nelts; i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple *g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    tree t = NULL_TREE;
	    if (deep_ter_debug_map)
	      {
		tree *slot = deep_ter_debug_map->get (exp);
		if (slot)
		  t = *slot;
	      }
	    if (t == NULL_TREE)
	      t = gimple_assign_rhs_to_tree (g);
	    op0 = expand_debug_expr (t);
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    /* If this is a reference to an incoming value of
	       parameter that is never used in the code or where the
	       incoming value is never used in the code, use
	       PARM_DECL's DECL_RTL if set.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
		&& SSA_NAME_VAR (exp)
		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
		&& has_zero_uses (exp))
	      {
		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		if (op0)
		  goto adjust_mode;
	      }

	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      return NULL;

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }
	goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case VEC_COND_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
      return NULL;

    /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
    case BIT_INSERT_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;

    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    default:
    flag_unsupported:
      if (flag_checking)
	{
	  debug_tree (exp);
	  gcc_unreachable ();
	}
      return NULL;
    }
}
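/* For instance, a signed FLOOR_DIV_EXPR in SImode comes out of the
   above as the composition

       (plus:SI (div:SI x y)
		(if_then_else:SI (ne (mod:SI x y) (const_int 0))
				 ...))

   i.e. a truncating division plus the floor_sdiv_adjust correction.
   No instructions are generated; the rtx only describes the value
   for debug info consumers.  */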
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
      if (DECL_ABSTRACT_ORIGIN (exp))
	return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
      break;
    case PARM_DECL:
      {
	mode = DECL_MODE (exp);
	op0 = expand_debug_parm_decl (exp);
	if (op0)
	  break;
	/* See if this isn't an argument that has been completely
	   optimized out.  */
	if (!DECL_RTL_SET_P (exp)
	    && !DECL_INCOMING_RTL (exp)
	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  {
	    tree aexp = DECL_ORIGIN (exp);
	    if (DECL_CONTEXT (aexp)
		== DECL_ABSTRACT_ORIGIN (current_function_decl))
	      {
		vec<tree, va_gc> **debug_args;
		unsigned int ix;
		tree ddecl;
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_UNIT_BITSIZE (mode)
	  == GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
	       < GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (GET_MODE_UNIT_PRECISION (mode)
	   == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
	   < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}
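/* E.g. if a location expression has five levels of arithmetic
   nesting, the subexpression reached at depth 4 is split out into a
   new bind emitted before the insn, roughly

       (debug_insn (var_location D#3 (plus:SI ...)))

   and the original location refers to the new (debug_expr:SI D#3)
   instead, keeping every remaining expression at most four levels
   deep.  (D#3 is just an arbitrary example name.)  */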
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Disable temporarily
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}
/* Perform swapping of operands of commutative operations so that the
   more expensive one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple *def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}
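/* Example: for c_1 = a_2 + b_3, where a_2's TERed definition has an
   accumulated lattice cost of 2 and b_3's a cost of 10, the operands
   are swapped to c_1 = b_3 + a_2 so that the more expensive
   subexpression is expanded first.  The numbers are illustrative
   estimate_num_insns size costs.  */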
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note = NULL;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      gcc_checking_assert (!note);
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label (*elt);

      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      gcc_assert (LABEL_P (BB_HEAD (bb)));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  if (note)
    NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1
	 */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple *def;

	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple *debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    machine_mode mode;

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    SET_DECL_MODE (vexpr, mode);

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_location (sloc);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
	  if (new_bb)
	    return new_bb;
	}
      else if (is_gimple_debug (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var;
	      tree value = NULL_TREE;
	      rtx val = NULL_RTX;
	      machine_mode mode;

	      if (!gimple_debug_nonbind_marker_p (stmt))
		{
		  if (gimple_debug_bind_p (stmt))
		    {
		      var = gimple_debug_bind_get_var (stmt);

		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
			  && TREE_CODE (var) != LABEL_DECL
			  && !target_for_debug_bind (var))
			goto delink_debug_stmt;

		      if (DECL_P (var))
			mode = DECL_MODE (var);
		      else
			mode = TYPE_MODE (TREE_TYPE (var));

		      if (gimple_debug_bind_has_value_p (stmt))
			value = gimple_debug_bind_get_value (stmt);

		      val = gen_rtx_VAR_LOCATION
			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
		    }
		  else if (gimple_debug_source_bind_p (stmt))
		    {
		      var = gimple_debug_source_bind_get_var (stmt);

		      value = gimple_debug_source_bind_get_value (stmt);

		      mode = DECL_MODE (var);

		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
						  VAR_INIT_STATUS_UNINITIALIZED);
		    }
		  else
		    gcc_unreachable ();
		}
	      /* If this function was first compiled with markers
		 enabled, but they're now disabled (e.g. LTO), drop
		 them on the floor.  */
	      else if (gimple_debug_nonbind_marker_p (stmt)
		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
		goto delink_debug_stmt;
	      else if (gimple_debug_begin_stmt_p (stmt))
		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
	      else if (gimple_debug_inline_entry_p (stmt))
		{
		  tree block = gimple_block (stmt);

		  if (block)
		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
		  else
		    goto delink_debug_stmt;
		}
	      else
		gcc_unreachable ();

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  if (GET_CODE (val) == VAR_LOCATION)
		    {
		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		    }
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  if (GET_CODE (val) == VAR_LOCATION)
		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!is_gimple_debug (stmt))
		break;
	    }

	  set_curr_insn_location (sloc);
	}
      else
	{
	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_tail_p (call_stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (call_stmt, false);

	  if (call_stmt && gimple_call_tail_p (call_stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && (JUMP_P (last)
	  || (DEBUG_INSN_P (last)
	      && JUMP_P (prev_nondebug_insn (last)))))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When entry edge points to first basic block, we don't need jump,
     otherwise we have to jump into proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      make_single_succ_edge (init_block, first_block, flags);
    }
  else
    make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			   EDGE_FALLTHRU);

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb count counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			     EDGE_FALLTHRU);
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      exit_block->count -= e2->count ();
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t,2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  {
	    walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
	    gcall *call = dyn_cast <gcall *> (stmt);
	    if (call && gimple_call_internal_p (call))
	      switch (gimple_call_internal_fn (call))
		{
		case IFN_LOAD_LANES:
		  /* The source must be a MEM.  */
		  mark_addressable (gimple_call_arg (call, 0));
		  break;
		case IFN_STORE_LANES:
		  /* The destination must be a MEM.  */
		  mark_addressable (gimple_call_lhs (call));
		  break;
		default:
		  break;
		}
	  }
      }
}
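/* A typical case (illustrative only):

       typedef int v4si __attribute__ ((vector_size (16)));
       int f (v4si v, int i) { return v[i]; }

   Here V could live in a single vector register, but V[I] with a
   non-constant index has no RTL counterpart for a register, so V is
   marked TREE_ADDRESSABLE and forced into memory.  */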
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  crtl->stack_protect_guard_decl = guard_decl;
  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_set () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);

      /* Allow the target to compute address of Y and copy it to X without
	 leaking Y into a register.  This combined address + copy pattern
	 allows the target to prevent spilling of any intermediate results by
	 splitting it after register allocator.  */
      if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
	{
	  emit_insn (insn);
	  return;
	}
    }

  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
6351 pass_expand::execute (function
*fun
)
6353 basic_block bb
, init_block
;
6356 rtx_insn
*var_seq
, *var_ret_seq
;
6359 timevar_push (TV_OUT_OF_SSA
);
6360 rewrite_out_of_ssa (&SA
);
6361 timevar_pop (TV_OUT_OF_SSA
);
6362 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6364 if (MAY_HAVE_DEBUG_BIND_STMTS
&& flag_tree_ter
)
6366 gimple_stmt_iterator gsi
;
6367 FOR_EACH_BB_FN (bb
, cfun
)
6368 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6369 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6370 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6373 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6374 discover_nonconstant_array_refs ();
6376 /* Make sure all values used by the optimization passes have sane
6380 /* Some backends want to know that we are expanding to RTL. */
6381 currently_expanding_to_rtl
= 1;
6382 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6383 free_dominance_info (CDI_DOMINATORS
);
6385 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6387 insn_locations_init ();
6388 if (!DECL_IS_BUILTIN (current_function_decl
))
6390 /* Eventually, all FEs should explicitly set function_start_locus. */
6391 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6392 set_curr_insn_location
6393 (DECL_SOURCE_LOCATION (current_function_decl
));
6395 set_curr_insn_location (fun
->function_start_locus
);
6398 set_curr_insn_location (UNKNOWN_LOCATION
);
6399 prologue_location
= curr_insn_location ();
6401 #ifdef INSN_SCHEDULING
6402 init_sched_attrs ();
6405 /* Make sure first insn is a note even if we don't want linenums.
6406 This makes sure the first insn will never be deleted.
6407 Also, final expects a note to appear there. */
6408 emit_note (NOTE_INSN_DELETED
);
6410 targetm
.expand_to_rtl_hook ();
6411 crtl
->init_stack_alignment ();
6412 fun
->cfg
->max_jumptable_ents
= 0;
6414 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
6415 of the function section at exapnsion time to predict distance of calls. */
6416 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);
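
  /* Explanatory note: the start_sequence/get_insns/end_sequence idiom
     above collects the variable-expansion insns into VAR_SEQ instead
     of emitting them into the insn stream immediately; they are
     spliced in before parm_birth_insn further down, once
     expand_function_start has run.  */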
  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }
  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;

      adjust_one_expanded_partition_var (name);
    }
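
  /* For example (hypothetical partition): if i_1 and i_4 were
     coalesced into one partition that was assigned the pseudo
     (reg:SI 90 [ i ]), both names now expand to that register, and
     the underlying VAR_DECL `i' gets its DECL_RTL pointed at it as
     well.  */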
  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
         update_alias_info_with_stack_vars.  They will have a NULL
         defining statement and won't be part of the partitioning,
         so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
        continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
         the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
               && (TREE_CODE (var) == PARM_DECL
                   || TREE_CODE (var) == RESULT_DECL))
        {
          rtx in = DECL_RTL_IF_SET (var);
          gcc_assert (in);
          rtx out = SA.partition_to_pseudo[part];
          gcc_assert (in == out);

          /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
             those expected by debug backends for each parm and for
             the result.  This is particularly important for stabs,
             whose register elimination from parm's DECL_RTL may cause
             -fcompare-debug differences as SET_DECL_RTL changes reg's
             attrs.  So, make sure the RTL already has the parm as the
             EXPR, so that it won't change.  */
          SET_DECL_RTL (var, NULL_RTX);
          if (MEM_P (in))
            set_mem_attributes (in, var, true);
          SET_DECL_RTL (var, in);
        }
    }
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
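
  /* Sketch (which insns, if any, are emitted depends on the target):
     on targets without an init section, expand_main_function emits a
     library call equivalent to

	call	__main

     at the start of main, so that global constructors run before user
     code; targets with .init/.init_array support emit nothing here,
     since their startup files run the constructors instead.  */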
  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();
  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
    cfun->debug_nonbind_markers = false;
  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
                  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
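
  /* lab_rtx_for_bb memoizes the rtx_code_label created for each basic
     block, so that a forward branch such as (illustrative GIMPLE)

	if (a_1 > 7) goto <bb 4>; else goto <bb 5>;

     can be expanded into a compare-and-jump to bb 4's label even
     before bb 4 itself has been expanded.  */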
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }
  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
        after = next;
      emit_insn_after (var_ret_seq, after);
    }
  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  /* If we have a single successor to the entry block, put the pending insns
     after parm birth, but before NOTE_INSNS_FUNCTION_BEG.  */
  if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
    {
      edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
      if (e->insns.r)
        {
          rtx_insn *insns = e->insns.r;
          e->insns.r = NULL;
          rebuild_jump_labels_chain (insns);
          if (NOTE_P (parm_birth_insn)
              && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
            emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
          else
            emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
        }
    }

  /* Otherwise, as well as for other edges, take the usual way.  */
  commit_edge_insertions ();
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
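
  /* The all-ones bitmap above is deliberate: a single GIMPLE block
     can expand to RTL containing internal control flow (for example a
     switch lowered to a casesi jump table), so every block is
     rescanned and split at the jump insns that were actually
     emitted.  */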
  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;
  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}
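
/* A minimal sketch of how this factory fits in: the pass manager
   instantiates the pass once per context via

	NEXT_PASS (pass_expand);

   in passes.def, and then calls pass_expand::execute for each
   function as it reaches the end of the GIMPLE pipeline.  */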