1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "tree-pass.h"
34 #include "regs.h" /* For reg_renumber. */
38 #include "diagnostic.h"
39 #include "fold-const.h"
41 #include "stor-layout.h"
43 #include "print-tree.h"
47 #include "cfgcleanup.h"
52 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
61 #include "gimple-pretty-print.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
74 #include "tree-ssa-address.h"
77 #include "tree-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
88 /* This variable holds information helping the rewriting of SSA trees
92 /* This variable holds the currently expanded gimple statement for purposes
93 of comminucating the profile info to the builtin expanders. */
94 gimple
*currently_expanding_gimple_stmt
;
96 static rtx
expand_debug_expr (tree
);
98 static bool defer_stack_allocation (tree
, bool);
100 static void record_alignment_for_reg_var (unsigned int);
102 /* Return an expression tree corresponding to the RHS of GIMPLE
106 gimple_assign_rhs_to_tree (gimple
*stmt
)
109 enum gimple_rhs_class grhs_class
;
111 grhs_class
= get_gimple_rhs_class (gimple_expr_code (stmt
));
113 if (grhs_class
== GIMPLE_TERNARY_RHS
)
114 t
= build3 (gimple_assign_rhs_code (stmt
),
115 TREE_TYPE (gimple_assign_lhs (stmt
)),
116 gimple_assign_rhs1 (stmt
),
117 gimple_assign_rhs2 (stmt
),
118 gimple_assign_rhs3 (stmt
));
119 else if (grhs_class
== GIMPLE_BINARY_RHS
)
120 t
= build2 (gimple_assign_rhs_code (stmt
),
121 TREE_TYPE (gimple_assign_lhs (stmt
)),
122 gimple_assign_rhs1 (stmt
),
123 gimple_assign_rhs2 (stmt
));
124 else if (grhs_class
== GIMPLE_UNARY_RHS
)
125 t
= build1 (gimple_assign_rhs_code (stmt
),
126 TREE_TYPE (gimple_assign_lhs (stmt
)),
127 gimple_assign_rhs1 (stmt
));
128 else if (grhs_class
== GIMPLE_SINGLE_RHS
)
130 t
= gimple_assign_rhs1 (stmt
);
131 /* Avoid modifying this tree in place below. */
132 if ((gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
)
133 && gimple_location (stmt
) != EXPR_LOCATION (t
))
134 || (gimple_block (stmt
)
135 && currently_expanding_to_rtl
142 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
))
143 SET_EXPR_LOCATION (t
, gimple_location (stmt
));
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Strip an SSA_NAME down to its underlying VAR_DECL, if any.  */
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
157 out of the same user variable being in multiple partitions (this is
158 less likely for compiler-introduced temps). */
161 leader_merge (tree cur
, tree next
)
163 if (cur
== NULL
|| cur
== next
)
166 if (DECL_P (cur
) && DECL_IGNORED_P (cur
))
169 if (DECL_P (next
) && DECL_IGNORED_P (next
))
175 /* Associate declaration T with storage space X. If T is no
176 SSA name this is exactly SET_DECL_RTL, otherwise make the
177 partition of T associated with X. */
179 set_rtl (tree t
, rtx x
)
181 gcc_checking_assert (!x
182 || !(TREE_CODE (t
) == SSA_NAME
|| is_gimple_reg (t
))
183 || (use_register_for_decl (t
)
185 || (GET_CODE (x
) == CONCAT
186 && (REG_P (XEXP (x
, 0))
187 || SUBREG_P (XEXP (x
, 0)))
188 && (REG_P (XEXP (x
, 1))
189 || SUBREG_P (XEXP (x
, 1))))
190 /* We need to accept PARALLELs for RESULT_DECLs
191 because of vector types with BLKmode returned
192 in multiple registers, but they are supposed
193 to be uncoalesced. */
194 || (GET_CODE (x
) == PARALLEL
196 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
197 && (GET_MODE (x
) == BLKmode
198 || !flag_tree_coalesce_vars
)))
199 : (MEM_P (x
) || x
== pc_rtx
200 || (GET_CODE (x
) == CONCAT
201 && MEM_P (XEXP (x
, 0))
202 && MEM_P (XEXP (x
, 1))))));
203 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204 RESULT_DECLs has the expected mode. For memory, we accept
205 unpromoted modes, since that's what we're likely to get. For
206 PARM_DECLs and RESULT_DECLs, we'll have been called by
207 set_parm_rtl, which will give us the default def, so we don't
208 have to compute it ourselves. For RESULT_DECLs, we accept mode
209 mismatches too, as long as we have BLKmode or are not coalescing
210 across variables, so that we don't reject BLKmode PARALLELs or
212 gcc_checking_assert (!x
|| x
== pc_rtx
|| TREE_CODE (t
) != SSA_NAME
214 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
215 && (promote_ssa_mode (t
, NULL
) == BLKmode
216 || !flag_tree_coalesce_vars
))
217 || !use_register_for_decl (t
)
218 || GET_MODE (x
) == promote_ssa_mode (t
, NULL
));
223 tree cur
= NULL_TREE
;
231 else if (SUBREG_P (xm
))
233 gcc_assert (subreg_lowpart_p (xm
));
234 xm
= SUBREG_REG (xm
);
237 else if (GET_CODE (xm
) == CONCAT
)
242 else if (GET_CODE (xm
) == PARALLEL
)
244 xm
= XVECEXP (xm
, 0, 0);
245 gcc_assert (GET_CODE (xm
) == EXPR_LIST
);
249 else if (xm
== pc_rtx
)
254 tree next
= skip
? cur
: leader_merge (cur
, SSAVAR (t
) ? SSAVAR (t
) : t
);
259 set_mem_attributes (x
,
260 next
&& TREE_CODE (next
) == SSA_NAME
264 set_reg_attrs_for_decl_rtl (next
, x
);
268 if (TREE_CODE (t
) == SSA_NAME
)
270 int part
= var_to_partition (SA
.map
, t
);
271 if (part
!= NO_PARTITION
)
273 if (SA
.partition_to_pseudo
[part
])
274 gcc_assert (SA
.partition_to_pseudo
[part
] == x
);
275 else if (x
!= pc_rtx
)
276 SA
.partition_to_pseudo
[part
] = x
;
278 /* For the benefit of debug information at -O0 (where
279 vartracking doesn't run) record the place also in the base
280 DECL. For PARMs and RESULTs, do so only when setting the
282 if (x
&& x
!= pc_rtx
&& SSA_NAME_VAR (t
)
283 && (VAR_P (SSA_NAME_VAR (t
))
284 || SSA_NAME_IS_DEFAULT_DEF (t
)))
286 tree var
= SSA_NAME_VAR (t
);
287 /* If we don't yet have something recorded, just record it now. */
288 if (!DECL_RTL_SET_P (var
))
289 SET_DECL_RTL (var
, x
);
290 /* If we have it set already to "multiple places" don't
292 else if (DECL_RTL (var
) == pc_rtx
)
294 /* If we have something recorded and it's not the same place
295 as we want to record now, we have multiple partitions for the
296 same base variable, with different places. We can't just
297 randomly choose one, hence we have to say that we don't know.
298 This only happens with optimization, and there var-tracking
299 will figure out the right thing. */
300 else if (DECL_RTL (var
) != x
)
301 SET_DECL_RTL (var
, pc_rtx
);
308 /* This structure holds data relevant to one variable that will be
309 placed in a stack slot. */
315 /* Initially, the size of the variable. Later, the size of the partition,
316 if this variable becomes it's partition's representative. */
319 /* The *byte* alignment required for this variable. Or as, with the
320 size, the alignment for this partition. */
323 /* The partition representative. */
324 size_t representative
;
326 /* The next stack variable in the partition, or EOC. */
329 /* The numbers of conflicting stack variables. */
333 #define EOC ((size_t)-1)
335 /* We have an array of such objects while deciding allocation. */
336 static struct stack_var
*stack_vars
;
337 static size_t stack_vars_alloc
;
338 static size_t stack_vars_num
;
339 static hash_map
<tree
, size_t> *decl_to_stack_part
;
341 /* Conflict bitmaps go on this obstack. This allows us to destroy
342 all of them in one big sweep. */
343 static bitmap_obstack stack_var_bitmap_obstack
;
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346 is non-decreasing. */
347 static size_t *stack_vars_sorted
;
349 /* The phase of the stack frame. This is the known misalignment of
350 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
351 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
352 static int frame_phase
;
354 /* Used during expand_used_vars to remember if we saw any decls for
355 which we'd like to enable stack smashing protection. */
356 static bool has_protected_decls
;
358 /* Used during expand_used_vars. Remember if we say a character buffer
359 smaller than our cutoff threshold. Used for -Wstack-protector. */
360 static bool has_short_buffer
;
362 /* Compute the byte alignment to use for DECL. Ignore alignment
363 we can't do with expected alignment of the stack boundary. */
366 align_local_variable (tree decl
)
370 if (TREE_CODE (decl
) == SSA_NAME
)
371 align
= TYPE_ALIGN (TREE_TYPE (decl
));
374 align
= LOCAL_DECL_ALIGNMENT (decl
);
375 SET_DECL_ALIGN (decl
, align
);
377 return align
/ BITS_PER_UNIT
;
380 /* Align given offset BASE with ALIGN. Truncate up if ALIGN_UP is true,
381 down otherwise. Return truncated BASE value. */
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base
, unsigned HOST_WIDE_INT align
, bool align_up
)
386 return align_up
? (base
+ align
- 1) & -align
: base
& -align
;
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390 Return the frame offset. */
393 alloc_stack_frame_space (poly_int64 size
, unsigned HOST_WIDE_INT align
)
395 poly_int64 offset
, new_frame_offset
;
397 if (FRAME_GROWS_DOWNWARD
)
400 = aligned_lower_bound (frame_offset
- frame_phase
- size
,
401 align
) + frame_phase
;
402 offset
= new_frame_offset
;
407 = aligned_upper_bound (frame_offset
- frame_phase
,
408 align
) + frame_phase
;
409 offset
= new_frame_offset
;
410 new_frame_offset
+= size
;
412 frame_offset
= new_frame_offset
;
414 if (frame_offset_overflow (frame_offset
, cfun
->decl
))
415 frame_offset
= offset
= 0;
420 /* Accumulate DECL into STACK_VARS. */
423 add_stack_var (tree decl
)
427 if (stack_vars_num
>= stack_vars_alloc
)
429 if (stack_vars_alloc
)
430 stack_vars_alloc
= stack_vars_alloc
* 3 / 2;
432 stack_vars_alloc
= 32;
434 = XRESIZEVEC (struct stack_var
, stack_vars
, stack_vars_alloc
);
436 if (!decl_to_stack_part
)
437 decl_to_stack_part
= new hash_map
<tree
, size_t>;
439 v
= &stack_vars
[stack_vars_num
];
440 decl_to_stack_part
->put (decl
, stack_vars_num
);
443 tree size
= TREE_CODE (decl
) == SSA_NAME
444 ? TYPE_SIZE_UNIT (TREE_TYPE (decl
))
445 : DECL_SIZE_UNIT (decl
);
446 v
->size
= tree_to_poly_uint64 (size
);
447 /* Ensure that all variables have size, so that &a != &b for any two
448 variables that are simultaneously live. */
449 if (known_eq (v
->size
, 0U))
451 v
->alignb
= align_local_variable (decl
);
452 /* An alignment of zero can mightily confuse us later. */
453 gcc_assert (v
->alignb
!= 0);
455 /* All variables are initially in their own partition. */
456 v
->representative
= stack_vars_num
;
459 /* All variables initially conflict with no other. */
462 /* Ensure that this decl doesn't get put onto the list twice. */
463 set_rtl (decl
, pc_rtx
);
468 /* Make the decls associated with luid's X and Y conflict. */
471 add_stack_var_conflict (size_t x
, size_t y
)
473 struct stack_var
*a
= &stack_vars
[x
];
474 struct stack_var
*b
= &stack_vars
[y
];
476 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
478 b
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
479 bitmap_set_bit (a
->conflicts
, y
);
480 bitmap_set_bit (b
->conflicts
, x
);
483 /* Check whether the decls associated with luid's X and Y conflict. */
486 stack_var_conflict_p (size_t x
, size_t y
)
488 struct stack_var
*a
= &stack_vars
[x
];
489 struct stack_var
*b
= &stack_vars
[y
];
492 /* Partitions containing an SSA name result from gimple registers
493 with things like unsupported modes. They are top-level and
494 hence conflict with everything else. */
495 if (TREE_CODE (a
->decl
) == SSA_NAME
|| TREE_CODE (b
->decl
) == SSA_NAME
)
498 if (!a
->conflicts
|| !b
->conflicts
)
500 return bitmap_bit_p (a
->conflicts
, y
);
503 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
504 enter its partition number into bitmap DATA. */
507 visit_op (gimple
*, tree op
, tree
, void *data
)
509 bitmap active
= (bitmap
)data
;
510 op
= get_base_address (op
);
513 && DECL_RTL_IF_SET (op
) == pc_rtx
)
515 size_t *v
= decl_to_stack_part
->get (op
);
517 bitmap_set_bit (active
, *v
);
522 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
523 record conflicts between it and all currently active other partitions
527 visit_conflict (gimple
*, tree op
, tree
, void *data
)
529 bitmap active
= (bitmap
)data
;
530 op
= get_base_address (op
);
533 && DECL_RTL_IF_SET (op
) == pc_rtx
)
535 size_t *v
= decl_to_stack_part
->get (op
);
536 if (v
&& bitmap_set_bit (active
, *v
))
541 gcc_assert (num
< stack_vars_num
);
542 EXECUTE_IF_SET_IN_BITMAP (active
, 0, i
, bi
)
543 add_stack_var_conflict (num
, i
);
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550 at the end of BB, leaving the result in WORK. We're called to generate
551 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
555 add_scope_conflicts_1 (basic_block bb
, bitmap work
, bool for_conflict
)
559 gimple_stmt_iterator gsi
;
560 walk_stmt_load_store_addr_fn visit
;
563 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
564 bitmap_ior_into (work
, (bitmap
)e
->src
->aux
);
568 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
570 gimple
*stmt
= gsi_stmt (gsi
);
571 walk_stmt_load_store_addr_ops (stmt
, work
, NULL
, NULL
, visit
);
573 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
575 gimple
*stmt
= gsi_stmt (gsi
);
577 if (gimple_clobber_p (stmt
))
579 tree lhs
= gimple_assign_lhs (stmt
);
581 /* Nested function lowering might introduce LHSs
582 that are COMPONENT_REFs. */
585 if (DECL_RTL_IF_SET (lhs
) == pc_rtx
586 && (v
= decl_to_stack_part
->get (lhs
)))
587 bitmap_clear_bit (work
, *v
);
589 else if (!is_gimple_debug (stmt
))
592 && visit
== visit_op
)
594 /* If this is the first real instruction in this BB we need
595 to add conflicts for everything live at this point now.
596 Unlike classical liveness for named objects we can't
597 rely on seeing a def/use of the names we're interested in.
598 There might merely be indirect loads/stores. We'd not add any
599 conflicts for such partitions. */
602 EXECUTE_IF_SET_IN_BITMAP (work
, 0, i
, bi
)
604 struct stack_var
*a
= &stack_vars
[i
];
606 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
607 bitmap_ior_into (a
->conflicts
, work
);
609 visit
= visit_conflict
;
611 walk_stmt_load_store_addr_ops (stmt
, work
, visit
, visit
, visit
);
616 /* Generate stack partition conflicts between all partitions that are
617 simultaneously live. */
620 add_scope_conflicts (void)
624 bitmap work
= BITMAP_ALLOC (NULL
);
628 /* We approximate the live range of a stack variable by taking the first
629 mention of its name as starting point(s), and by the end-of-scope
630 death clobber added by gimplify as ending point(s) of the range.
631 This overapproximates in the case we for instance moved an address-taken
632 operation upward, without also moving a dereference to it upwards.
633 But it's conservatively correct as a variable never can hold values
634 before its name is mentioned at least once.
636 We then do a mostly classical bitmap liveness algorithm. */
638 FOR_ALL_BB_FN (bb
, cfun
)
639 bb
->aux
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
641 rpo
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
642 n_bbs
= pre_and_rev_post_order_compute (NULL
, rpo
, false);
649 for (i
= 0; i
< n_bbs
; i
++)
652 bb
= BASIC_BLOCK_FOR_FN (cfun
, rpo
[i
]);
653 active
= (bitmap
)bb
->aux
;
654 add_scope_conflicts_1 (bb
, work
, false);
655 if (bitmap_ior_into (active
, work
))
660 FOR_EACH_BB_FN (bb
, cfun
)
661 add_scope_conflicts_1 (bb
, work
, true);
665 FOR_ALL_BB_FN (bb
, cfun
)
666 BITMAP_FREE (bb
->aux
);
669 /* A subroutine of partition_stack_vars. A comparison function for qsort,
670 sorting an array of indices by the properties of the object. */
673 stack_var_cmp (const void *a
, const void *b
)
675 size_t ia
= *(const size_t *)a
;
676 size_t ib
= *(const size_t *)b
;
677 unsigned int aligna
= stack_vars
[ia
].alignb
;
678 unsigned int alignb
= stack_vars
[ib
].alignb
;
679 poly_int64 sizea
= stack_vars
[ia
].size
;
680 poly_int64 sizeb
= stack_vars
[ib
].size
;
681 tree decla
= stack_vars
[ia
].decl
;
682 tree declb
= stack_vars
[ib
].decl
;
684 unsigned int uida
, uidb
;
686 /* Primary compare on "large" alignment. Large comes first. */
687 largea
= (aligna
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
688 largeb
= (alignb
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
689 if (largea
!= largeb
)
690 return (int)largeb
- (int)largea
;
692 /* Secondary compare on size, decreasing */
693 int diff
= compare_sizes_for_sort (sizeb
, sizea
);
697 /* Tertiary compare on true alignment, decreasing. */
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
706 if (TREE_CODE (decla
) == SSA_NAME
)
708 if (TREE_CODE (declb
) == SSA_NAME
)
709 uida
= SSA_NAME_VERSION (decla
), uidb
= SSA_NAME_VERSION (declb
);
713 else if (TREE_CODE (declb
) == SSA_NAME
)
716 uida
= DECL_UID (decla
), uidb
= DECL_UID (declb
);
724 struct part_traits
: unbounded_int_hashmap_traits
<size_t, bitmap
> {};
725 typedef hash_map
<size_t, bitmap
, part_traits
> part_hashmap
;
727 /* If the points-to solution *PI points to variables that are in a partition
728 together with other variables add all partition members to the pointed-to
732 add_partitioned_vars_to_ptset (struct pt_solution
*pt
,
733 part_hashmap
*decls_to_partitions
,
734 hash_set
<bitmap
> *visited
, bitmap temp
)
742 /* The pointed-to vars bitmap is shared, it is enough to
744 || visited
->add (pt
->vars
))
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add we make sure to visit each of the partitions only
752 EXECUTE_IF_SET_IN_BITMAP (pt
->vars
, 0, i
, bi
)
754 || !bitmap_bit_p (temp
, i
))
755 && (part
= decls_to_partitions
->get (i
)))
756 bitmap_ior_into (temp
, *part
);
757 if (!bitmap_empty_p (temp
))
758 bitmap_ior_into (pt
->vars
, temp
);
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
767 update_alias_info_with_stack_vars (void)
769 part_hashmap
*decls_to_partitions
= NULL
;
771 tree var
= NULL_TREE
;
773 for (i
= 0; i
< stack_vars_num
; i
++)
777 struct ptr_info_def
*pi
;
779 /* Not interested in partitions with single variable. */
780 if (stack_vars
[i
].representative
!= i
781 || stack_vars
[i
].next
== EOC
)
784 if (!decls_to_partitions
)
786 decls_to_partitions
= new part_hashmap
;
787 cfun
->gimple_df
->decls_to_pointers
= new hash_map
<tree
, tree
>;
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var
== NULL_TREE
)
794 var
= create_tmp_var (ptr_type_node
);
795 name
= make_ssa_name (var
);
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part
= BITMAP_GGC_ALLOC ();
800 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
802 tree decl
= stack_vars
[j
].decl
;
803 unsigned int uid
= DECL_PT_UID (decl
);
804 bitmap_set_bit (part
, uid
);
805 decls_to_partitions
->put (uid
, part
);
806 cfun
->gimple_df
->decls_to_pointers
->put (decl
, name
);
807 if (TREE_ADDRESSABLE (decl
))
808 TREE_ADDRESSABLE (name
) = 1;
811 /* Make the SSA name point to all partition members. */
812 pi
= get_ptr_info (name
);
813 pt_solution_set (&pi
->pt
, part
, false);
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions
)
822 hash_set
<bitmap
> visited
;
823 bitmap temp
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
825 FOR_EACH_SSA_NAME (i
, name
, cfun
)
827 struct ptr_info_def
*pi
;
829 if (POINTER_TYPE_P (TREE_TYPE (name
))
830 && ((pi
= SSA_NAME_PTR_INFO (name
)) != NULL
))
831 add_partitioned_vars_to_ptset (&pi
->pt
, decls_to_partitions
,
835 add_partitioned_vars_to_ptset (&cfun
->gimple_df
->escaped
,
836 decls_to_partitions
, &visited
, temp
);
838 delete decls_to_partitions
;
843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
845 Merge them into a single partition A. */
848 union_stack_vars (size_t a
, size_t b
)
850 struct stack_var
*vb
= &stack_vars
[b
];
854 gcc_assert (stack_vars
[b
].next
== EOC
);
855 /* Add B to A's partition. */
856 stack_vars
[b
].next
= stack_vars
[a
].next
;
857 stack_vars
[b
].representative
= a
;
858 stack_vars
[a
].next
= b
;
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars
[a
].alignb
< stack_vars
[b
].alignb
)
862 stack_vars
[a
].alignb
= stack_vars
[b
].alignb
;
864 /* Update the interference graph and merge the conflicts. */
867 EXECUTE_IF_SET_IN_BITMAP (vb
->conflicts
, 0, u
, bi
)
868 add_stack_var_conflict (a
, stack_vars
[u
].representative
);
869 BITMAP_FREE (vb
->conflicts
);
873 /* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
877 Sort the objects by size in descending order.
882 Look for the largest non-conflicting object B with size <= S.
889 partition_stack_vars (void)
891 size_t si
, sj
, n
= stack_vars_num
;
893 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
894 for (si
= 0; si
< n
; ++si
)
895 stack_vars_sorted
[si
] = si
;
900 qsort (stack_vars_sorted
, n
, sizeof (size_t), stack_var_cmp
);
902 for (si
= 0; si
< n
; ++si
)
904 size_t i
= stack_vars_sorted
[si
];
905 unsigned int ialign
= stack_vars
[i
].alignb
;
906 poly_int64 isize
= stack_vars
[i
].size
;
908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars
[i
].representative
!= i
)
914 for (sj
= si
+ 1; sj
< n
; ++sj
)
916 size_t j
= stack_vars_sorted
[sj
];
917 unsigned int jalign
= stack_vars
[j
].alignb
;
918 poly_int64 jsize
= stack_vars
[j
].size
;
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars
[j
].representative
!= j
)
924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
927 != (jalign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
))
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
934 if (asan_sanitize_stack_p ()
935 && maybe_ne (isize
, jsize
)
936 && ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i
, j
))
943 /* UNION the objects, placing J at OFFSET. */
944 union_stack_vars (i
, j
);
948 update_alias_info_with_stack_vars ();
951 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
954 dump_stack_var_partition (void)
956 size_t si
, i
, j
, n
= stack_vars_num
;
958 for (si
= 0; si
< n
; ++si
)
960 i
= stack_vars_sorted
[si
];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars
[i
].representative
!= i
)
966 fprintf (dump_file
, "Partition %lu: size ", (unsigned long) i
);
967 print_dec (stack_vars
[i
].size
, dump_file
);
968 fprintf (dump_file
, " align %u\n", stack_vars
[i
].alignb
);
970 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
972 fputc ('\t', dump_file
);
973 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
975 fputc ('\n', dump_file
);
979 /* Assign rtl to DECL at BASE + OFFSET. */
982 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
988 /* If this fails, we've overflowed the stack frame. Error nicely? */
989 gcc_assert (known_eq (offset
, trunc_int_for_mode (offset
, Pmode
)));
991 x
= plus_constant (Pmode
, base
, offset
);
992 x
= gen_rtx_MEM (TREE_CODE (decl
) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl
))
994 : DECL_MODE (SSAVAR (decl
)), x
);
996 if (TREE_CODE (decl
) != SSA_NAME
)
998 /* Set alignment we actually gave this decl if it isn't an SSA name.
999 If it is we generate stack slots only accidentally so it isn't as
1000 important, we'll simply use the alignment that is already set. */
1001 if (base
== virtual_stack_vars_rtx
)
1002 offset
-= frame_phase
;
1003 align
= known_alignment (offset
);
1004 align
*= BITS_PER_UNIT
;
1005 if (align
== 0 || align
> base_align
)
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
1012 SET_DECL_ALIGN (decl
, align
);
1013 DECL_USER_ALIGN (decl
) = 0;
1019 struct stack_vars_data
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
1023 The vector is in reversed, highest offset pairs come first. */
1024 auto_vec
<HOST_WIDE_INT
> asan_vec
;
1026 /* Vector of partition representative decls in between the paddings. */
1027 auto_vec
<tree
> asan_decl_vec
;
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb
;
1036 /* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1041 expand_stack_vars (bool (*pred
) (size_t), struct stack_vars_data
*data
)
1043 size_t si
, i
, j
, n
= stack_vars_num
;
1044 poly_uint64 large_size
= 0, large_alloc
= 0;
1045 rtx large_base
= NULL
;
1046 unsigned large_align
= 0;
1047 bool large_allocation_done
= false;
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
1052 no predicate involved. */
1053 large_align
= stack_vars
[stack_vars_sorted
[0]].alignb
* BITS_PER_UNIT
;
1054 if (pred
== NULL
&& large_align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1056 /* Find the total size of these variables. */
1057 for (si
= 0; si
< n
; ++si
)
1061 i
= stack_vars_sorted
[si
];
1062 alignb
= stack_vars
[i
].alignb
;
1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb
* BITS_PER_UNIT
) > large_align
)
1069 large_align
= alignb
* BITS_PER_UNIT
;
1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars
[i
].representative
!= i
)
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl
= stack_vars
[i
].decl
;
1082 if (TREE_CODE (decl
) == SSA_NAME
1083 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1084 : DECL_RTL (decl
) != pc_rtx
)
1087 large_size
= aligned_upper_bound (large_size
, alignb
);
1088 large_size
+= stack_vars
[i
].size
;
1092 for (si
= 0; si
< n
; ++si
)
1095 unsigned base_align
, alignb
;
1098 i
= stack_vars_sorted
[si
];
1100 /* Skip variables that aren't partition representatives, for now. */
1101 if (stack_vars
[i
].representative
!= i
)
1104 /* Skip variables that have already had rtl assigned. See also
1105 add_stack_var where we perpetrate this pc_rtx hack. */
1106 decl
= stack_vars
[i
].decl
;
1107 if (TREE_CODE (decl
) == SSA_NAME
1108 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1109 : DECL_RTL (decl
) != pc_rtx
)
1112 /* Check the predicate to see whether this variable should be
1113 allocated in this pass. */
1114 if (pred
&& !pred (i
))
1117 alignb
= stack_vars
[i
].alignb
;
1118 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1120 base
= virtual_stack_vars_rtx
;
1121 /* ASAN description strings don't yet have a syntax for expressing
1122 polynomial offsets. */
1123 HOST_WIDE_INT prev_offset
;
1124 if (asan_sanitize_stack_p ()
1126 && frame_offset
.is_constant (&prev_offset
)
1127 && stack_vars
[i
].size
.is_constant ())
1129 prev_offset
= align_base (prev_offset
,
1130 MAX (alignb
, ASAN_RED_ZONE_SIZE
),
1131 !FRAME_GROWS_DOWNWARD
);
1132 tree repr_decl
= NULL_TREE
;
1134 = alloc_stack_frame_space (stack_vars
[i
].size
1135 + ASAN_RED_ZONE_SIZE
,
1136 MAX (alignb
, ASAN_RED_ZONE_SIZE
));
1138 data
->asan_vec
.safe_push (prev_offset
);
1139 /* Allocating a constant amount of space from a constant
1140 starting offset must give a constant result. */
1141 data
->asan_vec
.safe_push ((offset
+ stack_vars
[i
].size
)
1143 /* Find best representative of the partition.
1144 Prefer those with DECL_NAME, even better
1145 satisfying asan_protect_stack_decl predicate. */
1146 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1147 if (asan_protect_stack_decl (stack_vars
[j
].decl
)
1148 && DECL_NAME (stack_vars
[j
].decl
))
1150 repr_decl
= stack_vars
[j
].decl
;
1153 else if (repr_decl
== NULL_TREE
1154 && DECL_P (stack_vars
[j
].decl
)
1155 && DECL_NAME (stack_vars
[j
].decl
))
1156 repr_decl
= stack_vars
[j
].decl
;
1157 if (repr_decl
== NULL_TREE
)
1158 repr_decl
= stack_vars
[i
].decl
;
1159 data
->asan_decl_vec
.safe_push (repr_decl
);
1161 /* Make sure a representative is unpoison if another
1162 variable in the partition is handled by
1163 use-after-scope sanitization. */
1164 if (asan_handled_variables
!= NULL
1165 && !asan_handled_variables
->contains (repr_decl
))
1167 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1168 if (asan_handled_variables
->contains (stack_vars
[j
].decl
))
1171 asan_handled_variables
->add (repr_decl
);
1174 data
->asan_alignb
= MAX (data
->asan_alignb
, alignb
);
1175 if (data
->asan_base
== NULL
)
1176 data
->asan_base
= gen_reg_rtx (Pmode
);
1177 base
= data
->asan_base
;
1179 if (!STRICT_ALIGNMENT
)
1180 base_align
= crtl
->max_used_stack_slot_alignment
;
1182 base_align
= MAX (crtl
->max_used_stack_slot_alignment
,
1183 GET_MODE_ALIGNMENT (SImode
)
1184 << ASAN_SHADOW_SHIFT
);
1188 offset
= alloc_stack_frame_space (stack_vars
[i
].size
, alignb
);
1189 base_align
= crtl
->max_used_stack_slot_alignment
;
1194 /* Large alignment is only processed in the last pass. */
1198 /* If there were any variables requiring "large" alignment, allocate
1200 if (maybe_ne (large_size
, 0U) && ! large_allocation_done
)
1203 rtx large_allocsize
;
1205 large_allocsize
= gen_int_mode (large_size
, Pmode
);
1206 get_dynamic_stack_size (&large_allocsize
, 0, large_align
, NULL
);
1207 loffset
= alloc_stack_frame_space
1208 (rtx_to_poly_int64 (large_allocsize
),
1209 PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
);
1210 large_base
= get_dynamic_stack_base (loffset
, large_align
);
1211 large_allocation_done
= true;
1213 gcc_assert (large_base
!= NULL
);
1215 large_alloc
= aligned_upper_bound (large_alloc
, alignb
);
1216 offset
= large_alloc
;
1217 large_alloc
+= stack_vars
[i
].size
;
1220 base_align
= large_align
;
1223 /* Create rtl for each variable based on their location within the
1225 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1227 expand_one_stack_var_at (stack_vars
[j
].decl
,
1233 gcc_assert (known_eq (large_alloc
, large_size
));
1236 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1238 account_stack_vars (void)
1240 size_t si
, j
, i
, n
= stack_vars_num
;
1241 poly_uint64 size
= 0;
1243 for (si
= 0; si
< n
; ++si
)
1245 i
= stack_vars_sorted
[si
];
1247 /* Skip variables that aren't partition representatives, for now. */
1248 if (stack_vars
[i
].representative
!= i
)
1251 size
+= stack_vars
[i
].size
;
1252 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1253 set_rtl (stack_vars
[j
].decl
, NULL
);
1258 /* Record the RTL assignment X for the default def of PARM. */
1261 set_parm_rtl (tree parm
, rtx x
)
1263 gcc_assert (TREE_CODE (parm
) == PARM_DECL
1264 || TREE_CODE (parm
) == RESULT_DECL
);
1266 if (x
&& !MEM_P (x
))
1268 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (parm
),
1269 TYPE_MODE (TREE_TYPE (parm
)),
1270 TYPE_ALIGN (TREE_TYPE (parm
)));
1272 /* If the variable alignment is very large we'll dynamicaly
1273 allocate it, which means that in-frame portion is just a
1274 pointer. ??? We've got a pseudo for sure here, do we
1275 actually dynamically allocate its spilling area if needed?
1276 ??? Isn't it a problem when Pmode alignment also exceeds
1277 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1278 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1279 align
= GET_MODE_ALIGNMENT (Pmode
);
1281 record_alignment_for_reg_var (align
);
1284 tree ssa
= ssa_default_def (cfun
, parm
);
1286 return set_rtl (parm
, x
);
1288 int part
= var_to_partition (SA
.map
, ssa
);
1289 gcc_assert (part
!= NO_PARTITION
);
1291 bool changed
= bitmap_bit_p (SA
.partitions_for_parm_default_defs
, part
);
1292 gcc_assert (changed
);
1295 gcc_assert (DECL_RTL (parm
) == x
);
1298 /* A subroutine of expand_one_var. Called to immediately assign rtl
1299 to a variable to be allocated in the stack frame. */
1302 expand_one_stack_var_1 (tree var
)
1306 unsigned byte_align
;
1308 if (TREE_CODE (var
) == SSA_NAME
)
1310 tree type
= TREE_TYPE (var
);
1311 size
= tree_to_poly_uint64 (TYPE_SIZE_UNIT (type
));
1312 byte_align
= TYPE_ALIGN_UNIT (type
);
1316 size
= tree_to_poly_uint64 (DECL_SIZE_UNIT (var
));
1317 byte_align
= align_local_variable (var
);
1320 /* We handle highly aligned variables in expand_stack_vars. */
1321 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1323 offset
= alloc_stack_frame_space (size
, byte_align
);
1325 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1326 crtl
->max_used_stack_slot_alignment
, offset
);
1329 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1330 already assigned some MEM. */
1333 expand_one_stack_var (tree var
)
1335 if (TREE_CODE (var
) == SSA_NAME
)
1337 int part
= var_to_partition (SA
.map
, var
);
1338 if (part
!= NO_PARTITION
)
1340 rtx x
= SA
.partition_to_pseudo
[part
];
1342 gcc_assert (MEM_P (x
));
1347 return expand_one_stack_var_1 (var
);
1350 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1351 that will reside in a hard register. */
1354 expand_one_hard_reg_var (tree var
)
1356 rest_of_decl_compilation (var
, 0, 0);
1359 /* Record the alignment requirements of some variable assigned to a
1363 record_alignment_for_reg_var (unsigned int align
)
1365 if (SUPPORTS_STACK_ALIGNMENT
1366 && crtl
->stack_alignment_estimated
< align
)
1368 /* stack_alignment_estimated shouldn't change after stack
1369 realign decision made */
1370 gcc_assert (!crtl
->stack_realign_processed
);
1371 crtl
->stack_alignment_estimated
= align
;
1374 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1375 So here we only make sure stack_alignment_needed >= align. */
1376 if (crtl
->stack_alignment_needed
< align
)
1377 crtl
->stack_alignment_needed
= align
;
1378 if (crtl
->max_used_stack_slot_alignment
< align
)
1379 crtl
->max_used_stack_slot_alignment
= align
;
1382 /* Create RTL for an SSA partition. */
1385 expand_one_ssa_partition (tree var
)
1387 int part
= var_to_partition (SA
.map
, var
);
1388 gcc_assert (part
!= NO_PARTITION
);
1390 if (SA
.partition_to_pseudo
[part
])
1393 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1394 TYPE_MODE (TREE_TYPE (var
)),
1395 TYPE_ALIGN (TREE_TYPE (var
)));
1397 /* If the variable alignment is very large we'll dynamicaly allocate
1398 it, which means that in-frame portion is just a pointer. */
1399 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1400 align
= GET_MODE_ALIGNMENT (Pmode
);
1402 record_alignment_for_reg_var (align
);
1404 if (!use_register_for_decl (var
))
1406 if (defer_stack_allocation (var
, true))
1407 add_stack_var (var
);
1409 expand_one_stack_var_1 (var
);
1413 machine_mode reg_mode
= promote_ssa_mode (var
, NULL
);
1414 rtx x
= gen_reg_rtx (reg_mode
);
1418 /* For a promoted variable, X will not be used directly but wrapped in a
1419 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1420 will assume that its upper bits can be inferred from its lower bits.
1421 Therefore, if X isn't initialized on every path from the entry, then
1422 we must do it manually in order to fulfill the above assumption. */
1423 if (reg_mode
!= TYPE_MODE (TREE_TYPE (var
))
1424 && bitmap_bit_p (SA
.partitions_for_undefined_values
, part
))
1425 emit_move_insn (x
, CONST0_RTX (reg_mode
));
1428 /* Record the association between the RTL generated for partition PART
1429 and the underlying variable of the SSA_NAME VAR. */
1432 adjust_one_expanded_partition_var (tree var
)
1437 tree decl
= SSA_NAME_VAR (var
);
1439 int part
= var_to_partition (SA
.map
, var
);
1440 if (part
== NO_PARTITION
)
1443 rtx x
= SA
.partition_to_pseudo
[part
];
1452 /* Note if the object is a user variable. */
1453 if (decl
&& !DECL_ARTIFICIAL (decl
))
1456 if (POINTER_TYPE_P (decl
? TREE_TYPE (decl
) : TREE_TYPE (var
)))
1457 mark_reg_pointer (x
, get_pointer_alignment (var
));
1460 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1461 that will reside in a pseudo register. */
1464 expand_one_register_var (tree var
)
1466 if (TREE_CODE (var
) == SSA_NAME
)
1468 int part
= var_to_partition (SA
.map
, var
);
1469 if (part
!= NO_PARTITION
)
1471 rtx x
= SA
.partition_to_pseudo
[part
];
1473 gcc_assert (REG_P (x
));
1480 tree type
= TREE_TYPE (decl
);
1481 machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1482 rtx x
= gen_reg_rtx (reg_mode
);
1486 /* Note if the object is a user variable. */
1487 if (!DECL_ARTIFICIAL (decl
))
1490 if (POINTER_TYPE_P (type
))
1491 mark_reg_pointer (x
, get_pointer_alignment (var
));
1494 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1495 has some associated error, e.g. its type is error-mark. We just need
1496 to pick something that won't crash the rest of the compiler. */
1499 expand_one_error_var (tree var
)
1501 machine_mode mode
= DECL_MODE (var
);
1504 if (mode
== BLKmode
)
1505 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1506 else if (mode
== VOIDmode
)
1509 x
= gen_reg_rtx (mode
);
1511 SET_DECL_RTL (var
, x
);
1514 /* A subroutine of expand_one_var. VAR is a variable that will be
1515 allocated to the local stack frame. Return true if we wish to
1516 add VAR to STACK_VARS so that it will be coalesced with other
1517 variables. Return false to allocate VAR immediately.
1519 This function is used to reduce the number of variables considered
1520 for coalescing, which reduces the size of the quadratic problem. */
1523 defer_stack_allocation (tree var
, bool toplevel
)
1525 tree size_unit
= TREE_CODE (var
) == SSA_NAME
1526 ? TYPE_SIZE_UNIT (TREE_TYPE (var
))
1527 : DECL_SIZE_UNIT (var
);
1530 /* Whether the variable is small enough for immediate allocation not to be
1531 a problem with regard to the frame size. */
1533 = (poly_int_tree_p (size_unit
, &size
)
1534 && (estimated_poly_value (size
)
1535 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING
)));
1537 /* If stack protection is enabled, *all* stack variables must be deferred,
1538 so that we can re-order the strings to the top of the frame.
1539 Similarly for Address Sanitizer. */
1540 if (flag_stack_protect
|| asan_sanitize_stack_p ())
1543 unsigned int align
= TREE_CODE (var
) == SSA_NAME
1544 ? TYPE_ALIGN (TREE_TYPE (var
))
1547 /* We handle "large" alignment via dynamic allocation. We want to handle
1548 this extra complication in only one place, so defer them. */
1549 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1552 bool ignored
= TREE_CODE (var
) == SSA_NAME
1553 ? !SSAVAR (var
) || DECL_IGNORED_P (SSA_NAME_VAR (var
))
1554 : DECL_IGNORED_P (var
);
1556 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1557 might be detached from their block and appear at toplevel when we reach
1558 here. We want to coalesce them with variables from other blocks when
1559 the immediate contribution to the frame size would be noticeable. */
1560 if (toplevel
&& optimize
> 0 && ignored
&& !smallish
)
1563 /* Variables declared in the outermost scope automatically conflict
1564 with every other variable. The only reason to want to defer them
1565 at all is that, after sorting, we can more efficiently pack
1566 small variables in the stack frame. Continue to defer at -O2. */
1567 if (toplevel
&& optimize
< 2)
1570 /* Without optimization, *most* variables are allocated from the
1571 stack, which makes the quadratic problem large exactly when we
1572 want compilation to proceed as quickly as possible. On the
1573 other hand, we don't want the function's stack frame size to
1574 get completely out of hand. So we avoid adding scalars and
1575 "small" aggregates to the list at all. */
1576 if (optimize
== 0 && smallish
)
1582 /* A subroutine of expand_used_vars. Expand one variable according to
1583 its flavor. Variables to be placed on the stack are not actually
1584 expanded yet, merely recorded.
1585 When REALLY_EXPAND is false, only add stack values to be allocated.
1586 Return stack usage this variable is supposed to take.
1590 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1592 unsigned int align
= BITS_PER_UNIT
;
1597 if (TREE_TYPE (var
) != error_mark_node
&& VAR_P (var
))
1599 if (is_global_var (var
))
1602 /* Because we don't know if VAR will be in register or on stack,
1603 we conservatively assume it will be on stack even if VAR is
1604 eventually put into register after RA pass. For non-automatic
1605 variables, which won't be on stack, we collect alignment of
1606 type and ignore user specified alignment. Similarly for
1607 SSA_NAMEs for which use_register_for_decl returns true. */
1608 if (TREE_STATIC (var
)
1609 || DECL_EXTERNAL (var
)
1610 || (TREE_CODE (origvar
) == SSA_NAME
&& use_register_for_decl (var
)))
1611 align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1612 TYPE_MODE (TREE_TYPE (var
)),
1613 TYPE_ALIGN (TREE_TYPE (var
)));
1614 else if (DECL_HAS_VALUE_EXPR_P (var
)
1615 || (DECL_RTL_SET_P (var
) && MEM_P (DECL_RTL (var
))))
1616 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1617 or variables which were assigned a stack slot already by
1618 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1619 changed from the offset chosen to it. */
1620 align
= crtl
->stack_alignment_estimated
;
1622 align
= MINIMUM_ALIGNMENT (var
, DECL_MODE (var
), DECL_ALIGN (var
));
1624 /* If the variable alignment is very large we'll dynamicaly allocate
1625 it, which means that in-frame portion is just a pointer. */
1626 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1627 align
= GET_MODE_ALIGNMENT (Pmode
);
1630 record_alignment_for_reg_var (align
);
1633 if (TREE_CODE (origvar
) == SSA_NAME
)
1635 gcc_assert (!VAR_P (var
)
1636 || (!DECL_EXTERNAL (var
)
1637 && !DECL_HAS_VALUE_EXPR_P (var
)
1638 && !TREE_STATIC (var
)
1639 && TREE_TYPE (var
) != error_mark_node
1640 && !DECL_HARD_REGISTER (var
)
1643 if (!VAR_P (var
) && TREE_CODE (origvar
) != SSA_NAME
)
1645 else if (DECL_EXTERNAL (var
))
1647 else if (DECL_HAS_VALUE_EXPR_P (var
))
1649 else if (TREE_STATIC (var
))
1651 else if (TREE_CODE (origvar
) != SSA_NAME
&& DECL_RTL_SET_P (var
))
1653 else if (TREE_TYPE (var
) == error_mark_node
)
1656 expand_one_error_var (var
);
1658 else if (VAR_P (var
) && DECL_HARD_REGISTER (var
))
1662 expand_one_hard_reg_var (var
);
1663 if (!DECL_HARD_REGISTER (var
))
1664 /* Invalid register specification. */
1665 expand_one_error_var (var
);
1668 else if (use_register_for_decl (var
))
1671 expand_one_register_var (origvar
);
1673 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var
), &size
)
1674 || !valid_constant_size_p (DECL_SIZE_UNIT (var
)))
1676 /* Reject variables which cover more than half of the address-space. */
1679 error ("size of variable %q+D is too large", var
);
1680 expand_one_error_var (var
);
1683 else if (defer_stack_allocation (var
, toplevel
))
1684 add_stack_var (origvar
);
1689 if (lookup_attribute ("naked",
1690 DECL_ATTRIBUTES (current_function_decl
)))
1691 error ("cannot allocate stack for variable %q+D, naked function.",
1694 expand_one_stack_var (origvar
);
1701 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1702 expanding variables. Those variables that can be put into registers
1703 are allocated pseudos; those that can't are put on the stack.
1705 TOPLEVEL is true if this is the outermost BLOCK. */
1708 expand_used_vars_for_block (tree block
, bool toplevel
)
1712 /* Expand all variables at this level. */
1713 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1715 && ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1716 || !DECL_NONSHAREABLE (t
)))
1717 expand_one_var (t
, toplevel
, true);
1719 /* Expand all variables at containing levels. */
1720 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1721 expand_used_vars_for_block (t
, false);
1724 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1725 and clear TREE_USED on all local variables. */
1728 clear_tree_used (tree block
)
1732 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1733 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1734 if ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1735 || !DECL_NONSHAREABLE (t
))
1738 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1739 clear_tree_used (t
);
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
1757 stack_protect_classify_type (tree type
)
1759 unsigned int ret
= 0;
1762 switch (TREE_CODE (type
))
1765 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1766 if (t
== char_type_node
1767 || t
== signed_char_type_node
1768 || t
== unsigned_char_type_node
)
1770 unsigned HOST_WIDE_INT max
= PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
);
1771 unsigned HOST_WIDE_INT len
;
1773 if (!TYPE_SIZE_UNIT (type
)
1774 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
)))
1777 len
= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
1780 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1782 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1785 ret
= SPCT_HAS_ARRAY
;
1789 case QUAL_UNION_TYPE
:
1791 ret
= SPCT_HAS_AGGREGATE
;
1792 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1793 if (TREE_CODE (t
) == FIELD_DECL
)
1794 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
1804 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1805 part of the local stack frame. Remember if we ever return nonzero for
1806 any variable in this function. The return value is the phase number in
1807 which the variable should be allocated. */
1810 stack_protect_decl_phase (tree decl
)
1812 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1815 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1816 has_short_buffer
= true;
1818 if (flag_stack_protect
== SPCT_FLAG_ALL
1819 || flag_stack_protect
== SPCT_FLAG_STRONG
1820 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
1821 && lookup_attribute ("stack_protect",
1822 DECL_ATTRIBUTES (current_function_decl
))))
1824 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1825 && !(bits
& SPCT_HAS_AGGREGATE
))
1827 else if (bits
& SPCT_HAS_ARRAY
)
1831 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1834 has_protected_decls
= true;
1839 /* Two helper routines that check for phase 1 and phase 2. These are used
1840 as callbacks for expand_stack_vars. */
1843 stack_protect_decl_phase_1 (size_t i
)
1845 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 1;
1849 stack_protect_decl_phase_2 (size_t i
)
1851 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 2;
1854 /* And helper function that checks for asan phase (with stack protector
1855 it is phase 3). This is used as callback for expand_stack_vars.
1856 Returns true if any of the vars in the partition need to be protected. */
1859 asan_decl_phase_3 (size_t i
)
1863 if (asan_protect_stack_decl (stack_vars
[i
].decl
))
1865 i
= stack_vars
[i
].next
;
1870 /* Ensure that variables in different stack protection phases conflict
1871 so that they are not merged and share the same stack slot. */
1874 add_stack_protection_conflicts (void)
1876 size_t i
, j
, n
= stack_vars_num
;
1877 unsigned char *phase
;
1879 phase
= XNEWVEC (unsigned char, n
);
1880 for (i
= 0; i
< n
; ++i
)
1881 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1883 for (i
= 0; i
< n
; ++i
)
1885 unsigned char ph_i
= phase
[i
];
1886 for (j
= i
+ 1; j
< n
; ++j
)
1887 if (ph_i
!= phase
[j
])
1888 add_stack_var_conflict (i
, j
);
1894 /* Create a decl for the guard at the top of the stack frame. */
1897 create_stack_guard (void)
1899 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1900 VAR_DECL
, NULL
, ptr_type_node
);
1901 TREE_THIS_VOLATILE (guard
) = 1;
1902 TREE_USED (guard
) = 1;
1903 expand_one_stack_var (guard
);
1904 crtl
->stack_protect_guard
= guard
;
1907 /* Prepare for expanding variables. */
1909 init_vars_expansion (void)
1911 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1912 bitmap_obstack_initialize (&stack_var_bitmap_obstack
);
1914 /* A map from decl to stack partition. */
1915 decl_to_stack_part
= new hash_map
<tree
, size_t>;
1917 /* Initialize local stack smashing state. */
1918 has_protected_decls
= false;
1919 has_short_buffer
= false;
1922 /* Free up stack variable graph data. */
1924 fini_vars_expansion (void)
1926 bitmap_obstack_release (&stack_var_bitmap_obstack
);
1928 XDELETEVEC (stack_vars
);
1929 if (stack_vars_sorted
)
1930 XDELETEVEC (stack_vars_sorted
);
1932 stack_vars_sorted
= NULL
;
1933 stack_vars_alloc
= stack_vars_num
= 0;
1934 delete decl_to_stack_part
;
1935 decl_to_stack_part
= NULL
;
1938 /* Make a fair guess for the size of the stack frame of the function
1939 in NODE. This doesn't have to be exact, the result is only used in
1940 the inline heuristics. So we don't want to run the full stack var
1941 packing algorithm (which is quadratic in the number of stack vars).
1942 Instead, we calculate the total size of all stack vars. This turns
1943 out to be a pretty fair estimate -- packing of stack vars doesn't
1944 happen very often. */
1947 estimated_stack_frame_size (struct cgraph_node
*node
)
1949 poly_int64 size
= 0;
1952 struct function
*fn
= DECL_STRUCT_FUNCTION (node
->decl
);
1956 init_vars_expansion ();
1958 FOR_EACH_LOCAL_DECL (fn
, i
, var
)
1959 if (auto_var_in_fn_p (var
, fn
->decl
))
1960 size
+= expand_one_var (var
, true, false);
1962 if (stack_vars_num
> 0)
1964 /* Fake sorting the stack vars for account_stack_vars (). */
1965 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
1966 for (i
= 0; i
< stack_vars_num
; ++i
)
1967 stack_vars_sorted
[i
] = i
;
1968 size
+= account_stack_vars ();
1971 fini_vars_expansion ();
1973 return estimated_poly_value (size
);
1976 /* Helper routine to check if a record or union contains an array field. */
1979 record_or_union_type_has_array_p (const_tree tree_type
)
1981 tree fields
= TYPE_FIELDS (tree_type
);
1984 for (f
= fields
; f
; f
= DECL_CHAIN (f
))
1985 if (TREE_CODE (f
) == FIELD_DECL
)
1987 tree field_type
= TREE_TYPE (f
);
1988 if (RECORD_OR_UNION_TYPE_P (field_type
)
1989 && record_or_union_type_has_array_p (field_type
))
1991 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1997 /* Check if the current function has local referenced variables that
1998 have their addresses taken, contain an array, or are arrays. */
2001 stack_protect_decl_p ()
2006 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2007 if (!is_global_var (var
))
2009 tree var_type
= TREE_TYPE (var
);
2011 && (TREE_CODE (var_type
) == ARRAY_TYPE
2012 || TREE_ADDRESSABLE (var
)
2013 || (RECORD_OR_UNION_TYPE_P (var_type
)
2014 && record_or_union_type_has_array_p (var_type
))))
2020 /* Check if the current function has calls that use a return slot. */
2023 stack_protect_return_slot_p ()
2027 FOR_ALL_BB_FN (bb
, cfun
)
2028 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
2029 !gsi_end_p (gsi
); gsi_next (&gsi
))
2031 gimple
*stmt
= gsi_stmt (gsi
);
2032 /* This assumes that calls to internal-only functions never
2033 use a return slot. */
2034 if (is_gimple_call (stmt
)
2035 && !gimple_call_internal_p (stmt
)
2036 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt
)),
2037 gimple_call_fndecl (stmt
)))
2043 /* Expand all variables used in the function. */
2046 expand_used_vars (void)
2048 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
2049 auto_vec
<tree
> maybe_local_decls
;
2050 rtx_insn
*var_end_seq
= NULL
;
2053 bool gen_stack_protect_signal
= false;
2055 /* Compute the phase of the stack frame for this function. */
2057 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2058 int off
= targetm
.starting_frame_offset () % align
;
2059 frame_phase
= off
? align
- off
: 0;
2062 /* Set TREE_USED on all variables in the local_decls. */
2063 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2064 TREE_USED (var
) = 1;
2065 /* Clear TREE_USED on all variables associated with a block scope. */
2066 clear_tree_used (DECL_INITIAL (current_function_decl
));
2068 init_vars_expansion ();
2070 if (targetm
.use_pseudo_pic_reg ())
2071 pic_offset_table_rtx
= gen_reg_rtx (Pmode
);
2073 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
2075 if (bitmap_bit_p (SA
.partitions_for_parm_default_defs
, i
))
2078 tree var
= partition_to_var (SA
.map
, i
);
2080 gcc_assert (!virtual_operand_p (var
));
2082 expand_one_ssa_partition (var
);
2085 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
2086 gen_stack_protect_signal
2087 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2089 /* At this point all variables on the local_decls with TREE_USED
2090 set are not associated with any block scope. Lay them out. */
2092 len
= vec_safe_length (cfun
->local_decls
);
2093 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2095 bool expand_now
= false;
2097 /* Expanded above already. */
2098 if (is_gimple_reg (var
))
2100 TREE_USED (var
) = 0;
2103 /* We didn't set a block for static or extern because it's hard
2104 to tell the difference between a global variable (re)declared
2105 in a local scope, and one that's really declared there to
2106 begin with. And it doesn't really matter much, since we're
2107 not giving them stack space. Expand them now. */
2108 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
2111 /* Expand variables not associated with any block now. Those created by
2112 the optimizers could be live anywhere in the function. Those that
2113 could possibly have been scoped originally and detached from their
2114 block will have their allocation deferred so we coalesce them with
2115 others when optimization is enabled. */
2116 else if (TREE_USED (var
))
2119 /* Finally, mark all variables on the list as used. We'll use
2120 this in a moment when we expand those associated with scopes. */
2121 TREE_USED (var
) = 1;
2124 expand_one_var (var
, true, true);
2127 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
2129 rtx rtl
= DECL_RTL_IF_SET (var
);
2131 /* Keep artificial non-ignored vars in cfun->local_decls
2132 chain until instantiate_decls. */
2133 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2134 add_local_decl (cfun
, var
);
2135 else if (rtl
== NULL_RTX
)
2136 /* If rtl isn't set yet, which can happen e.g. with
2137 -fstack-protector, retry before returning from this
2139 maybe_local_decls
.safe_push (var
);
2143 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2145 +-----------------+-----------------+
2146 | ...processed... | ...duplicates...|
2147 +-----------------+-----------------+
2149 +-- LEN points here.
2151 We just want the duplicates, as those are the artificial
2152 non-ignored vars that we want to keep until instantiate_decls.
2153 Move them down and truncate the array. */
2154 if (!vec_safe_is_empty (cfun
->local_decls
))
2155 cfun
->local_decls
->block_remove (0, len
);
2157 /* At this point, all variables within the block tree with TREE_USED
2158 set are actually used by the optimized function. Lay them out. */
2159 expand_used_vars_for_block (outer_block
, true);
2161 if (stack_vars_num
> 0)
2163 add_scope_conflicts ();
2165 /* If stack protection is enabled, we don't share space between
2166 vulnerable data and non-vulnerable data. */
2167 if (flag_stack_protect
!= 0
2168 && (flag_stack_protect
!= SPCT_FLAG_EXPLICIT
2169 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
2170 && lookup_attribute ("stack_protect",
2171 DECL_ATTRIBUTES (current_function_decl
)))))
2172 add_stack_protection_conflicts ();
2174 /* Now that we have collected all stack variables, and have computed a
2175 minimal interference graph, attempt to save some stack space. */
2176 partition_stack_vars ();
2178 dump_stack_var_partition ();
2181 switch (flag_stack_protect
)
2184 create_stack_guard ();
2187 case SPCT_FLAG_STRONG
:
2188 if (gen_stack_protect_signal
2189 || cfun
->calls_alloca
|| has_protected_decls
2190 || lookup_attribute ("stack_protect",
2191 DECL_ATTRIBUTES (current_function_decl
)))
2192 create_stack_guard ();
2195 case SPCT_FLAG_DEFAULT
:
2196 if (cfun
->calls_alloca
|| has_protected_decls
2197 || lookup_attribute ("stack_protect",
2198 DECL_ATTRIBUTES (current_function_decl
)))
2199 create_stack_guard ();
2202 case SPCT_FLAG_EXPLICIT
:
2203 if (lookup_attribute ("stack_protect",
2204 DECL_ATTRIBUTES (current_function_decl
)))
2205 create_stack_guard ();
2211 /* Assign rtl to each variable based on these partitions. */
2212 if (stack_vars_num
> 0)
2214 struct stack_vars_data data
;
2216 data
.asan_base
= NULL_RTX
;
2217 data
.asan_alignb
= 0;
2219 /* Reorder decls to be protected by iterating over the variables
2220 array multiple times, and allocating out of each phase in turn. */
2221 /* ??? We could probably integrate this into the qsort we did
2222 earlier, such that we naturally see these variables first,
2223 and thus naturally allocate things in the right order. */
2224 if (has_protected_decls
)
2226 /* Phase 1 contains only character arrays. */
2227 expand_stack_vars (stack_protect_decl_phase_1
, &data
);
2229 /* Phase 2 contains other kinds of arrays. */
2230 if (flag_stack_protect
== SPCT_FLAG_ALL
2231 || flag_stack_protect
== SPCT_FLAG_STRONG
2232 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
2233 && lookup_attribute ("stack_protect",
2234 DECL_ATTRIBUTES (current_function_decl
))))
2235 expand_stack_vars (stack_protect_decl_phase_2
, &data
);
2238 if (asan_sanitize_stack_p ())
2239 /* Phase 3, any partitions that need asan protection
2240 in addition to phase 1 and 2. */
2241 expand_stack_vars (asan_decl_phase_3
, &data
);
2243 /* ASAN description strings don't yet have a syntax for expressing
2244 polynomial offsets. */
2245 HOST_WIDE_INT prev_offset
;
2246 if (!data
.asan_vec
.is_empty ()
2247 && frame_offset
.is_constant (&prev_offset
))
2249 HOST_WIDE_INT offset
, sz
, redzonesz
;
2250 redzonesz
= ASAN_RED_ZONE_SIZE
;
2251 sz
= data
.asan_vec
[0] - prev_offset
;
2252 if (data
.asan_alignb
> ASAN_RED_ZONE_SIZE
2253 && data
.asan_alignb
<= 4096
2254 && sz
+ ASAN_RED_ZONE_SIZE
>= (int) data
.asan_alignb
)
2255 redzonesz
= ((sz
+ ASAN_RED_ZONE_SIZE
+ data
.asan_alignb
- 1)
2256 & ~(data
.asan_alignb
- HOST_WIDE_INT_1
)) - sz
;
2257 /* Allocating a constant amount of space from a constant
2258 starting offset must give a constant result. */
2259 offset
= (alloc_stack_frame_space (redzonesz
, ASAN_RED_ZONE_SIZE
)
2261 data
.asan_vec
.safe_push (prev_offset
);
2262 data
.asan_vec
.safe_push (offset
);
2263 /* Leave space for alignment if STRICT_ALIGNMENT. */
2264 if (STRICT_ALIGNMENT
)
2265 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode
)
2266 << ASAN_SHADOW_SHIFT
)
2267 / BITS_PER_UNIT
, 1);
2270 = asan_emit_stack_protection (virtual_stack_vars_rtx
,
2273 data
.asan_vec
.address (),
2274 data
.asan_decl_vec
.address (),
2275 data
.asan_vec
.length ());
2278 expand_stack_vars (NULL
, &data
);
2281 if (asan_sanitize_allocas_p () && cfun
->calls_alloca
)
2282 var_end_seq
= asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx
,
2283 virtual_stack_vars_rtx
,
2286 fini_vars_expansion ();
2288 /* If there were any artificial non-ignored vars without rtl
2289 found earlier, see if deferred stack allocation hasn't assigned
2291 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls
, i
, var
)
2293 rtx rtl
= DECL_RTL_IF_SET (var
);
2295 /* Keep artificial non-ignored vars in cfun->local_decls
2296 chain until instantiate_decls. */
2297 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2298 add_local_decl (cfun
, var
);
2301 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2302 if (STACK_ALIGNMENT_NEEDED
)
2304 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2305 if (FRAME_GROWS_DOWNWARD
)
2306 frame_offset
= aligned_lower_bound (frame_offset
, align
);
2308 frame_offset
= aligned_upper_bound (frame_offset
, align
);
2315 /* If we need to produce a detailed dump, print the tree representation
2316 for STMT to the dump file. SINCE is the last RTX after which the RTL
2317 generated for STMT should have been appended. */
2320 maybe_dump_rtl_for_gimple_stmt (gimple
*stmt
, rtx_insn
*since
)
2322 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2324 fprintf (dump_file
, "\n;; ");
2325 print_gimple_stmt (dump_file
, stmt
, 0,
2326 TDF_SLIM
| (dump_flags
& TDF_LINENO
));
2327 fprintf (dump_file
, "\n");
2329 print_rtl (dump_file
, since
? NEXT_INSN (since
) : since
);
2333 /* Maps the blocks that do not contain tree labels to rtx labels. */
2335 static hash_map
<basic_block
, rtx_code_label
*> *lab_rtx_for_bb
;
2337 /* Returns the label_rtx expression for a label starting basic block BB. */
2339 static rtx_code_label
*
2340 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED
)
2342 gimple_stmt_iterator gsi
;
2345 if (bb
->flags
& BB_RTL
)
2346 return block_label (bb
);
2348 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
2352 /* Find the tree label if it is present. */
2354 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2358 lab_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
2362 lab
= gimple_label_label (lab_stmt
);
2363 if (DECL_NONLOCAL (lab
))
2366 return jump_target_rtx (lab
);
2369 rtx_code_label
*l
= gen_label_rtx ();
2370 lab_rtx_for_bb
->put (bb
, l
);
2375 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2376 of a basic block where we just expanded the conditional at the end,
2377 possibly clean up the CFG and instruction sequence. LAST is the
2378 last instruction before the just emitted jump sequence. */
2381 maybe_cleanup_end_of_block (edge e
, rtx_insn
*last
)
2383 /* Special case: when jumpif decides that the condition is
2384 trivial it emits an unconditional jump (and the necessary
2385 barrier). But we still have two edges, the fallthru one is
2386 wrong. purge_dead_edges would clean this up later. Unfortunately
2387 we have to insert insns (and split edges) before
2388 find_many_sub_basic_blocks and hence before purge_dead_edges.
2389 But splitting edges might create new blocks which depend on the
2390 fact that if there are two edges there's no barrier. So the
2391 barrier would get lost and verify_flow_info would ICE. Instead
2392 of auditing all edge splitters to care for the barrier (which
2393 normally isn't there in a cleaned CFG), fix it here. */
2394 if (BARRIER_P (get_last_insn ()))
2398 /* Now, we have a single successor block, if we have insns to
2399 insert on the remaining edge we potentially will insert
2400 it at the end of this block (if the dest block isn't feasible)
2401 in order to avoid splitting the edge. This insertion will take
2402 place in front of the last jump. But we might have emitted
2403 multiple jumps (conditional and one unconditional) to the
2404 same destination. Inserting in front of the last one then
2405 is a problem. See PR 40021. We fix this by deleting all
2406 jumps except the last unconditional one. */
2407 insn
= PREV_INSN (get_last_insn ());
2408 /* Make sure we have an unconditional jump. Otherwise we're
2410 gcc_assert (JUMP_P (insn
) && !any_condjump_p (insn
));
2411 for (insn
= PREV_INSN (insn
); insn
!= last
;)
2413 insn
= PREV_INSN (insn
);
2414 if (JUMP_P (NEXT_INSN (insn
)))
2416 if (!any_condjump_p (NEXT_INSN (insn
)))
2418 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn
))));
2419 delete_insn (NEXT_INSN (NEXT_INSN (insn
)));
2421 delete_insn (NEXT_INSN (insn
));
2427 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2428 Returns a new basic block if we've terminated the current basic
2429 block and created a new one. */
2432 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2434 basic_block new_bb
, dest
;
2437 rtx_insn
*last2
, *last
;
2438 enum tree_code code
;
2441 code
= gimple_cond_code (stmt
);
2442 op0
= gimple_cond_lhs (stmt
);
2443 op1
= gimple_cond_rhs (stmt
);
2444 /* We're sometimes presented with such code:
2448 This would expand to two comparisons which then later might
2449 be cleaned up by combine. But some pattern matchers like if-conversion
2450 work better when there's only one compare, so make up for this
2451 here as special exception if TER would have made the same change. */
2453 && TREE_CODE (op0
) == SSA_NAME
2454 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2455 && TREE_CODE (op1
) == INTEGER_CST
2456 && ((gimple_cond_code (stmt
) == NE_EXPR
2457 && integer_zerop (op1
))
2458 || (gimple_cond_code (stmt
) == EQ_EXPR
2459 && integer_onep (op1
)))
2460 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2462 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2463 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2465 enum tree_code code2
= gimple_assign_rhs_code (second
);
2466 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2469 op0
= gimple_assign_rhs1 (second
);
2470 op1
= gimple_assign_rhs2 (second
);
2472 /* If jumps are cheap and the target does not support conditional
2473 compare, turn some more codes into jumpy sequences. */
2474 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2475 && targetm
.gen_ccmp_first
== NULL
)
2477 if ((code2
== BIT_AND_EXPR
2478 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2479 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2480 || code2
== TRUTH_AND_EXPR
)
2482 code
= TRUTH_ANDIF_EXPR
;
2483 op0
= gimple_assign_rhs1 (second
);
2484 op1
= gimple_assign_rhs2 (second
);
2486 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2488 code
= TRUTH_ORIF_EXPR
;
2489 op0
= gimple_assign_rhs1 (second
);
2490 op1
= gimple_assign_rhs2 (second
);
2496 last2
= last
= get_last_insn ();
2498 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2499 set_curr_insn_location (gimple_location (stmt
));
2501 /* These flags have no purpose in RTL land. */
2502 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2503 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2505 /* We can either have a pure conditional jump with one fallthru edge or
2506 two-way jump that needs to be decomposed into two basic blocks. */
2507 if (false_edge
->dest
== bb
->next_bb
)
2509 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2510 true_edge
->probability
);
2511 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2512 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2513 set_curr_insn_location (true_edge
->goto_locus
);
2514 false_edge
->flags
|= EDGE_FALLTHRU
;
2515 maybe_cleanup_end_of_block (false_edge
, last
);
2518 if (true_edge
->dest
== bb
->next_bb
)
2520 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2521 false_edge
->probability
);
2522 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2523 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2524 set_curr_insn_location (false_edge
->goto_locus
);
2525 true_edge
->flags
|= EDGE_FALLTHRU
;
2526 maybe_cleanup_end_of_block (true_edge
, last
);
2530 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2531 true_edge
->probability
);
2532 last
= get_last_insn ();
2533 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2534 set_curr_insn_location (false_edge
->goto_locus
);
2535 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2538 if (BARRIER_P (BB_END (bb
)))
2539 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2540 update_bb_for_insn (bb
);
2542 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2543 dest
= false_edge
->dest
;
2544 redirect_edge_succ (false_edge
, new_bb
);
2545 false_edge
->flags
|= EDGE_FALLTHRU
;
2546 new_bb
->count
= false_edge
->count ();
2547 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2548 add_bb_to_loop (new_bb
, loop
);
2549 if (loop
->latch
== bb
2550 && loop
->header
== dest
)
2551 loop
->latch
= new_bb
;
2552 make_single_succ_edge (new_bb
, dest
, 0);
2553 if (BARRIER_P (BB_END (new_bb
)))
2554 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2555 update_bb_for_insn (new_bb
);
2557 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2559 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2561 set_curr_insn_location (true_edge
->goto_locus
);
2562 true_edge
->goto_locus
= curr_insn_location ();
2568 /* Mark all calls that can have a transaction restart. */
2571 mark_transaction_restart_calls (gimple
*stmt
)
2573 struct tm_restart_node dummy
;
2574 tm_restart_node
**slot
;
2576 if (!cfun
->gimple_df
->tm_restart
)
2580 slot
= cfun
->gimple_df
->tm_restart
->find_slot (&dummy
, NO_INSERT
);
2583 struct tm_restart_node
*n
= *slot
;
2584 tree list
= n
->label_or_list
;
2587 for (insn
= next_real_insn (get_last_insn ());
2589 insn
= next_real_insn (insn
))
2592 if (TREE_CODE (list
) == LABEL_DECL
)
2593 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2595 for (; list
; list
= TREE_CHAIN (list
))
2596 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2604 expand_call_stmt (gcall
*stmt
)
2606 tree exp
, decl
, lhs
;
2610 if (gimple_call_internal_p (stmt
))
2612 expand_internal_call (stmt
);
2616 /* If this is a call to a built-in function and it has no effect other
2617 than setting the lhs, try to implement it using an internal function
2619 decl
= gimple_call_fndecl (stmt
);
2620 if (gimple_call_lhs (stmt
)
2621 && !gimple_has_side_effects (stmt
)
2622 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2624 internal_fn ifn
= replacement_internal_fn (stmt
);
2625 if (ifn
!= IFN_LAST
)
2627 expand_internal_call (ifn
, stmt
);
2632 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2634 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2635 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2637 /* If this is not a builtin function, the function type through which the
2638 call is made may be different from the type of the function. */
2641 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2642 CALL_EXPR_FN (exp
));
2644 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2645 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2647 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2649 tree arg
= gimple_call_arg (stmt
, i
);
2651 /* TER addresses into arguments of builtin functions so we have a
2652 chance to infer more correct alignment information. See PR39954. */
2654 && TREE_CODE (arg
) == SSA_NAME
2655 && (def
= get_gimple_for_ssa_name (arg
))
2656 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2657 arg
= gimple_assign_rhs1 (def
);
2658 CALL_EXPR_ARG (exp
, i
) = arg
;
2661 if (gimple_has_side_effects (stmt
))
2662 TREE_SIDE_EFFECTS (exp
) = 1;
2664 if (gimple_call_nothrow_p (stmt
))
2665 TREE_NOTHROW (exp
) = 1;
2667 if (gimple_no_warning_p (stmt
))
2668 TREE_NO_WARNING (exp
) = 1;
2670 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2671 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2672 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2674 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2675 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2676 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2678 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2679 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2680 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2681 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2682 CALL_WITH_BOUNDS_P (exp
) = gimple_call_with_bounds_p (stmt
);
2684 /* Ensure RTL is created for debug args. */
2685 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2687 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2692 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2694 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2695 expand_debug_expr (dtemp
);
2699 rtx_insn
*before_call
= get_last_insn ();
2700 lhs
= gimple_call_lhs (stmt
);
2702 expand_assignment (lhs
, exp
, false);
2704 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2706 /* If the gimple call is an indirect call and has 'nocf_check'
2707 attribute find a generated CALL insn to mark it as no
2708 control-flow verification is needed. */
2709 if (gimple_call_nocf_check_p (stmt
)
2710 && !gimple_call_fndecl (stmt
))
2712 rtx_insn
*last
= get_last_insn ();
2713 while (!CALL_P (last
)
2714 && last
!= before_call
)
2715 last
= PREV_INSN (last
);
2717 if (last
!= before_call
)
2718 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2721 mark_transaction_restart_calls (stmt
);
2725 /* Generate RTL for an asm statement (explicit assembler code).
2726 STRING is a STRING_CST node containing the assembler code text,
2727 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2728 insn is volatile; don't optimize it. */
2731 expand_asm_loc (tree string
, int vol
, location_t locus
)
2735 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
2736 ggc_strdup (TREE_STRING_POINTER (string
)),
2739 MEM_VOLATILE_P (body
) = vol
;
2741 /* Non-empty basic ASM implicitly clobbers memory. */
2742 if (TREE_STRING_LENGTH (string
) != 0)
2745 unsigned i
, nclobbers
;
2746 auto_vec
<rtx
> input_rvec
, output_rvec
;
2747 auto_vec
<const char *> constraints
;
2748 auto_vec
<rtx
> clobber_rvec
;
2749 HARD_REG_SET clobbered_regs
;
2750 CLEAR_HARD_REG_SET (clobbered_regs
);
2752 clob
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2753 clobber_rvec
.safe_push (clob
);
2755 if (targetm
.md_asm_adjust
)
2756 targetm
.md_asm_adjust (output_rvec
, input_rvec
,
2757 constraints
, clobber_rvec
,
2761 nclobbers
= clobber_rvec
.length ();
2762 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (1 + nclobbers
));
2764 XVECEXP (body
, 0, 0) = asm_op
;
2765 for (i
= 0; i
< nclobbers
; i
++)
2766 XVECEXP (body
, 0, i
+ 1) = gen_rtx_CLOBBER (VOIDmode
, clobber_rvec
[i
]);
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; ++s)
    if (*s == c)
      ++count;

  return count;
}
2782 /* A subroutine of expand_asm_operands. Check that all operands have
2783 the same number of alternatives. Return true if so. */
2786 check_operand_nalternatives (const vec
<const char *> &constraints
)
2788 unsigned len
= constraints
.length();
2791 int nalternatives
= n_occurrences (',', constraints
[0]);
2793 if (nalternatives
+ 1 > MAX_RECOG_ALTERNATIVES
)
2795 error ("too many alternatives in %<asm%>");
2799 for (unsigned i
= 1; i
< len
; ++i
)
2800 if (n_occurrences (',', constraints
[i
]) != nalternatives
)
2802 error ("operand constraints for %<asm%> differ "
2803 "in number of alternatives");
2810 /* Check for overlap between registers marked in CLOBBERED_REGS and
2811 anything inappropriate in T. Emit error and return the register
2812 variable definition for error, NULL_TREE for ok. */
2815 tree_conflicts_with_clobbers_p (tree t
, HARD_REG_SET
*clobbered_regs
)
2817 /* Conflicts between asm-declared register variables and the clobber
2818 list are not allowed. */
2819 tree overlap
= tree_overlaps_hard_reg_set (t
, clobbered_regs
);
2823 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2824 DECL_NAME (overlap
));
2826 /* Reset registerness to stop multiple errors emitted for a single
2828 DECL_REGISTER (overlap
) = 0;
2835 /* Generate RTL for an asm statement with arguments.
2836 STRING is the instruction template.
2837 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2838 Each output or input has an expression in the TREE_VALUE and
2839 a tree list in TREE_PURPOSE which in turn contains a constraint
2840 name in TREE_VALUE (or NULL_TREE) and a constraint string
2842 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2843 that is clobbered by this insn.
2845 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2846 should be the fallthru basic block of the asm goto.
2848 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2849 Some elements of OUTPUTS may be replaced with trees representing temporary
2850 values. The caller should copy those temporary values to the originally
2853 VOL nonzero means the insn is volatile; don't optimize it. */
2856 expand_asm_stmt (gasm
*stmt
)
2858 class save_input_location
2863 explicit save_input_location(location_t where
)
2865 old
= input_location
;
2866 input_location
= where
;
2869 ~save_input_location()
2871 input_location
= old
;
2875 location_t locus
= gimple_location (stmt
);
2877 if (gimple_asm_input_p (stmt
))
2879 const char *s
= gimple_asm_string (stmt
);
2880 tree string
= build_string (strlen (s
), s
);
2881 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
2885 /* There are some legacy diagnostics in here, and also avoids a
2886 sixth parameger to targetm.md_asm_adjust. */
2887 save_input_location
s_i_l(locus
);
2889 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2890 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2891 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2894 /* ??? Diagnose during gimplification? */
2895 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2897 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2901 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2902 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2903 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2905 /* Copy the gimple vectors into new vectors that we can manipulate. */
2907 output_tvec
.safe_grow (noutputs
);
2908 input_tvec
.safe_grow (ninputs
);
2909 constraints
.safe_grow (noutputs
+ ninputs
);
2911 for (i
= 0; i
< noutputs
; ++i
)
2913 tree t
= gimple_asm_output_op (stmt
, i
);
2914 output_tvec
[i
] = TREE_VALUE (t
);
2915 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2917 for (i
= 0; i
< ninputs
; i
++)
2919 tree t
= gimple_asm_input_op (stmt
, i
);
2920 input_tvec
[i
] = TREE_VALUE (t
);
2921 constraints
[i
+ noutputs
]
2922 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2925 /* ??? Diagnose during gimplification? */
2926 if (! check_operand_nalternatives (constraints
))
2929 /* Count the number of meaningful clobbered registers, ignoring what
2930 we would ignore later. */
2931 auto_vec
<rtx
> clobber_rvec
;
2932 HARD_REG_SET clobbered_regs
;
2933 CLEAR_HARD_REG_SET (clobbered_regs
);
2935 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2937 clobber_rvec
.reserve (n
);
2938 for (i
= 0; i
< n
; i
++)
2940 tree t
= gimple_asm_clobber_op (stmt
, i
);
2941 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2944 j
= decode_reg_name_and_count (regname
, &nregs
);
2949 /* ??? Diagnose during gimplification? */
2950 error ("unknown register name %qs in %<asm%>", regname
);
2954 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2955 clobber_rvec
.safe_push (x
);
2959 /* Otherwise we should have -1 == empty string
2960 or -3 == cc, which is not a register. */
2961 gcc_assert (j
== -1 || j
== -3);
2965 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
2967 /* Clobbering the PIC register is an error. */
2968 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
2970 /* ??? Diagnose during gimplification? */
2971 error ("PIC register clobbered by %qs in %<asm%>",
2976 SET_HARD_REG_BIT (clobbered_regs
, reg
);
2977 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
2978 clobber_rvec
.safe_push (x
);
2982 unsigned nclobbers
= clobber_rvec
.length();
2984 /* First pass over inputs and outputs checks validity and sets
2985 mark_addressable if needed. */
2986 /* ??? Diagnose during gimplification? */
2988 for (i
= 0; i
< noutputs
; ++i
)
2990 tree val
= output_tvec
[i
];
2991 tree type
= TREE_TYPE (val
);
2992 const char *constraint
;
2997 /* Try to parse the output constraint. If that fails, there's
2998 no point in going further. */
2999 constraint
= constraints
[i
];
3000 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
3001 &allows_mem
, &allows_reg
, &is_inout
))
3008 && REG_P (DECL_RTL (val
))
3009 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
3010 mark_addressable (val
);
3013 for (i
= 0; i
< ninputs
; ++i
)
3015 bool allows_reg
, allows_mem
;
3016 const char *constraint
;
3018 constraint
= constraints
[i
+ noutputs
];
3019 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3020 constraints
.address (),
3021 &allows_mem
, &allows_reg
))
3024 if (! allows_reg
&& allows_mem
)
3025 mark_addressable (input_tvec
[i
]);
3028 /* Second pass evaluates arguments. */
3030 /* Make sure stack is consistent for asm goto. */
3032 do_pending_stack_adjust ();
3033 int old_generating_concat_p
= generating_concat_p
;
3035 /* Vector of RTX's of evaluated output operands. */
3036 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
3037 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3038 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3040 output_rvec
.safe_grow (noutputs
);
3042 for (i
= 0; i
< noutputs
; ++i
)
3044 tree val
= output_tvec
[i
];
3045 tree type
= TREE_TYPE (val
);
3046 bool is_inout
, allows_reg
, allows_mem
, ok
;
3049 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3050 noutputs
, &allows_mem
, &allows_reg
,
3054 /* If an output operand is not a decl or indirect ref and our constraint
3055 allows a register, make a temporary to act as an intermediate.
3056 Make the asm insn write into that, then we will copy it to
3057 the real output operand. Likewise for promoted variables. */
3059 generating_concat_p
= 0;
3061 if ((TREE_CODE (val
) == INDIRECT_REF
&& allows_mem
)
3063 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3064 && ! (REG_P (DECL_RTL (val
))
3065 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3068 || TREE_ADDRESSABLE (type
))
3070 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3071 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3073 op
= validize_mem (op
);
3075 if (! allows_reg
&& !MEM_P (op
))
3076 error ("output number %d not directly addressable", i
);
3077 if ((! allows_mem
&& MEM_P (op
) && GET_MODE (op
) != BLKmode
)
3078 || GET_CODE (op
) == CONCAT
)
3081 op
= gen_reg_rtx (GET_MODE (op
));
3083 generating_concat_p
= old_generating_concat_p
;
3086 emit_move_insn (op
, old_op
);
3088 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3089 emit_move_insn (old_op
, op
);
3090 after_rtl_seq
= get_insns ();
3091 after_rtl_end
= get_last_insn ();
3097 op
= assign_temp (type
, 0, 1);
3098 op
= validize_mem (op
);
3099 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3100 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3102 generating_concat_p
= old_generating_concat_p
;
3104 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3105 expand_assignment (val
, make_tree (type
, op
), false);
3106 after_rtl_seq
= get_insns ();
3107 after_rtl_end
= get_last_insn ();
3110 output_rvec
[i
] = op
;
3113 inout_opnum
.safe_push (i
);
3116 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3117 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3119 input_rvec
.safe_grow (ninputs
);
3120 input_mode
.safe_grow (ninputs
);
3122 generating_concat_p
= 0;
3124 for (i
= 0; i
< ninputs
; ++i
)
3126 tree val
= input_tvec
[i
];
3127 tree type
= TREE_TYPE (val
);
3128 bool allows_reg
, allows_mem
, ok
;
3129 const char *constraint
;
3132 constraint
= constraints
[i
+ noutputs
];
3133 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3134 constraints
.address (),
3135 &allows_mem
, &allows_reg
);
3138 /* EXPAND_INITIALIZER will not generate code for valid initializer
3139 constants, but will still generate code for other types of operand.
3140 This is the behavior we want for constant constraints. */
3141 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3142 allows_reg
? EXPAND_NORMAL
3143 : allows_mem
? EXPAND_MEMORY
3144 : EXPAND_INITIALIZER
);
3146 /* Never pass a CONCAT to an ASM. */
3147 if (GET_CODE (op
) == CONCAT
)
3148 op
= force_reg (GET_MODE (op
), op
);
3149 else if (MEM_P (op
))
3150 op
= validize_mem (op
);
3152 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3154 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3155 op
= force_reg (TYPE_MODE (type
), op
);
3156 else if (!allows_mem
)
3157 warning (0, "asm operand %d probably doesn%'t match constraints",
3159 else if (MEM_P (op
))
3161 /* We won't recognize either volatile memory or memory
3162 with a queued address as available a memory_operand
3163 at this point. Ignore it: clearly this *is* a memory. */
3169 input_mode
[i
] = TYPE_MODE (type
);
3172 /* For in-out operands, copy output rtx to input rtx. */
3173 unsigned ninout
= inout_opnum
.length();
3174 for (i
= 0; i
< ninout
; i
++)
3176 int j
= inout_opnum
[i
];
3177 rtx o
= output_rvec
[j
];
3179 input_rvec
.safe_push (o
);
3180 input_mode
.safe_push (GET_MODE (o
));
3183 sprintf (buffer
, "%d", j
);
3184 constraints
.safe_push (ggc_strdup (buffer
));
3188 /* Sometimes we wish to automatically clobber registers across an asm.
3189 Case in point is when the i386 backend moved from cc0 to a hard reg --
3190 maintaining source-level compatibility means automatically clobbering
3191 the flags register. */
3192 rtx_insn
*after_md_seq
= NULL
;
3193 if (targetm
.md_asm_adjust
)
3194 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3195 constraints
, clobber_rvec
,
3198 /* Do not allow the hook to change the output and input count,
3199 lest it mess up the operand numbering. */
3200 gcc_assert (output_rvec
.length() == noutputs
);
3201 gcc_assert (input_rvec
.length() == ninputs
);
3202 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3204 /* But it certainly can adjust the clobbers. */
3205 nclobbers
= clobber_rvec
.length();
3207 /* Third pass checks for easy conflicts. */
3208 /* ??? Why are we doing this on trees instead of rtx. */
3210 bool clobber_conflict_found
= 0;
3211 for (i
= 0; i
< noutputs
; ++i
)
3212 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3213 clobber_conflict_found
= 1;
3214 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3215 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3216 clobber_conflict_found
= 1;
3218 /* Make vectors for the expression-rtx, constraint strings,
3219 and named operands. */
3221 rtvec argvec
= rtvec_alloc (ninputs
);
3222 rtvec constraintvec
= rtvec_alloc (ninputs
);
3223 rtvec labelvec
= rtvec_alloc (nlabels
);
3225 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3226 : GET_MODE (output_rvec
[0])),
3227 ggc_strdup (gimple_asm_string (stmt
)),
3228 "", 0, argvec
, constraintvec
,
3230 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3232 for (i
= 0; i
< ninputs
; ++i
)
3234 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3235 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3236 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3237 constraints
[i
+ noutputs
],
3241 /* Copy labels to the vector. */
3242 rtx_code_label
*fallthru_label
= NULL
;
3245 basic_block fallthru_bb
= NULL
;
3246 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3248 fallthru_bb
= fallthru
->dest
;
3250 for (i
= 0; i
< nlabels
; ++i
)
3252 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3254 /* If asm goto has any labels in the fallthru basic block, use
3255 a label that we emit immediately after the asm goto. Expansion
3256 may insert further instructions into the same basic block after
3257 asm goto and if we don't do this, insertion of instructions on
3258 the fallthru edge might misbehave. See PR58670. */
3259 if (fallthru_bb
&& label_to_block_fn (cfun
, label
) == fallthru_bb
)
3261 if (fallthru_label
== NULL_RTX
)
3262 fallthru_label
= gen_label_rtx ();
3266 r
= label_rtx (label
);
3267 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3271 /* Now, for each output, construct an rtx
3272 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3273 ARGVEC CONSTRAINTS OPNAMES))
3274 If there is more than one, put them inside a PARALLEL. */
3276 if (nlabels
> 0 && nclobbers
== 0)
3278 gcc_assert (noutputs
== 0);
3279 emit_jump_insn (body
);
3281 else if (noutputs
== 0 && nclobbers
== 0)
3283 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3286 else if (noutputs
== 1 && nclobbers
== 0)
3288 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3289 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3299 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3301 /* For each output operand, store a SET. */
3302 for (i
= 0; i
< noutputs
; ++i
)
3304 rtx src
, o
= output_rvec
[i
];
3307 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3312 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3313 ASM_OPERANDS_TEMPLATE (obody
),
3314 constraints
[i
], i
, argvec
,
3315 constraintvec
, labelvec
, locus
);
3316 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3318 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3321 /* If there are no outputs (but there are some clobbers)
3322 store the bare ASM_OPERANDS into the PARALLEL. */
3324 XVECEXP (body
, 0, i
++) = obody
;
3326 /* Store (clobber REG) for each clobbered register specified. */
3327 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3329 rtx clobbered_reg
= clobber_rvec
[j
];
3331 /* Do sanity check for overlap between clobbers and respectively
3332 input and outputs that hasn't been handled. Such overlap
3333 should have been detected and reported above. */
3334 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3336 /* We test the old body (obody) contents to avoid
3337 tripping over the under-construction body. */
3338 for (unsigned k
= 0; k
< noutputs
; ++k
)
3339 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3340 internal_error ("asm clobber conflict with output operand");
3342 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3343 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3344 internal_error ("asm clobber conflict with input operand");
3347 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3351 emit_jump_insn (body
);
3356 generating_concat_p
= old_generating_concat_p
;
3359 emit_label (fallthru_label
);
3362 emit_insn (after_md_seq
);
3364 emit_insn (after_rtl_seq
);
3367 crtl
->has_asm_statement
= 1;
3370 /* Emit code to jump to the address
3371 specified by the pointer expression EXP. */
3374 expand_computed_goto (tree exp
)
3376 rtx x
= expand_normal (exp
);
3378 do_pending_stack_adjust ();
3379 emit_indirect_jump (x
);
3382 /* Generate RTL code for a `goto' statement with target label LABEL.
3383 LABEL should be a LABEL_DECL tree node that was or will later be
3384 defined with `expand_label'. */
3387 expand_goto (tree label
)
3391 /* Check for a nonlocal goto to a containing function. Should have
3392 gotten translated to __builtin_nonlocal_goto. */
3393 tree context
= decl_function_context (label
);
3394 gcc_assert (!context
|| context
== current_function_decl
);
3397 emit_jump (jump_target_rtx (label
));
3400 /* Output a return with no value. */
3403 expand_null_return_1 (void)
3405 clear_pending_stack_adjust ();
3406 do_pending_stack_adjust ();
3407 emit_jump (return_label
);
3410 /* Generate RTL to return from the current function, with no value.
3411 (That is, we do not do anything about returning any value.) */
3414 expand_null_return (void)
3416 /* If this function was declared to return a value, but we
3417 didn't, clobber the return registers so that they are not
3418 propagated live to the rest of the function. */
3419 clobber_return_register ();
3421 expand_null_return_1 ();
3424 /* Generate RTL to return from the current function, with value VAL. */
3427 expand_value_return (rtx val
)
3429 /* Copy the value to the return location unless it's already there. */
3431 tree decl
= DECL_RESULT (current_function_decl
);
3432 rtx return_reg
= DECL_RTL (decl
);
3433 if (return_reg
!= val
)
3435 tree funtype
= TREE_TYPE (current_function_decl
);
3436 tree type
= TREE_TYPE (decl
);
3437 int unsignedp
= TYPE_UNSIGNED (type
);
3438 machine_mode old_mode
= DECL_MODE (decl
);
3440 if (DECL_BY_REFERENCE (decl
))
3441 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 2);
3443 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 1);
3445 if (mode
!= old_mode
)
3446 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
3448 if (GET_CODE (return_reg
) == PARALLEL
)
3449 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
3451 emit_move_insn (return_reg
, val
);
3454 expand_null_return_1 ();
3457 /* Generate RTL to evaluate the expression RETVAL and return it
3458 from the current function. */
3461 expand_return (tree retval
, tree bounds
)
3468 /* If function wants no value, give it none. */
3469 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3471 expand_normal (retval
);
3472 expand_null_return ();
3476 if (retval
== error_mark_node
)
3478 /* Treat this like a return of no value from a function that
3480 expand_null_return ();
3483 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3484 || TREE_CODE (retval
) == INIT_EXPR
)
3485 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3486 retval_rhs
= TREE_OPERAND (retval
, 1);
3488 retval_rhs
= retval
;
3490 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3492 /* Put returned bounds to the right place. */
3493 bounds_rtl
= DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl
));
3499 if (bounds
&& bounds
!= error_mark_node
)
3501 bnd
= expand_normal (bounds
);
3502 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3504 else if (REG_P (bounds_rtl
))
3507 bnd
= chkp_expand_zero_bounds ();
3510 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3511 addr
= gen_rtx_MEM (Pmode
, addr
);
3512 bnd
= targetm
.calls
.load_bounds_for_arg (addr
, NULL
, NULL
);
3515 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3521 gcc_assert (GET_CODE (bounds_rtl
) == PARALLEL
);
3524 bnd
= chkp_expand_zero_bounds ();
3527 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3528 addr
= gen_rtx_MEM (Pmode
, addr
);
3531 for (n
= 0; n
< XVECLEN (bounds_rtl
, 0); n
++)
3533 rtx slot
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 0);
3536 rtx offs
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 1);
3537 rtx from
= adjust_address (addr
, Pmode
, INTVAL (offs
));
3538 bnd
= targetm
.calls
.load_bounds_for_arg (from
, NULL
, NULL
);
3540 targetm
.calls
.store_returned_bounds (slot
, bnd
);
3544 else if (chkp_function_instrumented_p (current_function_decl
)
3545 && !BOUNDED_P (retval_rhs
)
3546 && chkp_type_has_pointer (TREE_TYPE (retval_rhs
))
3547 && TREE_CODE (retval_rhs
) != RESULT_DECL
)
3549 rtx addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3550 addr
= gen_rtx_MEM (Pmode
, addr
);
3552 gcc_assert (MEM_P (result_rtl
));
3554 chkp_copy_bounds_for_stack_parm (result_rtl
, addr
, TREE_TYPE (retval_rhs
));
3557 /* If we are returning the RESULT_DECL, then the value has already
3558 been stored into it, so we don't have to do anything special. */
3559 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3560 expand_value_return (result_rtl
);
3562 /* If the result is an aggregate that is being returned in one (or more)
3563 registers, load the registers here. */
3565 else if (retval_rhs
!= 0
3566 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3567 && REG_P (result_rtl
))
3569 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3572 /* Use the mode of the result value on the return register. */
3573 PUT_MODE (result_rtl
, GET_MODE (val
));
3574 expand_value_return (val
);
3577 expand_null_return ();
3579 else if (retval_rhs
!= 0
3580 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3581 && (REG_P (result_rtl
)
3582 || (GET_CODE (result_rtl
) == PARALLEL
)))
3584 /* Compute the return value into a temporary (usually a pseudo reg). */
3586 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3587 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3588 val
= force_not_mem (val
);
3589 expand_value_return (val
);
3593 /* No hard reg used; calculate value into hard return reg. */
3594 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3595 expand_value_return (result_rtl
);
3599 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3600 STMT that doesn't require special handling for outgoing edges. That
3601 is no tailcalls and no GIMPLE_COND. */
3604 expand_gimple_stmt_1 (gimple
*stmt
)
3608 set_curr_insn_location (gimple_location (stmt
));
3610 switch (gimple_code (stmt
))
3613 op0
= gimple_goto_dest (stmt
);
3614 if (TREE_CODE (op0
) == LABEL_DECL
)
3617 expand_computed_goto (op0
);
3620 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3623 case GIMPLE_PREDICT
:
3627 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3628 if (gimple_switch_num_labels (swtch
) == 1)
3629 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3631 expand_case (swtch
);
3635 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3638 expand_call_stmt (as_a
<gcall
*> (stmt
));
3643 tree bnd
= gimple_return_retbnd (as_a
<greturn
*> (stmt
));
3644 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3646 if (op0
&& op0
!= error_mark_node
)
3648 tree result
= DECL_RESULT (current_function_decl
);
3650 /* Mark we have return statement with missing bounds. */
3652 && chkp_function_instrumented_p (cfun
->decl
)
3654 bnd
= error_mark_node
;
3656 /* If we are not returning the current function's RESULT_DECL,
3657 build an assignment to it. */
3660 /* I believe that a function's RESULT_DECL is unique. */
3661 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3663 /* ??? We'd like to use simply expand_assignment here,
3664 but this fails if the value is of BLKmode but the return
3665 decl is a register. expand_return has special handling
3666 for this combination, which eventually should move
3667 to common code. See comments there. Until then, let's
3668 build a modify expression :-/ */
3669 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3675 expand_null_return ();
3677 expand_return (op0
, bnd
);
3683 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3684 tree lhs
= gimple_assign_lhs (assign_stmt
);
3686 /* Tree expand used to fiddle with |= and &= of two bitfield
3687 COMPONENT_REFs here. This can't happen with gimple, the LHS
3688 of binary assigns must be a gimple reg. */
3690 if (TREE_CODE (lhs
) != SSA_NAME
3691 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3692 == GIMPLE_SINGLE_RHS
)
3694 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3695 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3696 == GIMPLE_SINGLE_RHS
);
3697 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3698 /* Do not put locations on possibly shared trees. */
3699 && !is_gimple_min_invariant (rhs
))
3700 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3701 if (TREE_CLOBBER_P (rhs
))
3702 /* This is a clobber to mark the going out of scope for
3706 expand_assignment (lhs
, rhs
,
3707 gimple_assign_nontemporal_move_p (
3713 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3714 struct separate_ops ops
;
3715 bool promoted
= false;
3717 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3718 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3721 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3722 ops
.type
= TREE_TYPE (lhs
);
3723 switch (get_gimple_rhs_class (ops
.code
))
3725 case GIMPLE_TERNARY_RHS
:
3726 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3728 case GIMPLE_BINARY_RHS
:
3729 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3731 case GIMPLE_UNARY_RHS
:
3732 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3737 ops
.location
= gimple_location (stmt
);
3739 /* If we want to use a nontemporal store, force the value to
3740 register first. If we store into a promoted register,
3741 don't directly expand to target. */
3742 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3743 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3750 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3751 /* If TEMP is a VOIDmode constant, use convert_modes to make
3752 sure that we properly convert it. */
3753 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3755 temp
= convert_modes (GET_MODE (target
),
3756 TYPE_MODE (ops
.type
),
3758 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3759 GET_MODE (target
), temp
, unsignedp
);
3762 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3764 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3768 temp
= force_operand (temp
, target
);
3770 emit_move_insn (target
, temp
);
3781 /* Expand one gimple statement STMT and return the last RTL instruction
3782 before any of the newly generated ones.
3784 In addition to generating the necessary RTL instructions this also
3785 sets REG_EH_REGION notes if necessary and sets the current source
3786 location for diagnostics. */
3789 expand_gimple_stmt (gimple
*stmt
)
3791 location_t saved_location
= input_location
;
3792 rtx_insn
*last
= get_last_insn ();
3797 /* We need to save and restore the current source location so that errors
3798 discovered during expansion are emitted with the right location. But
3799 it would be better if the diagnostic routines used the source location
3800 embedded in the tree nodes rather than globals. */
3801 if (gimple_has_location (stmt
))
3802 input_location
= gimple_location (stmt
);
3804 expand_gimple_stmt_1 (stmt
);
3806 /* Free any temporaries used to evaluate this statement. */
3809 input_location
= saved_location
;
3811 /* Mark all insns that may trap. */
3812 lp_nr
= lookup_stmt_eh_lp (stmt
);
3816 for (insn
= next_real_insn (last
); insn
;
3817 insn
= next_real_insn (insn
))
3819 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
3820 /* If we want exceptions for non-call insns, any
3821 may_trap_p instruction may throw. */
3822 && GET_CODE (PATTERN (insn
)) != CLOBBER
3823 && GET_CODE (PATTERN (insn
)) != USE
3824 && insn_could_throw_p (insn
))
3825 make_reg_eh_region_note (insn
, 0, lp_nr
);
3832 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3833 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3834 generated a tail call (something that might be denied by the ABI
3835 rules governing the call; see calls.c).
3837 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3838 can still reach the rest of BB. The case here is __builtin_sqrt,
3839 where the NaN result goes through the external function (with a
3840 tailcall) and the normal result happens via a sqrt instruction. */
3843 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3845 rtx_insn
*last2
, *last
;
3848 profile_probability probability
;
3850 last2
= last
= expand_gimple_stmt (stmt
);
3852 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3853 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3856 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3858 *can_fallthru
= true;
3862 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3863 Any instructions emitted here are about to be deleted. */
3864 do_pending_stack_adjust ();
3866 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3867 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3868 EH or abnormal edges, we shouldn't have created a tail call in
3869 the first place. So it seems to me we should just be removing
3870 all edges here, or redirecting the existing fallthru edge to
3873 probability
= profile_probability::never ();
3875 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3877 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3879 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3880 e
->dest
->count
-= e
->count ();
3881 probability
+= e
->probability
;
3888 /* This is somewhat ugly: the call_expr expander often emits instructions
3889 after the sibcall (to perform the function return). These confuse the
3890 find_many_sub_basic_blocks code, so we need to get rid of these. */
3891 last
= NEXT_INSN (last
);
3892 gcc_assert (BARRIER_P (last
));
3894 *can_fallthru
= false;
3895 while (NEXT_INSN (last
))
3897 /* For instance an sqrt builtin expander expands if with
3898 sibcall in the then and label for `else`. */
3899 if (LABEL_P (NEXT_INSN (last
)))
3901 *can_fallthru
= true;
3904 delete_insn (NEXT_INSN (last
));
3907 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3909 e
->probability
= probability
;
3911 update_bb_for_insn (bb
);
3913 if (NEXT_INSN (last
))
3915 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3918 if (BARRIER_P (last
))
3919 BB_END (bb
) = PREV_INSN (last
);
3922 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3927 /* Return the difference between the floor and the truncated result of
3928 a signed division by OP1 with remainder MOD. */
3930 floor_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3932 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3933 return gen_rtx_IF_THEN_ELSE
3934 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
3935 gen_rtx_IF_THEN_ELSE
3936 (mode
, gen_rtx_LT (BImode
,
3937 gen_rtx_DIV (mode
, op1
, mod
),
3939 constm1_rtx
, const0_rtx
),
3943 /* Return the difference between the ceil and the truncated result of
3944 a signed division by OP1 with remainder MOD. */
3946 ceil_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3948 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3949 return gen_rtx_IF_THEN_ELSE
3950 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
3951 gen_rtx_IF_THEN_ELSE
3952 (mode
, gen_rtx_GT (BImode
,
3953 gen_rtx_DIV (mode
, op1
, mod
),
3955 const1_rtx
, const0_rtx
),
3959 /* Return the difference between the ceil and the truncated result of
3960 an unsigned division by OP1 with remainder MOD. */
3962 ceil_udiv_adjust (machine_mode mode
, rtx mod
, rtx op1 ATTRIBUTE_UNUSED
)
3964 /* (mod != 0 ? 1 : 0) */
3965 return gen_rtx_IF_THEN_ELSE
3966 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
3967 const1_rtx
, const0_rtx
);
3970 /* Return the difference between the rounded and the truncated result
3971 of a signed division by OP1 with remainder MOD. Halfway cases are
3972 rounded away from zero, rather than to the nearest even number. */
3974 round_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3976 /* (abs (mod) >= abs (op1) - abs (mod)
3977 ? (op1 / mod > 0 ? 1 : -1)
3979 return gen_rtx_IF_THEN_ELSE
3980 (mode
, gen_rtx_GE (BImode
, gen_rtx_ABS (mode
, mod
),
3981 gen_rtx_MINUS (mode
,
3982 gen_rtx_ABS (mode
, op1
),
3983 gen_rtx_ABS (mode
, mod
))),
3984 gen_rtx_IF_THEN_ELSE
3985 (mode
, gen_rtx_GT (BImode
,
3986 gen_rtx_DIV (mode
, op1
, mod
),
3988 const1_rtx
, constm1_rtx
),
3992 /* Return the difference between the rounded and the truncated result
3993 of a unsigned division by OP1 with remainder MOD. Halfway cases
3994 are rounded away from zero, rather than to the nearest even
3997 round_udiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3999 /* (mod >= op1 - mod ? 1 : 0) */
4000 return gen_rtx_IF_THEN_ELSE
4001 (mode
, gen_rtx_GE (BImode
, mod
,
4002 gen_rtx_MINUS (mode
, op1
, mod
)),
4003 const1_rtx
, const0_rtx
);
4006 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4010 convert_debug_memory_address (scalar_int_mode mode
, rtx x
,
4013 #ifndef POINTERS_EXTEND_UNSIGNED
4014 gcc_assert (mode
== Pmode
4015 || mode
== targetm
.addr_space
.address_mode (as
));
4016 gcc_assert (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
);
4020 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
4022 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
4025 /* X must have some form of address mode already. */
4026 scalar_int_mode xmode
= as_a
<scalar_int_mode
> (GET_MODE (x
));
4027 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
4028 x
= lowpart_subreg (mode
, x
, xmode
);
4029 else if (POINTERS_EXTEND_UNSIGNED
> 0)
4030 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
4031 else if (!POINTERS_EXTEND_UNSIGNED
)
4032 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
4035 switch (GET_CODE (x
))
4038 if ((SUBREG_PROMOTED_VAR_P (x
)
4039 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
4040 || (GET_CODE (SUBREG_REG (x
)) == PLUS
4041 && REG_P (XEXP (SUBREG_REG (x
), 0))
4042 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
4043 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
4044 && GET_MODE (SUBREG_REG (x
)) == mode
)
4045 return SUBREG_REG (x
);
4048 temp
= gen_rtx_LABEL_REF (mode
, label_ref_label (x
));
4049 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
4052 temp
= shallow_copy_rtx (x
);
4053 PUT_MODE (temp
, mode
);
4056 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4058 temp
= gen_rtx_CONST (mode
, temp
);
4062 if (CONST_INT_P (XEXP (x
, 1)))
4064 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4066 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
4072 /* Don't know how to express ptr_extend as operation in debug info. */
4075 #endif /* POINTERS_EXTEND_UNSIGNED */
4080 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4081 by avoid_deep_ter_for_debug. */
4083 static hash_map
<tree
, tree
> *deep_ter_debug_map
;
4085 /* Split too deep TER chains for debug stmts using debug temporaries. */
4088 avoid_deep_ter_for_debug (gimple
*stmt
, int depth
)
4090 use_operand_p use_p
;
4092 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4094 tree use
= USE_FROM_PTR (use_p
);
4095 if (TREE_CODE (use
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (use
))
4097 gimple
*g
= get_gimple_for_ssa_name (use
);
4100 if (depth
> 6 && !stmt_ends_bb_p (g
))
4102 if (deep_ter_debug_map
== NULL
)
4103 deep_ter_debug_map
= new hash_map
<tree
, tree
>;
4105 tree
&vexpr
= deep_ter_debug_map
->get_or_insert (use
);
4108 vexpr
= make_node (DEBUG_EXPR_DECL
);
4109 gimple
*def_temp
= gimple_build_debug_bind (vexpr
, use
, g
);
4110 DECL_ARTIFICIAL (vexpr
) = 1;
4111 TREE_TYPE (vexpr
) = TREE_TYPE (use
);
4112 SET_DECL_MODE (vexpr
, TYPE_MODE (TREE_TYPE (use
)));
4113 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
4114 gsi_insert_after (&gsi
, def_temp
, GSI_NEW_STMT
);
4115 avoid_deep_ter_for_debug (def_temp
, 0);
4118 avoid_deep_ter_for_debug (g
, depth
+ 1);
4122 /* Return an RTX equivalent to the value of the parameter DECL. */
4125 expand_debug_parm_decl (tree decl
)
4127 rtx incoming
= DECL_INCOMING_RTL (decl
);
4130 && GET_MODE (incoming
) != BLKmode
4131 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
4132 || (MEM_P (incoming
)
4133 && REG_P (XEXP (incoming
, 0))
4134 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
4136 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
4138 #ifdef HAVE_window_save
4139 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4140 If the target machine has an explicit window save instruction, the
4141 actual entry value is the corresponding OUTGOING_REGNO instead. */
4142 if (REG_P (incoming
)
4143 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
4145 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
4146 OUTGOING_REGNO (REGNO (incoming
)), 0);
4147 else if (MEM_P (incoming
))
4149 rtx reg
= XEXP (incoming
, 0);
4150 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
4152 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
4153 incoming
= replace_equiv_address_nv (incoming
, reg
);
4156 incoming
= copy_rtx (incoming
);
4160 ENTRY_VALUE_EXP (rtl
) = incoming
;
4165 && GET_MODE (incoming
) != BLKmode
4166 && !TREE_ADDRESSABLE (decl
)
4168 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
4169 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
4170 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
4171 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
4172 return copy_rtx (incoming
);
4177 /* Return an RTX equivalent to the value of the tree expression EXP. */
4180 expand_debug_expr (tree exp
)
4182 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
4183 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4184 machine_mode inner_mode
= VOIDmode
;
4185 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4187 scalar_int_mode op0_mode
, op1_mode
, addr_mode
;
4189 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4191 case tcc_expression
:
4192 switch (TREE_CODE (exp
))
4197 case WIDEN_MULT_PLUS_EXPR
:
4198 case WIDEN_MULT_MINUS_EXPR
:
4202 case TRUTH_ANDIF_EXPR
:
4203 case TRUTH_ORIF_EXPR
:
4204 case TRUTH_AND_EXPR
:
4206 case TRUTH_XOR_EXPR
:
4209 case TRUTH_NOT_EXPR
:
4218 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
4225 if (mode
== BLKmode
)
4227 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4230 switch (TREE_CODE (exp
))
4236 case WIDEN_LSHIFT_EXPR
:
4237 /* Ensure second operand isn't wider than the first one. */
4238 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
4239 if (is_a
<scalar_int_mode
> (inner_mode
, &op1_mode
)
4240 && (GET_MODE_UNIT_PRECISION (mode
)
4241 < GET_MODE_PRECISION (op1_mode
)))
4242 op1
= lowpart_subreg (GET_MODE_INNER (mode
), op1
, op1_mode
);
4251 if (mode
== BLKmode
)
4253 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4254 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4259 case tcc_comparison
:
4260 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4268 case tcc_exceptional
:
4269 case tcc_declaration
:
4275 switch (TREE_CODE (exp
))
4278 if (!lookup_constant_def (exp
))
4280 if (strlen (TREE_STRING_POINTER (exp
)) + 1
4281 != (size_t) TREE_STRING_LENGTH (exp
))
4283 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
4284 op0
= gen_rtx_MEM (BLKmode
, op0
);
4285 set_mem_attributes (op0
, exp
, 0);
4293 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
4297 return immed_wide_int_const (poly_int_cst_value (exp
), mode
);
4300 gcc_assert (COMPLEX_MODE_P (mode
));
4301 op0
= expand_debug_expr (TREE_REALPART (exp
));
4302 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
4303 return gen_rtx_CONCAT (mode
, op0
, op1
);
4305 case DEBUG_EXPR_DECL
:
4306 op0
= DECL_RTL_IF_SET (exp
);
4311 op0
= gen_rtx_DEBUG_EXPR (mode
);
4312 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
4313 SET_DECL_RTL (exp
, op0
);
4323 op0
= DECL_RTL_IF_SET (exp
);
4325 /* This decl was probably optimized away. */
4329 || DECL_EXTERNAL (exp
)
4330 || !TREE_STATIC (exp
)
4332 || DECL_HARD_REGISTER (exp
)
4333 || DECL_IN_CONSTANT_POOL (exp
)
4334 || mode
== VOIDmode
)
4337 op0
= make_decl_rtl_for_debug (exp
);
4339 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
4340 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
4344 op0
= copy_rtx (op0
);
4346 if (GET_MODE (op0
) == BLKmode
4347 /* If op0 is not BLKmode, but mode is, adjust_mode
4348 below would ICE. While it is likely a FE bug,
4349 try to be robust here. See PR43166. */
4351 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
4353 gcc_assert (MEM_P (op0
));
4354 op0
= adjust_address_nv (op0
, mode
, 0);
4364 inner_mode
= GET_MODE (op0
);
4366 if (mode
== inner_mode
)
4369 if (inner_mode
== VOIDmode
)
4371 if (TREE_CODE (exp
) == SSA_NAME
)
4372 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
4374 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4375 if (mode
== inner_mode
)
4379 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4381 if (GET_MODE_UNIT_BITSIZE (mode
)
4382 == GET_MODE_UNIT_BITSIZE (inner_mode
))
4383 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4384 else if (GET_MODE_UNIT_BITSIZE (mode
)
4385 < GET_MODE_UNIT_BITSIZE (inner_mode
))
4386 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4388 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4390 else if (FLOAT_MODE_P (mode
))
4392 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4393 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4394 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4396 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4398 else if (FLOAT_MODE_P (inner_mode
))
4401 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4403 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4405 else if (GET_MODE_UNIT_PRECISION (mode
)
4406 == GET_MODE_UNIT_PRECISION (inner_mode
))
4407 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
4408 else if (GET_MODE_UNIT_PRECISION (mode
)
4409 < GET_MODE_UNIT_PRECISION (inner_mode
))
4410 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
4411 else if (UNARY_CLASS_P (exp
)
4412 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4414 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4416 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4422 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4424 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4425 TREE_OPERAND (exp
, 0),
4426 TREE_OPERAND (exp
, 1));
4428 return expand_debug_expr (newexp
);
4432 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4433 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4437 if (TREE_CODE (exp
) == MEM_REF
)
4439 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4440 || (GET_CODE (op0
) == PLUS
4441 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4442 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4443 Instead just use get_inner_reference. */
4446 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4447 if (!op1
|| !CONST_INT_P (op1
))
4450 op0
= plus_constant (inner_mode
, op0
, INTVAL (op1
));
4453 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4455 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4457 if (op0
== NULL_RTX
)
4460 op0
= gen_rtx_MEM (mode
, op0
);
4461 set_mem_attributes (op0
, exp
, 0);
4462 if (TREE_CODE (exp
) == MEM_REF
4463 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4464 set_mem_expr (op0
, NULL_TREE
);
4465 set_mem_addr_space (op0
, as
);
4469 case TARGET_MEM_REF
:
4470 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4471 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4474 op0
= expand_debug_expr
4475 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4479 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4480 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4482 if (op0
== NULL_RTX
)
4485 op0
= gen_rtx_MEM (mode
, op0
);
4487 set_mem_attributes (op0
, exp
, 0);
4488 set_mem_addr_space (op0
, as
);
4494 case ARRAY_RANGE_REF
:
4499 case VIEW_CONVERT_EXPR
:
4502 poly_int64 bitsize
, bitpos
;
4504 int reversep
, volatilep
= 0;
4506 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4507 &unsignedp
, &reversep
, &volatilep
);
4510 if (known_eq (bitsize
, 0))
4513 orig_op0
= op0
= expand_debug_expr (tem
);
4520 machine_mode addrmode
, offmode
;
4525 op0
= XEXP (op0
, 0);
4526 addrmode
= GET_MODE (op0
);
4527 if (addrmode
== VOIDmode
)
4530 op1
= expand_debug_expr (offset
);
4534 offmode
= GET_MODE (op1
);
4535 if (offmode
== VOIDmode
)
4536 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4538 if (addrmode
!= offmode
)
4539 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4541 /* Don't use offset_address here, we don't need a
4542 recognizable address, and we don't want to generate
4544 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4550 if (mode1
== VOIDmode
)
4552 if (maybe_gt (bitsize
, MAX_BITSIZE_MODE_ANY_INT
))
4555 mode1
= smallest_int_mode_for_size (bitsize
);
4557 poly_int64 bytepos
= bits_to_bytes_round_down (bitpos
);
4558 if (maybe_ne (bytepos
, 0))
4560 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
4561 bitpos
= num_trailing_bits (bitpos
);
4563 else if (known_eq (bitpos
, 0)
4564 && known_eq (bitsize
, GET_MODE_BITSIZE (mode
)))
4565 op0
= adjust_address_nv (op0
, mode
, 0);
4566 else if (GET_MODE (op0
) != mode1
)
4567 op0
= adjust_address_nv (op0
, mode1
, 0);
4569 op0
= copy_rtx (op0
);
4570 if (op0
== orig_op0
)
4571 op0
= shallow_copy_rtx (op0
);
4572 set_mem_attributes (op0
, exp
, 0);
4575 if (known_eq (bitpos
, 0) && mode
== GET_MODE (op0
))
4578 if (maybe_lt (bitpos
, 0))
4581 if (GET_MODE (op0
) == BLKmode
|| mode
== BLKmode
)
4585 if (multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
4586 && known_eq (bitsize
, GET_MODE_BITSIZE (mode1
)))
4588 machine_mode opmode
= GET_MODE (op0
);
4590 if (opmode
== VOIDmode
)
4591 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4593 /* This condition may hold if we're expanding the address
4594 right past the end of an array that turned out not to
4595 be addressable (i.e., the address was only computed in
4596 debug stmts). The gen_subreg below would rightfully
4597 crash, and the address doesn't really exist, so just
4599 if (known_ge (bitpos
, GET_MODE_BITSIZE (opmode
)))
4602 if (multiple_p (bitpos
, GET_MODE_BITSIZE (mode
)))
4603 return simplify_gen_subreg (mode
, op0
, opmode
, bytepos
);
4606 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4607 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4609 : ZERO_EXTRACT
, mode
,
4610 GET_MODE (op0
) != VOIDmode
4612 : TYPE_MODE (TREE_TYPE (tem
)),
4613 op0
, gen_int_mode (bitsize
, word_mode
),
4614 gen_int_mode (bitpos
, word_mode
));
4618 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4621 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4624 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4627 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4629 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4632 case FIX_TRUNC_EXPR
:
4633 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
4636 case POINTER_PLUS_EXPR
:
4637 /* For the rare target where pointers are not the same size as
4638 size_t, we need to check for mis-matched modes and correct
4641 && is_a
<scalar_int_mode
> (GET_MODE (op0
), &op0_mode
)
4642 && is_a
<scalar_int_mode
> (GET_MODE (op1
), &op1_mode
)
4643 && op0_mode
!= op1_mode
)
4645 if (GET_MODE_BITSIZE (op0_mode
) < GET_MODE_BITSIZE (op1_mode
)
4646 /* If OP0 is a partial mode, then we must truncate, even
4647 if it has the same bitsize as OP1 as GCC's
4648 representation of partial modes is opaque. */
4649 || (GET_MODE_CLASS (op0_mode
) == MODE_PARTIAL_INT
4650 && (GET_MODE_BITSIZE (op0_mode
)
4651 == GET_MODE_BITSIZE (op1_mode
))))
4652 op1
= simplify_gen_unary (TRUNCATE
, op0_mode
, op1
, op1_mode
);
4654 /* We always sign-extend, regardless of the signedness of
4655 the operand, because the operand is always unsigned
4656 here even if the original C expression is signed. */
4657 op1
= simplify_gen_unary (SIGN_EXTEND
, op0_mode
, op1
, op1_mode
);
4661 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
4664 case POINTER_DIFF_EXPR
:
4665 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4668 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4671 case TRUNC_DIV_EXPR
:
4672 case EXACT_DIV_EXPR
:
4674 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4676 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4678 case TRUNC_MOD_EXPR
:
4679 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4681 case FLOOR_DIV_EXPR
:
4683 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4686 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4687 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4688 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4689 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4692 case FLOOR_MOD_EXPR
:
4694 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4697 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4698 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4699 adj
= simplify_gen_unary (NEG
, mode
,
4700 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4702 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4708 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4709 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4710 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4711 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4715 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4716 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4717 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4718 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4724 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4725 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4726 adj
= simplify_gen_unary (NEG
, mode
,
4727 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4729 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4733 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4734 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4735 adj
= simplify_gen_unary (NEG
, mode
,
4736 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4738 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4741 case ROUND_DIV_EXPR
:
4744 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4745 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4746 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4747 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4751 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4752 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4753 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4754 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4757 case ROUND_MOD_EXPR
:
4760 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4761 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4762 adj
= simplify_gen_unary (NEG
, mode
,
4763 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4765 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4769 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4770 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4771 adj
= simplify_gen_unary (NEG
, mode
,
4772 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4774 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4778 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4782 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4784 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4787 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4790 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4793 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4796 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4799 case TRUTH_AND_EXPR
:
4800 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4804 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4807 case TRUTH_XOR_EXPR
:
4808 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4810 case TRUTH_ANDIF_EXPR
:
4811 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4813 case TRUTH_ORIF_EXPR
:
4814 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4816 case TRUTH_NOT_EXPR
:
4817 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4820 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4824 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4828 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4832 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4836 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4839 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4841 case UNORDERED_EXPR
:
4842 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4845 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4848 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4851 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4854 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4857 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4860 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4863 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4866 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4869 gcc_assert (COMPLEX_MODE_P (mode
));
4870 if (GET_MODE (op0
) == VOIDmode
)
4871 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4872 if (GET_MODE (op1
) == VOIDmode
)
4873 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4874 return gen_rtx_CONCAT (mode
, op0
, op1
);
4877 if (GET_CODE (op0
) == CONCAT
)
4878 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4879 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4881 GET_MODE_INNER (mode
)));
4884 scalar_mode imode
= GET_MODE_INNER (mode
);
4889 re
= adjust_address_nv (op0
, imode
, 0);
4890 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4894 scalar_int_mode ifmode
;
4895 scalar_int_mode ihmode
;
4897 if (!int_mode_for_mode (mode
).exists (&ifmode
)
4898 || !int_mode_for_mode (imode
).exists (&ihmode
))
4900 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4903 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4904 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4905 if (imode
!= ihmode
)
4906 re
= gen_rtx_SUBREG (imode
, re
, 0);
4907 im
= copy_rtx (op0
);
4909 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4910 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4911 if (imode
!= ihmode
)
4912 im
= gen_rtx_SUBREG (imode
, im
, 0);
4914 im
= gen_rtx_NEG (imode
, im
);
4915 return gen_rtx_CONCAT (mode
, re
, im
);
4919 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4920 if (!op0
|| !MEM_P (op0
))
4922 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4923 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4924 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
4925 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
4926 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
4927 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
4929 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4931 poly_int64 bitoffset
, bitsize
, maxsize
, byteoffset
;
4934 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
4935 &bitsize
, &maxsize
, &reverse
);
4937 || TREE_CODE (decl
) == PARM_DECL
4938 || TREE_CODE (decl
) == RESULT_DECL
)
4939 && (!TREE_ADDRESSABLE (decl
)
4940 || target_for_debug_bind (decl
))
4941 && multiple_p (bitoffset
, BITS_PER_UNIT
, &byteoffset
)
4942 && known_gt (bitsize
, 0)
4943 && known_eq (bitsize
, maxsize
))
4945 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
4946 return plus_constant (mode
, base
, byteoffset
);
4950 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
4951 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
4954 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4957 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4958 || (GET_CODE (op0
) == PLUS
4959 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
4960 && CONST_INT_P (XEXP (op0
, 1)))))
4962 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4964 if (!op1
|| !CONST_INT_P (op1
))
4967 return plus_constant (mode
, op0
, INTVAL (op1
));
4974 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
4975 addr_mode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
4976 op0
= convert_debug_memory_address (addr_mode
, XEXP (op0
, 0), as
);
4982 unsigned HOST_WIDE_INT i
, nelts
;
4984 if (!VECTOR_CST_NELTS (exp
).is_constant (&nelts
))
4987 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
4989 for (i
= 0; i
< nelts
; ++i
)
4991 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
4994 XVECEXP (op0
, 0, i
) = op1
;
5001 if (TREE_CLOBBER_P (exp
))
5003 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
5006 unsigned HOST_WIDE_INT nelts
;
5009 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)).is_constant (&nelts
))
5010 goto flag_unsupported
;
5012 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5014 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
5016 op1
= expand_debug_expr (val
);
5019 XVECEXP (op0
, 0, i
) = op1
;
5024 op1
= expand_debug_expr
5025 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
5030 for (; i
< nelts
; i
++)
5031 XVECEXP (op0
, 0, i
) = op1
;
5037 goto flag_unsupported
;
5040 /* ??? Maybe handle some builtins? */
5045 gimple
*g
= get_gimple_for_ssa_name (exp
);
5049 if (deep_ter_debug_map
)
5051 tree
*slot
= deep_ter_debug_map
->get (exp
);
5056 t
= gimple_assign_rhs_to_tree (g
);
5057 op0
= expand_debug_expr (t
);
5063 /* If this is a reference to an incoming value of
5064 parameter that is never used in the code or where the
5065 incoming value is never used in the code, use
5066 PARM_DECL's DECL_RTL if set. */
5067 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5068 && SSA_NAME_VAR (exp
)
5069 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5070 && has_zero_uses (exp
))
5072 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5075 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5080 int part
= var_to_partition (SA
.map
, exp
);
5082 if (part
== NO_PARTITION
)
5085 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5087 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5095 /* Vector stuff. For most of the codes we don't have rtl codes. */
5096 case REALIGN_LOAD_EXPR
:
5098 case VEC_PACK_FIX_TRUNC_EXPR
:
5099 case VEC_PACK_SAT_EXPR
:
5100 case VEC_PACK_TRUNC_EXPR
:
5101 case VEC_UNPACK_FLOAT_HI_EXPR
:
5102 case VEC_UNPACK_FLOAT_LO_EXPR
:
5103 case VEC_UNPACK_HI_EXPR
:
5104 case VEC_UNPACK_LO_EXPR
:
5105 case VEC_WIDEN_MULT_HI_EXPR
:
5106 case VEC_WIDEN_MULT_LO_EXPR
:
5107 case VEC_WIDEN_MULT_EVEN_EXPR
:
5108 case VEC_WIDEN_MULT_ODD_EXPR
:
5109 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5110 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5112 case VEC_DUPLICATE_EXPR
:
5113 case VEC_SERIES_EXPR
:
5117 case ADDR_SPACE_CONVERT_EXPR
:
5118 case FIXED_CONVERT_EXPR
:
5120 case WITH_SIZE_EXPR
:
5121 case BIT_INSERT_EXPR
:
5125 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5126 && SCALAR_INT_MODE_P (mode
))
5129 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5131 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5134 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5136 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5138 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5139 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5143 case WIDEN_MULT_EXPR
:
5144 case WIDEN_MULT_PLUS_EXPR
:
5145 case WIDEN_MULT_MINUS_EXPR
:
5146 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5147 && SCALAR_INT_MODE_P (mode
))
5149 inner_mode
= GET_MODE (op0
);
5150 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5151 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5153 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5154 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5155 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5157 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5158 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5159 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5161 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5162 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5164 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
5168 case MULT_HIGHPART_EXPR
:
5169 /* ??? Similar to the above. */
5172 case WIDEN_SUM_EXPR
:
5173 case WIDEN_LSHIFT_EXPR
:
5174 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5175 && SCALAR_INT_MODE_P (mode
))
5178 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5180 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5182 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5183 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
5188 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
5201 /* Return an RTX equivalent to the source bind value of the tree expression
5205 expand_debug_source_expr (tree exp
)
5208 machine_mode mode
= VOIDmode
, inner_mode
;
5210 switch (TREE_CODE (exp
))
5214 mode
= DECL_MODE (exp
);
5215 op0
= expand_debug_parm_decl (exp
);
5218 /* See if this isn't an argument that has been completely
5220 if (!DECL_RTL_SET_P (exp
)
5221 && !DECL_INCOMING_RTL (exp
)
5222 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
5224 tree aexp
= DECL_ORIGIN (exp
);
5225 if (DECL_CONTEXT (aexp
)
5226 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
5228 vec
<tree
, va_gc
> **debug_args
;
5231 debug_args
= decl_debug_args_lookup (current_function_decl
);
5232 if (debug_args
!= NULL
)
5234 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
5237 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
5247 if (op0
== NULL_RTX
)
5250 inner_mode
= GET_MODE (op0
);
5251 if (mode
== inner_mode
)
5254 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
5256 if (GET_MODE_UNIT_BITSIZE (mode
)
5257 == GET_MODE_UNIT_BITSIZE (inner_mode
))
5258 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
5259 else if (GET_MODE_UNIT_BITSIZE (mode
)
5260 < GET_MODE_UNIT_BITSIZE (inner_mode
))
5261 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
5263 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
5265 else if (FLOAT_MODE_P (mode
))
5267 else if (FLOAT_MODE_P (inner_mode
))
5269 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5270 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
5272 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
5274 else if (GET_MODE_UNIT_PRECISION (mode
)
5275 == GET_MODE_UNIT_PRECISION (inner_mode
))
5276 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
5277 else if (GET_MODE_UNIT_PRECISION (mode
)
5278 < GET_MODE_UNIT_PRECISION (inner_mode
))
5279 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
5280 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5281 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5283 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5288 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5289 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5290 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5293 avoid_complex_debug_insns (rtx_insn
*insn
, rtx
*exp_p
, int depth
)
5297 if (exp
== NULL_RTX
)
5300 if ((OBJECT_P (exp
) && !MEM_P (exp
)) || GET_CODE (exp
) == CLOBBER
)
5305 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5306 rtx dval
= make_debug_expr_from_rtl (exp
);
5308 /* Emit a debug bind insn before INSN. */
5309 rtx bind
= gen_rtx_VAR_LOCATION (GET_MODE (exp
),
5310 DEBUG_EXPR_TREE_DECL (dval
), exp
,
5311 VAR_INIT_STATUS_INITIALIZED
);
5313 emit_debug_insn_before (bind
, insn
);
5318 const char *format_ptr
= GET_RTX_FORMAT (GET_CODE (exp
));
5320 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (exp
)); i
++)
5321 switch (*format_ptr
++)
5324 avoid_complex_debug_insns (insn
, &XEXP (exp
, i
), depth
+ 1);
5329 for (j
= 0; j
< XVECLEN (exp
, i
); j
++)
5330 avoid_complex_debug_insns (insn
, &XVECEXP (exp
, i
, j
), depth
+ 1);
5338 /* Expand the _LOCs in debug insns. We run this after expanding all
5339 regular insns, so that any variables referenced in the function
5340 will have their DECL_RTLs set. */
5343 expand_debug_locations (void)
5346 rtx_insn
*last
= get_last_insn ();
5347 int save_strict_alias
= flag_strict_aliasing
;
5349 /* New alias sets while setting up memory attributes cause
5350 -fcompare-debug failures, even though it doesn't bring about any
5352 flag_strict_aliasing
= 0;
5354 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5355 if (DEBUG_BIND_INSN_P (insn
))
5357 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
5359 rtx_insn
*prev_insn
, *insn2
;
5362 if (value
== NULL_TREE
)
5366 if (INSN_VAR_LOCATION_STATUS (insn
)
5367 == VAR_INIT_STATUS_UNINITIALIZED
)
5368 val
= expand_debug_source_expr (value
);
5369 /* The avoid_deep_ter_for_debug function inserts
5370 debug bind stmts after SSA_NAME definition, with the
5371 SSA_NAME as the whole bind location. Disable temporarily
5372 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5373 being defined in this DEBUG_INSN. */
5374 else if (deep_ter_debug_map
&& TREE_CODE (value
) == SSA_NAME
)
5376 tree
*slot
= deep_ter_debug_map
->get (value
);
5379 if (*slot
== INSN_VAR_LOCATION_DECL (insn
))
5384 val
= expand_debug_expr (value
);
5386 *slot
= INSN_VAR_LOCATION_DECL (insn
);
5389 val
= expand_debug_expr (value
);
5390 gcc_assert (last
== get_last_insn ());
5394 val
= gen_rtx_UNKNOWN_VAR_LOC ();
5397 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
5399 gcc_assert (mode
== GET_MODE (val
)
5400 || (GET_MODE (val
) == VOIDmode
5401 && (CONST_SCALAR_INT_P (val
)
5402 || GET_CODE (val
) == CONST_FIXED
5403 || GET_CODE (val
) == LABEL_REF
)));
5406 INSN_VAR_LOCATION_LOC (insn
) = val
;
5407 prev_insn
= PREV_INSN (insn
);
5408 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
5409 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
5412 flag_strict_aliasing
= save_strict_alias
;
5415 /* Performs swapping operands of commutative operations to expand
5416 the expensive one first. */
5419 reorder_operands (basic_block bb
)
5421 unsigned int *lattice
; /* Hold cost of each statement. */
5422 unsigned int i
= 0, n
= 0;
5423 gimple_stmt_iterator gsi
;
5429 use_operand_p use_p
;
5430 gimple
*def0
, *def1
;
5432 /* Compute cost of each statement using estimate_num_insns. */
5433 stmts
= bb_seq (bb
);
5434 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5436 stmt
= gsi_stmt (gsi
);
5437 if (!is_gimple_debug (stmt
))
5438 gimple_set_uid (stmt
, n
++);
5440 lattice
= XNEWVEC (unsigned int, n
);
5441 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5444 stmt
= gsi_stmt (gsi
);
5445 if (is_gimple_debug (stmt
))
5447 cost
= estimate_num_insns (stmt
, &eni_size_weights
);
5449 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
5451 tree use
= USE_FROM_PTR (use_p
);
5453 if (TREE_CODE (use
) != SSA_NAME
)
5455 def_stmt
= get_gimple_for_ssa_name (use
);
5458 lattice
[i
] += lattice
[gimple_uid (def_stmt
)];
5461 if (!is_gimple_assign (stmt
)
5462 || !commutative_tree_code (gimple_assign_rhs_code (stmt
)))
5464 op0
= gimple_op (stmt
, 1);
5465 op1
= gimple_op (stmt
, 2);
5466 if (TREE_CODE (op0
) != SSA_NAME
5467 || TREE_CODE (op1
) != SSA_NAME
)
5469 /* Swap operands if the second one is more expensive. */
5470 def0
= get_gimple_for_ssa_name (op0
);
5471 def1
= get_gimple_for_ssa_name (op1
);
5475 if (!def0
|| lattice
[gimple_uid (def1
)] > lattice
[gimple_uid (def0
)])
5479 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5481 fprintf (dump_file
, "Swap operands in stmt:\n");
5482 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5483 fprintf (dump_file
, "Cost left opnd=%d, right opnd=%d\n",
5484 def0
? lattice
[gimple_uid (def0
)] : 0,
5485 lattice
[gimple_uid (def1
)]);
5487 swap_ssa_operands (stmt
, gimple_assign_rhs1_ptr (stmt
),
5488 gimple_assign_rhs2_ptr (stmt
));
5494 /* Expand basic block BB from GIMPLE trees to RTL. */
5497 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5499 gimple_stmt_iterator gsi
;
5501 gimple
*stmt
= NULL
;
5502 rtx_note
*note
= NULL
;
5508 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5511 /* Note that since we are now transitioning from GIMPLE to RTL, we
5512 cannot use the gsi_*_bb() routines because they expect the basic
5513 block to be in GIMPLE, instead of RTL. Therefore, we need to
5514 access the BB sequence directly. */
5516 reorder_operands (bb
);
5517 stmts
= bb_seq (bb
);
5518 bb
->il
.gimple
.seq
= NULL
;
5519 bb
->il
.gimple
.phi_nodes
= NULL
;
5520 rtl_profile_for_bb (bb
);
5521 init_rtl_bb_info (bb
);
5522 bb
->flags
|= BB_RTL
;
5524 /* Remove the RETURN_EXPR if we may fall though to the exit
5526 gsi
= gsi_last (stmts
);
5527 if (!gsi_end_p (gsi
)
5528 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5530 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5532 gcc_assert (single_succ_p (bb
));
5533 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5535 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5536 && !gimple_return_retval (ret_stmt
))
5538 gsi_remove (&gsi
, false);
5539 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5543 gsi
= gsi_start (stmts
);
5544 if (!gsi_end_p (gsi
))
5546 stmt
= gsi_stmt (gsi
);
5547 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5551 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5555 gcc_checking_assert (!note
);
5556 last
= get_last_insn ();
5560 expand_gimple_stmt (stmt
);
5567 BB_HEAD (bb
) = NEXT_INSN (last
);
5568 if (NOTE_P (BB_HEAD (bb
)))
5569 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5570 gcc_assert (LABEL_P (BB_HEAD (bb
)));
5571 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5573 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5576 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5579 NOTE_BASIC_BLOCK (note
) = bb
;
5581 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5585 stmt
= gsi_stmt (gsi
);
5587 /* If this statement is a non-debug one, and we generate debug
5588 insns, then this one might be the last real use of a TERed
5589 SSA_NAME, but where there are still some debug uses further
5590 down. Expanding the current SSA name in such further debug
5591 uses by their RHS might lead to wrong debug info, as coalescing
5592 might make the operands of such RHS be placed into the same
5593 pseudo as something else. Like so:
5594 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5598 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5599 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5600 the write to a_2 would actually have clobbered the place which
5603 So, instead of that, we recognize the situation, and generate
5604 debug temporaries at the last real use of TERed SSA names:
5611 if (MAY_HAVE_DEBUG_BIND_INSNS
5613 && !is_gimple_debug (stmt
))
5619 location_t sloc
= curr_insn_location ();
5621 /* Look for SSA names that have their last use here (TERed
5622 names always have only one real use). */
5623 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5624 if ((def
= get_gimple_for_ssa_name (op
)))
5626 imm_use_iterator imm_iter
;
5627 use_operand_p use_p
;
5628 bool have_debug_uses
= false;
5630 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5632 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5634 have_debug_uses
= true;
5639 if (have_debug_uses
)
5641 /* OP is a TERed SSA name, with DEF its defining
5642 statement, and where OP is used in further debug
5643 instructions. Generate a debug temporary, and
5644 replace all uses of OP in debug insns with that
5647 tree value
= gimple_assign_rhs_to_tree (def
);
5648 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5652 set_curr_insn_location (gimple_location (def
));
5654 DECL_ARTIFICIAL (vexpr
) = 1;
5655 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5657 mode
= DECL_MODE (value
);
5659 mode
= TYPE_MODE (TREE_TYPE (value
));
5660 SET_DECL_MODE (vexpr
, mode
);
5662 val
= gen_rtx_VAR_LOCATION
5663 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5665 emit_debug_insn (val
);
5667 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5669 if (!gimple_debug_bind_p (debugstmt
))
5672 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5673 SET_USE (use_p
, vexpr
);
5675 update_stmt (debugstmt
);
5679 set_curr_insn_location (sloc
);
5682 currently_expanding_gimple_stmt
= stmt
;
5684 /* Expand this statement, then evaluate the resulting RTL and
5685 fixup the CFG accordingly. */
5686 if (gimple_code (stmt
) == GIMPLE_COND
)
5688 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5692 else if (is_gimple_debug (stmt
))
5694 location_t sloc
= curr_insn_location ();
5695 gimple_stmt_iterator nsi
= gsi
;
5700 tree value
= NULL_TREE
;
5704 if (!gimple_debug_nonbind_marker_p (stmt
))
5706 if (gimple_debug_bind_p (stmt
))
5708 var
= gimple_debug_bind_get_var (stmt
);
5710 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5711 && TREE_CODE (var
) != LABEL_DECL
5712 && !target_for_debug_bind (var
))
5713 goto delink_debug_stmt
;
5716 mode
= DECL_MODE (var
);
5718 mode
= TYPE_MODE (TREE_TYPE (var
));
5720 if (gimple_debug_bind_has_value_p (stmt
))
5721 value
= gimple_debug_bind_get_value (stmt
);
5723 val
= gen_rtx_VAR_LOCATION
5724 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5726 else if (gimple_debug_source_bind_p (stmt
))
5728 var
= gimple_debug_source_bind_get_var (stmt
);
5730 value
= gimple_debug_source_bind_get_value (stmt
);
5732 mode
= DECL_MODE (var
);
5734 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5735 VAR_INIT_STATUS_UNINITIALIZED
);
5740 /* If this function was first compiled with markers
5741 enabled, but they're now disable (e.g. LTO), drop
5742 them on the floor. */
5743 else if (gimple_debug_nonbind_marker_p (stmt
)
5744 && !MAY_HAVE_DEBUG_MARKER_INSNS
)
5745 goto delink_debug_stmt
;
5746 else if (gimple_debug_begin_stmt_p (stmt
))
5747 val
= GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5748 else if (gimple_debug_inline_entry_p (stmt
))
5750 tree block
= gimple_block (stmt
);
5753 val
= GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5755 goto delink_debug_stmt
;
5760 last
= get_last_insn ();
5762 set_curr_insn_location (gimple_location (stmt
));
5764 emit_debug_insn (val
);
5766 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5768 /* We can't dump the insn with a TREE where an RTX
5770 if (GET_CODE (val
) == VAR_LOCATION
)
5772 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val
) == (rtx
)value
);
5773 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5775 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5776 if (GET_CODE (val
) == VAR_LOCATION
)
5777 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5781 /* In order not to generate too many debug temporaries,
5782 we delink all uses of debug statements we already expanded.
5783 Therefore debug statements between definition and real
5784 use of TERed SSA names will continue to use the SSA name,
5785 and not be replaced with debug temps. */
5786 delink_stmt_imm_use (stmt
);
5790 if (gsi_end_p (nsi
))
5792 stmt
= gsi_stmt (nsi
);
5793 if (!is_gimple_debug (stmt
))
5797 set_curr_insn_location (sloc
);
5801 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5803 && gimple_call_tail_p (call_stmt
)
5804 && disable_tail_calls
)
5805 gimple_call_set_tail (call_stmt
, false);
5807 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5810 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5821 def_operand_p def_p
;
5822 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5826 /* Ignore this stmt if it is in the list of
5827 replaceable expressions. */
5829 && bitmap_bit_p (SA
.values
,
5830 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5833 last
= expand_gimple_stmt (stmt
);
5834 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5839 currently_expanding_gimple_stmt
= NULL
;
5841 /* Expand implicit goto and convert goto_locus. */
5842 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5844 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5845 set_curr_insn_location (e
->goto_locus
);
5846 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5848 emit_jump (label_rtx_for_bb (e
->dest
));
5849 e
->flags
&= ~EDGE_FALLTHRU
;
5853 /* Expanded RTL can create a jump in the last instruction of block.
5854 This later might be assumed to be a jump to successor and break edge insertion.
5855 We need to insert dummy move to prevent this. PR41440. */
5856 if (single_succ_p (bb
)
5857 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5858 && (last
= get_last_insn ())
5860 || (DEBUG_INSN_P (last
)
5861 && JUMP_P (prev_nondebug_insn (last
)))))
5863 rtx dummy
= gen_reg_rtx (SImode
);
5864 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5867 do_pending_stack_adjust ();
5869 /* Find the block tail. The last insn in the block is the insn
5870 before a barrier and/or table jump insn. */
5871 last
= get_last_insn ();
5872 if (BARRIER_P (last
))
5873 last
= PREV_INSN (last
);
5874 if (JUMP_TABLE_DATA_P (last
))
5875 last
= PREV_INSN (PREV_INSN (last
));
5878 update_bb_for_insn (bb
);
5884 /* Create a basic block for initialization code. */
5887 construct_init_block (void)
5889 basic_block init_block
, first_block
;
5893 /* Multiple entry points not supported yet. */
5894 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
5895 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5896 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5897 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5898 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5900 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
5902 /* When entry edge points to first basic block, we don't need jump,
5903 otherwise we have to jump into proper target. */
5904 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
5906 tree label
= gimple_block_label (e
->dest
);
5908 emit_jump (jump_target_rtx (label
));
5912 flags
= EDGE_FALLTHRU
;
5914 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
5916 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5917 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
5918 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5921 first_block
= e
->dest
;
5922 redirect_edge_succ (e
, init_block
);
5923 e
= make_single_succ_edge (init_block
, first_block
, flags
);
5926 e
= make_single_succ_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
5929 update_bb_for_insn (init_block
);
5933 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5934 found in the block tree. */
5937 set_block_levels (tree block
, int level
)
5941 BLOCK_NUMBER (block
) = level
;
5942 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
5943 block
= BLOCK_CHAIN (block
);
5947 /* Create a block containing landing pads and similar stuff. */
5950 construct_exit_block (void)
5952 rtx_insn
*head
= get_last_insn ();
5954 basic_block exit_block
;
5958 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
5959 rtx_insn
*orig_end
= BB_END (prev_bb
);
5961 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5963 /* Make sure the locus is set to the end of the function, so that
5964 epilogue line numbers and warnings are set properly. */
5965 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
5966 input_location
= cfun
->function_end_locus
;
5968 /* Generate rtl for function exit. */
5969 expand_function_end ();
5971 end
= get_last_insn ();
5974 /* While emitting the function end we could move end of the last basic
5976 BB_END (prev_bb
) = orig_end
;
5977 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
5978 head
= NEXT_INSN (head
);
5979 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5980 bb count counting will be confused. Any instructions before that
5981 label are emitted for the case where PREV_BB falls through into the
5982 exit block, so append those instructions to prev_bb in that case. */
5983 if (NEXT_INSN (head
) != return_label
)
5985 while (NEXT_INSN (head
) != return_label
)
5987 if (!NOTE_P (NEXT_INSN (head
)))
5988 BB_END (prev_bb
) = NEXT_INSN (head
);
5989 head
= NEXT_INSN (head
);
5992 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
5993 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
5994 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5997 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
5999 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
6000 if (!(e
->flags
& EDGE_ABNORMAL
))
6001 redirect_edge_succ (e
, exit_block
);
6006 e
= make_single_succ_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6008 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6011 exit_block
->count
-= e2
->count ();
6013 update_bb_for_insn (exit_block
);
6016 /* Helper function for discover_nonconstant_array_refs.
6017 Look for ARRAY_REF nodes with non-constant indexes and mark them
6021 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
6022 void *data ATTRIBUTE_UNUSED
)
6026 if (IS_TYPE_OR_DECL_P (t
))
6028 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6030 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6031 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
6032 && (!TREE_OPERAND (t
, 2)
6033 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6034 || (TREE_CODE (t
) == COMPONENT_REF
6035 && (!TREE_OPERAND (t
,2)
6036 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6037 || TREE_CODE (t
) == BIT_FIELD_REF
6038 || TREE_CODE (t
) == REALPART_EXPR
6039 || TREE_CODE (t
) == IMAGPART_EXPR
6040 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
6041 || CONVERT_EXPR_P (t
))
6042 t
= TREE_OPERAND (t
, 0);
6044 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6046 t
= get_base_address (t
);
6048 && DECL_MODE (t
) != BLKmode
)
6049 TREE_ADDRESSABLE (t
) = 1;
6058 /* RTL expansion is not able to compile array references with variable
6059 offsets for arrays stored in single register. Discover such
6060 expressions and mark variables as addressable to avoid this
6064 discover_nonconstant_array_refs (void)
6067 gimple_stmt_iterator gsi
;
6069 FOR_EACH_BB_FN (bb
, cfun
)
6070 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6072 gimple
*stmt
= gsi_stmt (gsi
);
6073 if (!is_gimple_debug (stmt
))
6074 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  /* Nothing to do on targets that cannot realign the stack.  */
  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  /* alloca, nonlocal labels and nonlocal gotos make the frame address
     escape or be reentered, so a dynamic realign argument pointer
     (DRAP) register is required to address incoming arguments.  */
  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
/* Emit a call to __main from `main' so that global constructors and
   initializers are run, on systems where no init section does the
   job automatically.  See the NAME__MAIN comment near the top of the
   file.  */

static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* X is this frame's canary slot; Y is the master guard value it must
     be initialized from (zero when the target keeps the guard itself,
     e.g. in a TLS slot).  */
  x = expand_normal (crtl->stack_protect_guard);
  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

/* Pass-manager metadata for the expand pass: requires lowered GIMPLE
   in SSA form, provides RTL, and destroys the tree/SSA properties.  */
const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The GIMPLE->RTL expansion pass.  There is no gate: the pass always
   runs; all the work happens in execute.  */

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
/* Main driver of the expand pass: take FUN out of SSA, expand every
   basic block from GIMPLE to RTL, rebuild the RTL-level CFG, and do
   the post-expansion fixups (EH, stack alignment, tail calls).
   Returns 0 (no extra TODOs).  */

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  /* TER can produce expressions too deep for the debug expanders;
     pre-emptively break them up for debug-bind statements.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  if (chkp_function_instrumented_p (current_function_decl))
    chkp_reset_rtl_bounds ();

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->init_stack_alignment ();
  fun->cfg->max_jumptable_ents = 0;

  /* Resovle the function section.  Some targets, like ARM EABI rely on knowledge
     of the function section at exapnsion time to predict distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just insertted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statements, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statements, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  gcc_assert (in);

	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
    cfun->debug_nonbind_markers = false;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  /* If expand_used_vars deferred a sequence for the return value,
     place it right after the return label.  */
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  /* Commit insn sequences queued on edges during expansion.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSNS_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
/* Factory used by the pass manager to create an instance of the
   expand pass.  Caller owns the returned object.  */

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}