/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && !flag_tree_coalesce_vars))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we're not coalescing across variables,
     so that we don't reject BLKmode PARALLELs or unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && !flag_tree_coalesce_vars)
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
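/* Worked example (illustrative, not part of the original source):
   assuming PREFERRED_STACK_BOUNDARY is 128 bits (align = 16 bytes) and
   STARTING_FRAME_OFFSET is 8, expand_used_vars computes off = 8 % 16 = 8
   and frame_phase = 16 - 8 = 8, so frame_offset values of -8, -24, ...
   all satisfy (frame_offset + frame_phase) % 16 == 0.  */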
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      DECL_ALIGN (decl) = align;
    }
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
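/* Worked example (illustrative, not part of the original source):
   with base = 13 and align = 8, -align is the two's-complement mask ~7,
   so aligning up yields (13 + 7) & ~7 = 16 and aligning down yields
   13 & ~7 = 8.  For negative offsets, e.g. base = -13, aligning down
   yields -13 & ~7 = -16, i.e. rounding toward more negative values,
   which is what a downward-growing frame needs.  */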
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
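/* Worked example (illustrative, not part of the original source):
   on a target where FRAME_GROWS_DOWNWARD, with frame_phase = 0,
   frame_offset = -16, size = 12 and align = 8, the new frame offset is
   align_base (-16 - 12, 8, false) = -28 & -8 = -32; the variable
   occupies bytes [-32, -20) and the function returns -32.  */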
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
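/* Illustrative sketch of the liveness approximation above (not part of
   the original source):

     {  char a[100];  use (&a);  }   gimplify appends "a = {CLOBBER};"
     {  char b[100];  use (&b);  }   gimplify appends "b = {CLOBBER};"

   The first mention of 'a' starts its range and the clobber ends it
   before 'b' is ever mentioned, so the two partitions are never live
   simultaneously, no conflict is recorded, and partition_stack_vars
   may later place both variables in the same stack slot.  */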
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
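/* Illustrative sort order (not part of the original source): given
   decls with (size, alignb) of (32, 16), (4, 4) and (32, 8), and no
   "large" over-aligned objects, stack_var_cmp orders them
   (32, 16), (32, 8), (4, 4): size decreasing first, then alignment
   decreasing, with DECL_UID / SSA version only breaking exact ties.  */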
struct part_traits : unbounded_int_hashmap_traits<size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
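/* Illustrative run (not part of the original source): with sorted
   representatives A(32), B(24), C(8), D(8), where A conflicts with B
   and C conflicts with D, the outer loop at A skips B (conflict),
   unions C into A, and then skips D because union_stack_vars merged
   C's conflict with D into A.  The pass for B then unions D, giving
   the partitions {A, C} and {B, D}.  */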
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
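/* Note on the alignment computation above (illustrative, not part of
   the original source): offset & -offset isolates the lowest set bit,
   which is the largest power-of-two byte alignment the offset
   guarantees.  E.g. for offset 24 it yields 8, so a decl placed 24
   bytes from an aligned base is known to be 8-byte aligned; offset 0
   yields align == 0 and falls back to base_align.  */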
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
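/* Illustrative ASan layout produced above (not part of the original
   source): assuming ASAN_RED_ZONE_SIZE is 32, a 40-byte protected
   variable is allocated 40 + 32 bytes at alignment MAX (alignb, 32);
   asan_vec records the end of the preceding padding (prev_offset) and
   the end of the variable (offset + 40), bracketing the red zone that
   asan_emit_stack_protection later poisons.  */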
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when POINTER_SIZE also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);

  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = var;
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
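/* Illustrative decisions (not part of the original source; assumes the
   default value 32 of PARAM_MIN_SIZE_FOR_STACK_SHARING): at -O0 a
   16-byte local is "smallish" and is allocated immediately, keeping
   the coalescing problem small; with -fstack-protector the same local
   is deferred regardless, so that phase reordering can move vulnerable
   buffers to the top of the frame.  */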
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function.",
                   var);

          expand_one_stack_var (origvar);
        }

      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
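/* Illustrative classifications (not part of the original source;
   assumes the default --param ssp-buffer-size=8): "char buf[64]"
   yields SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[4]"
   yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "int v[16]"
   yields SPCT_HAS_ARRAY, and a struct containing any of these adds
   SPCT_HAS_AGGREGATE to the union of its field classifications.  */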
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
1986 /* Expand all variables used in the function. */
1989 expand_used_vars (void)
1991 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
1992 vec
<tree
> maybe_local_decls
= vNULL
;
1993 rtx_insn
*var_end_seq
= NULL
;
1996 bool gen_stack_protect_signal
= false;
1998 /* Compute the phase of the stack frame for this function. */
2000 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2001 int off
= STARTING_FRAME_OFFSET
% align
;
2002 frame_phase
= off
? align
- off
: 0;
2005 /* Set TREE_USED on all variables in the local_decls. */
2006 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2007 TREE_USED (var
) = 1;
2008 /* Clear TREE_USED on all variables associated with a block scope. */
2009 clear_tree_used (DECL_INITIAL (current_function_decl
));
2011 init_vars_expansion ();
2013 if (targetm
.use_pseudo_pic_reg ())
2014 pic_offset_table_rtx
= gen_reg_rtx (Pmode
);
2016 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
2018 if (bitmap_bit_p (SA
.partitions_for_parm_default_defs
, i
))
2021 tree var
= partition_to_var (SA
.map
, i
);
2023 gcc_assert (!virtual_operand_p (var
));
2025 expand_one_ssa_partition (var
);
2028 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
2029 gen_stack_protect_signal
2030 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2032 /* At this point all variables on the local_decls with TREE_USED
2033 set are not associated with any block scope. Lay them out. */
2035 len
= vec_safe_length (cfun
->local_decls
);
2036 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2038 bool expand_now
= false;
2040 /* Expanded above already. */
2041 if (is_gimple_reg (var
))
2043 TREE_USED (var
) = 0;
2046 /* We didn't set a block for static or extern because it's hard
2047 to tell the difference between a global variable (re)declared
2048 in a local scope, and one that's really declared there to
2049 begin with. And it doesn't really matter much, since we're
2050 not giving them stack space. Expand them now. */
2051 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
2054 /* Expand variables not associated with any block now. Those created by
2055 the optimizers could be live anywhere in the function. Those that
2056 could possibly have been scoped originally and detached from their
2057 block will have their allocation deferred so we coalesce them with
2058 others when optimization is enabled. */
2059 else if (TREE_USED (var
))
2062 /* Finally, mark all variables on the list as used. We'll use
2063 this in a moment when we expand those associated with scopes. */
2064 TREE_USED (var
) = 1;
2067 expand_one_var (var
, true, true);
2070 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
2072 rtx rtl
= DECL_RTL_IF_SET (var
);
2074 /* Keep artificial non-ignored vars in cfun->local_decls
2075 chain until instantiate_decls. */
2076 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2077 add_local_decl (cfun
, var
);
2078 else if (rtl
== NULL_RTX
)
2079 /* If rtl isn't set yet, which can happen e.g. with
2080 -fstack-protector, retry before returning from this
2082 maybe_local_decls
.safe_push (var
);
2086 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2088 +-----------------+-----------------+
2089 | ...processed... | ...duplicates...|
2090 +-----------------+-----------------+
2092 +-- LEN points here.
2094 We just want the duplicates, as those are the artificial
2095 non-ignored vars that we want to keep until instantiate_decls.
2096 Move them down and truncate the array. */
2097 if (!vec_safe_is_empty (cfun
->local_decls
))
2098 cfun
->local_decls
->block_remove (0, len
);
2100 /* At this point, all variables within the block tree with TREE_USED
2101 set are actually used by the optimized function. Lay them out. */
2102 expand_used_vars_for_block (outer_block
, true);
2104 if (stack_vars_num
> 0)
2106 add_scope_conflicts ();
2108 /* If stack protection is enabled, we don't share space between
2109 vulnerable data and non-vulnerable data. */
2110 if (flag_stack_protect
!= 0
2111 && (flag_stack_protect
!= SPCT_FLAG_EXPLICIT
2112 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
2113 && lookup_attribute ("stack_protect",
2114 DECL_ATTRIBUTES (current_function_decl
)))))
2115 add_stack_protection_conflicts ();
2117 /* Now that we have collected all stack variables, and have computed a
2118 minimal interference graph, attempt to save some stack space. */
2119 partition_stack_vars ();
2121 dump_stack_var_partition ();
2124 switch (flag_stack_protect
)
2127 create_stack_guard ();
2130 case SPCT_FLAG_STRONG
:
2131 if (gen_stack_protect_signal
2132 || cfun
->calls_alloca
|| has_protected_decls
2133 || lookup_attribute ("stack_protect",
2134 DECL_ATTRIBUTES (current_function_decl
)))
2135 create_stack_guard ();
2138 case SPCT_FLAG_DEFAULT
:
2139 if (cfun
->calls_alloca
|| has_protected_decls
2140 || lookup_attribute ("stack_protect",
2141 DECL_ATTRIBUTES (current_function_decl
)))
2142 create_stack_guard ();
2145 case SPCT_FLAG_EXPLICIT
:
2146 if (lookup_attribute ("stack_protect",
2147 DECL_ATTRIBUTES (current_function_decl
)))
2148 create_stack_guard ();
2154 /* Assign rtl to each variable based on these partitions. */
2155 if (stack_vars_num
> 0)
2157 struct stack_vars_data data
;
2159 data
.asan_vec
= vNULL
;
2160 data
.asan_decl_vec
= vNULL
;
2161 data
.asan_base
= NULL_RTX
;
2162 data
.asan_alignb
= 0;
2164 /* Reorder decls to be protected by iterating over the variables
2165 array multiple times, and allocating out of each phase in turn. */
2166 /* ??? We could probably integrate this into the qsort we did
2167 earlier, such that we naturally see these variables first,
2168 and thus naturally allocate things in the right order. */
2169 if (has_protected_decls
)
2171 /* Phase 1 contains only character arrays. */
2172 expand_stack_vars (stack_protect_decl_phase_1
, &data
);
2174 /* Phase 2 contains other kinds of arrays. */
2175 if (flag_stack_protect
== SPCT_FLAG_ALL
2176 || flag_stack_protect
== SPCT_FLAG_STRONG
2177 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
2178 && lookup_attribute ("stack_protect",
2179 DECL_ATTRIBUTES (current_function_decl
))))
2180 expand_stack_vars (stack_protect_decl_phase_2
, &data
);
2183 if ((flag_sanitize
& SANITIZE_ADDRESS
) && ASAN_STACK
)
2184 /* Phase 3, any partitions that need asan protection
2185 in addition to phase 1 and 2. */
2186 expand_stack_vars (asan_decl_phase_3
, &data
);
2188 if (!data
.asan_vec
.is_empty ())
2190 HOST_WIDE_INT prev_offset
= frame_offset
;
2191 HOST_WIDE_INT offset
, sz
, redzonesz
;
2192 redzonesz
= ASAN_RED_ZONE_SIZE
;
2193 sz
= data
.asan_vec
[0] - prev_offset
;
2194 if (data
.asan_alignb
> ASAN_RED_ZONE_SIZE
2195 && data
.asan_alignb
<= 4096
2196 && sz
+ ASAN_RED_ZONE_SIZE
>= (int) data
.asan_alignb
)
2197 redzonesz
= ((sz
+ ASAN_RED_ZONE_SIZE
+ data
.asan_alignb
- 1)
2198 & ~(data
.asan_alignb
- HOST_WIDE_INT_1
)) - sz
;
2200 = alloc_stack_frame_space (redzonesz
, ASAN_RED_ZONE_SIZE
);
2201 data
.asan_vec
.safe_push (prev_offset
);
2202 data
.asan_vec
.safe_push (offset
);
2203 /* Leave space for alignment if STRICT_ALIGNMENT. */
2204 if (STRICT_ALIGNMENT
)
2205 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode
)
2206 << ASAN_SHADOW_SHIFT
)
2207 / BITS_PER_UNIT
, 1);
2210 = asan_emit_stack_protection (virtual_stack_vars_rtx
,
2213 data
.asan_vec
.address (),
2214 data
.asan_decl_vec
.address (),
2215 data
.asan_vec
.length ());
2218 expand_stack_vars (NULL
, &data
);
2220 data
.asan_vec
.release ();
2221 data
.asan_decl_vec
.release ();
2224 fini_vars_expansion ();
  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2231 rtx rtl
= DECL_RTL_IF_SET (var
);
2233 /* Keep artificial non-ignored vars in cfun->local_decls
2234 chain until instantiate_decls. */
2235 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2236 add_local_decl (cfun
, var
);
2238 maybe_local_decls
.release ();
2240 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2241 if (STACK_ALIGNMENT_NEEDED
)
2243 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2244 if (!FRAME_GROWS_DOWNWARD
)
2245 frame_offset
+= align
- 1;
2246 frame_offset
&= -align
;
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
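/* Illustrative sketch of the dump this produces for a statement like
   "a_2 = b_1 + 1;" (insn numbers and registers vary by target and
   options):

     ;; a_2 = b_1 + 1;
     (insn 12 11 0 (set (reg:SI 91 [ a ])
             (plus:SI (reg:SI 90 [ b ])
                 (const_int 1 [0x1]))) -1 (nil))  */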
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
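/* Usage sketch (hypothetical caller, for illustration only): the map
   memoizes one label per block, so repeated queries return the same
   label:

     rtx_code_label *l1 = label_rtx_for_bb (bb);
     rtx_code_label *l2 = label_rtx_for_bb (bb);
     gcc_assert (l1 == l2);  */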
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
2365 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2366 Returns a new basic block if we've terminated the current basic
2367 block and created a new one. */
2370 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2372 basic_block new_bb
, dest
;
2376 rtx_insn
*last2
, *last
;
2377 enum tree_code code
;
2380 code
= gimple_cond_code (stmt
);
2381 op0
= gimple_cond_lhs (stmt
);
2382 op1
= gimple_cond_rhs (stmt
);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
2392 && TREE_CODE (op0
) == SSA_NAME
2393 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2394 && TREE_CODE (op1
) == INTEGER_CST
2395 && ((gimple_cond_code (stmt
) == NE_EXPR
2396 && integer_zerop (op1
))
2397 || (gimple_cond_code (stmt
) == EQ_EXPR
2398 && integer_onep (op1
)))
2399 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2401 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2402 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2404 enum tree_code code2
= gimple_assign_rhs_code (second
);
2405 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2408 op0
= gimple_assign_rhs1 (second
);
2409 op1
= gimple_assign_rhs2 (second
);
2411 /* If jumps are cheap and the target does not support conditional
2412 compare, turn some more codes into jumpy sequences. */
2413 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2414 && targetm
.gen_ccmp_first
== NULL
)
2416 if ((code2
== BIT_AND_EXPR
2417 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2418 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2419 || code2
== TRUTH_AND_EXPR
)
2421 code
= TRUTH_ANDIF_EXPR
;
2422 op0
= gimple_assign_rhs1 (second
);
2423 op1
= gimple_assign_rhs2 (second
);
2425 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2427 code
= TRUTH_ORIF_EXPR
;
2428 op0
= gimple_assign_rhs1 (second
);
2429 op1
= gimple_assign_rhs2 (second
);
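/* Illustrative example of the special case above: with cheap jumps and
   no conditional-compare support, the pair

     tmp_3 = a_1 & b_2;    (1-bit precision operands)
     if (tmp_3 != 0) goto ...;

   is expanded as TRUTH_ANDIF_EXPR, i.e. the jumpy sequence
   "if (a_1) if (b_2) goto ...", instead of materializing the BIT_AND
   result first.  */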
2435 last2
= last
= get_last_insn ();
2437 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2438 set_curr_insn_location (gimple_location (stmt
));
2440 /* These flags have no purpose in RTL land. */
2441 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2442 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2444 /* We can either have a pure conditional jump with one fallthru edge or
2445 two-way jump that needs to be decomposed into two basic blocks. */
2446 if (false_edge
->dest
== bb
->next_bb
)
2448 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2449 true_edge
->probability
);
2450 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2451 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2452 set_curr_insn_location (true_edge
->goto_locus
);
2453 false_edge
->flags
|= EDGE_FALLTHRU
;
2454 maybe_cleanup_end_of_block (false_edge
, last
);
2457 if (true_edge
->dest
== bb
->next_bb
)
2459 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2460 false_edge
->probability
);
2461 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2462 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2463 set_curr_insn_location (false_edge
->goto_locus
);
2464 true_edge
->flags
|= EDGE_FALLTHRU
;
2465 maybe_cleanup_end_of_block (true_edge
, last
);
2469 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2470 true_edge
->probability
);
2471 last
= get_last_insn ();
2472 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2473 set_curr_insn_location (false_edge
->goto_locus
);
2474 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2477 if (BARRIER_P (BB_END (bb
)))
2478 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2479 update_bb_for_insn (bb
);
2481 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2482 dest
= false_edge
->dest
;
2483 redirect_edge_succ (false_edge
, new_bb
);
2484 false_edge
->flags
|= EDGE_FALLTHRU
;
2485 new_bb
->count
= false_edge
->count
;
2486 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
2487 add_bb_to_loop (new_bb
, bb
->loop_father
);
2488 new_edge
= make_edge (new_bb
, dest
, 0);
2489 new_edge
->probability
= REG_BR_PROB_BASE
;
2490 new_edge
->count
= new_bb
->count
;
2491 if (BARRIER_P (BB_END (new_bb
)))
2492 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2493 update_bb_for_insn (new_bb
);
2495 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2497 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2499 set_curr_insn_location (true_edge
->goto_locus
);
2500 true_edge
->goto_locus
= curr_insn_location ();
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
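/* Sketch of the effect (dump notation is illustrative only): for a
   call inside a transaction whose restart list holds labels L1 and L2,
   the emitted call insn ends up carrying register notes

     (expr_list:REG_TM (label_ref L1)
        (expr_list:REG_TM (label_ref L2) (nil)))  */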
2538 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2542 expand_call_stmt (gcall
*stmt
)
2544 tree exp
, decl
, lhs
;
2548 if (gimple_call_internal_p (stmt
))
2550 expand_internal_call (stmt
);
2554 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2556 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2557 decl
= gimple_call_fndecl (stmt
);
2558 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2560 /* If this is not a builtin function, the function type through which the
2561 call is made may be different from the type of the function. */
2564 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2565 CALL_EXPR_FN (exp
));
2567 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2568 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2570 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2572 tree arg
= gimple_call_arg (stmt
, i
);
2574 /* TER addresses into arguments of builtin functions so we have a
2575 chance to infer more correct alignment information. See PR39954. */
2577 && TREE_CODE (arg
) == SSA_NAME
2578 && (def
= get_gimple_for_ssa_name (arg
))
2579 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2580 arg
= gimple_assign_rhs1 (def
);
2581 CALL_EXPR_ARG (exp
, i
) = arg
;
2584 if (gimple_has_side_effects (stmt
))
2585 TREE_SIDE_EFFECTS (exp
) = 1;
2587 if (gimple_call_nothrow_p (stmt
))
2588 TREE_NOTHROW (exp
) = 1;
2590 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2591 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2593 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2594 && (DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA
2595 || DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2596 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2598 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2599 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2600 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2601 CALL_WITH_BOUNDS_P (exp
) = gimple_call_with_bounds_p (stmt
);
2603 /* Ensure RTL is created for debug args. */
2604 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2606 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2611 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2613 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2614 expand_debug_expr (dtemp
);
2618 lhs
= gimple_call_lhs (stmt
);
2620 expand_assignment (lhs
, exp
, false);
2622 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2624 mark_transaction_restart_calls (stmt
);
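/* Illustrative summary: a GIMPLE call "x_1 = foo (a_2, 7);" is rebuilt
   above as a CALL_EXPR tree and stored through expand_assignment,
   while a call without an LHS goes through expand_expr with const0_rtx
   as the ignored target.  */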
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
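/* For example, n_occurrences (',', "r,m,i") is 2, corresponding to a
   constraint string with three alternatives.  */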
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true for a
   conflicting register variable, false for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
2713 /* Generate RTL for an asm statement with arguments.
2714 STRING is the instruction template.
2715 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2716 Each output or input has an expression in the TREE_VALUE and
2717 a tree list in TREE_PURPOSE which in turn contains a constraint
2718 name in TREE_VALUE (or NULL_TREE) and a constraint string
2720 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2721 that is clobbered by this insn.
2723 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2724 should be the fallthru basic block of the asm goto.
2726 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2727 Some elements of OUTPUTS may be replaced with trees representing temporary
2728 values. The caller should copy those temporary values to the originally
2731 VOL nonzero means the insn is volatile; don't optimize it. */
2734 expand_asm_stmt (gasm
*stmt
)
2736 class save_input_location
2741 explicit save_input_location(location_t where
)
2743 old
= input_location
;
2744 input_location
= where
;
2747 ~save_input_location()
2749 input_location
= old
;
2753 location_t locus
= gimple_location (stmt
);
2755 if (gimple_asm_input_p (stmt
))
2757 const char *s
= gimple_asm_string (stmt
);
2758 tree string
= build_string (strlen (s
), s
);
2759 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
  /* There are some legacy diagnostics in here, and this also avoids
     a sixth parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l (locus);
2767 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2768 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2769 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2772 /* ??? Diagnose during gimplification? */
2773 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2775 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2779 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2780 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2781 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2783 /* Copy the gimple vectors into new vectors that we can manipulate. */
2785 output_tvec
.safe_grow (noutputs
);
2786 input_tvec
.safe_grow (ninputs
);
2787 constraints
.safe_grow (noutputs
+ ninputs
);
2789 for (i
= 0; i
< noutputs
; ++i
)
2791 tree t
= gimple_asm_output_op (stmt
, i
);
2792 output_tvec
[i
] = TREE_VALUE (t
);
2793 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2795 for (i
= 0; i
< ninputs
; i
++)
2797 tree t
= gimple_asm_input_op (stmt
, i
);
2798 input_tvec
[i
] = TREE_VALUE (t
);
2799 constraints
[i
+ noutputs
]
2800 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2803 /* ??? Diagnose during gimplification? */
2804 if (! check_operand_nalternatives (constraints
))
2807 /* Count the number of meaningful clobbered registers, ignoring what
2808 we would ignore later. */
2809 auto_vec
<rtx
> clobber_rvec
;
2810 HARD_REG_SET clobbered_regs
;
2811 CLEAR_HARD_REG_SET (clobbered_regs
);
2813 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2815 clobber_rvec
.reserve (n
);
2816 for (i
= 0; i
< n
; i
++)
2818 tree t
= gimple_asm_clobber_op (stmt
, i
);
2819 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2822 j
= decode_reg_name_and_count (regname
, &nregs
);
2827 /* ??? Diagnose during gimplification? */
2828 error ("unknown register name %qs in %<asm%>", regname
);
2832 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2833 clobber_rvec
.safe_push (x
);
2837 /* Otherwise we should have -1 == empty string
2838 or -3 == cc, which is not a register. */
2839 gcc_assert (j
== -1 || j
== -3);
2843 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
2845 /* Clobbering the PIC register is an error. */
2846 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
2848 /* ??? Diagnose during gimplification? */
2849 error ("PIC register clobbered by %qs in %<asm%>",
2854 SET_HARD_REG_BIT (clobbered_regs
, reg
);
2855 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
2856 clobber_rvec
.safe_push (x
);
2860 unsigned nclobbers
= clobber_rvec
.length();
2862 /* First pass over inputs and outputs checks validity and sets
2863 mark_addressable if needed. */
2864 /* ??? Diagnose during gimplification? */
2866 for (i
= 0; i
< noutputs
; ++i
)
2868 tree val
= output_tvec
[i
];
2869 tree type
= TREE_TYPE (val
);
2870 const char *constraint
;
2875 /* Try to parse the output constraint. If that fails, there's
2876 no point in going further. */
2877 constraint
= constraints
[i
];
2878 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
2879 &allows_mem
, &allows_reg
, &is_inout
))
2886 && REG_P (DECL_RTL (val
))
2887 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
2888 mark_addressable (val
);
2891 for (i
= 0; i
< ninputs
; ++i
)
2893 bool allows_reg
, allows_mem
;
2894 const char *constraint
;
2896 constraint
= constraints
[i
+ noutputs
];
2897 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
2898 constraints
.address (),
2899 &allows_mem
, &allows_reg
))
2902 if (! allows_reg
&& allows_mem
)
2903 mark_addressable (input_tvec
[i
]);
2906 /* Second pass evaluates arguments. */
2908 /* Make sure stack is consistent for asm goto. */
2910 do_pending_stack_adjust ();
2911 int old_generating_concat_p
= generating_concat_p
;
2913 /* Vector of RTX's of evaluated output operands. */
2914 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
2915 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
2916 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
2918 output_rvec
.safe_grow (noutputs
);
2920 for (i
= 0; i
< noutputs
; ++i
)
2922 tree val
= output_tvec
[i
];
2923 tree type
= TREE_TYPE (val
);
2924 bool is_inout
, allows_reg
, allows_mem
, ok
;
2927 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
2928 noutputs
, &allows_mem
, &allows_reg
,
2932 /* If an output operand is not a decl or indirect ref and our constraint
2933 allows a register, make a temporary to act as an intermediate.
2934 Make the asm insn write into that, then we will copy it to
2935 the real output operand. Likewise for promoted variables. */
2937 generating_concat_p
= 0;
2939 if ((TREE_CODE (val
) == INDIRECT_REF
2942 && (allows_mem
|| REG_P (DECL_RTL (val
)))
2943 && ! (REG_P (DECL_RTL (val
))
2944 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
2948 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
2949 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
2951 op
= validize_mem (op
);
2953 if (! allows_reg
&& !MEM_P (op
))
2954 error ("output number %d not directly addressable", i
);
2955 if ((! allows_mem
&& MEM_P (op
))
2956 || GET_CODE (op
) == CONCAT
)
2959 op
= gen_reg_rtx (GET_MODE (op
));
2961 generating_concat_p
= old_generating_concat_p
;
2964 emit_move_insn (op
, old_op
);
2966 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
2967 emit_move_insn (old_op
, op
);
2968 after_rtl_seq
= get_insns ();
2969 after_rtl_end
= get_last_insn ();
2975 op
= assign_temp (type
, 0, 1);
2976 op
= validize_mem (op
);
2977 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
2978 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
2980 generating_concat_p
= old_generating_concat_p
;
2982 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
2983 expand_assignment (val
, make_tree (type
, op
), false);
2984 after_rtl_seq
= get_insns ();
2985 after_rtl_end
= get_last_insn ();
2988 output_rvec
[i
] = op
;
2991 inout_opnum
.safe_push (i
);
2994 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
2995 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
2997 input_rvec
.safe_grow (ninputs
);
2998 input_mode
.safe_grow (ninputs
);
3000 generating_concat_p
= 0;
3002 for (i
= 0; i
< ninputs
; ++i
)
3004 tree val
= input_tvec
[i
];
3005 tree type
= TREE_TYPE (val
);
3006 bool allows_reg
, allows_mem
, ok
;
3007 const char *constraint
;
3010 constraint
= constraints
[i
+ noutputs
];
3011 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3012 constraints
.address (),
3013 &allows_mem
, &allows_reg
);
3016 /* EXPAND_INITIALIZER will not generate code for valid initializer
3017 constants, but will still generate code for other types of operand.
3018 This is the behavior we want for constant constraints. */
3019 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3020 allows_reg
? EXPAND_NORMAL
3021 : allows_mem
? EXPAND_MEMORY
3022 : EXPAND_INITIALIZER
);
3024 /* Never pass a CONCAT to an ASM. */
3025 if (GET_CODE (op
) == CONCAT
)
3026 op
= force_reg (GET_MODE (op
), op
);
3027 else if (MEM_P (op
))
3028 op
= validize_mem (op
);
3030 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3032 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3033 op
= force_reg (TYPE_MODE (type
), op
);
3034 else if (!allows_mem
)
3035 warning (0, "asm operand %d probably doesn%'t match constraints",
3037 else if (MEM_P (op
))
	    /* We won't recognize either volatile memory or memory
	       with a queued address as a valid memory_operand at this
	       point.  Ignore it: clearly this *is* a memory.  */
3047 input_mode
[i
] = TYPE_MODE (type
);
3050 /* For in-out operands, copy output rtx to input rtx. */
3051 unsigned ninout
= inout_opnum
.length();
3052 for (i
= 0; i
< ninout
; i
++)
3054 int j
= inout_opnum
[i
];
3055 rtx o
= output_rvec
[j
];
3057 input_rvec
.safe_push (o
);
3058 input_mode
.safe_push (GET_MODE (o
));
3061 sprintf (buffer
, "%d", j
);
3062 constraints
.safe_push (ggc_strdup (buffer
));
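/* Sketch: for an in-out "+r" operand at output position 0, the loop
   above appends the output rtx as an extra input whose constraint is
   the matching-constraint string "0".  */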
3066 /* Sometimes we wish to automatically clobber registers across an asm.
3067 Case in point is when the i386 backend moved from cc0 to a hard reg --
3068 maintaining source-level compatibility means automatically clobbering
3069 the flags register. */
3070 rtx_insn
*after_md_seq
= NULL
;
3071 if (targetm
.md_asm_adjust
)
3072 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3073 constraints
, clobber_rvec
,
3076 /* Do not allow the hook to change the output and input count,
3077 lest it mess up the operand numbering. */
3078 gcc_assert (output_rvec
.length() == noutputs
);
3079 gcc_assert (input_rvec
.length() == ninputs
);
3080 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3082 /* But it certainly can adjust the clobbers. */
3083 nclobbers
= clobber_rvec
.length();
3085 /* Third pass checks for easy conflicts. */
3086 /* ??? Why are we doing this on trees instead of rtx. */
3088 bool clobber_conflict_found
= 0;
3089 for (i
= 0; i
< noutputs
; ++i
)
3090 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3091 clobber_conflict_found
= 1;
3092 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3093 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3094 clobber_conflict_found
= 1;
3096 /* Make vectors for the expression-rtx, constraint strings,
3097 and named operands. */
3099 rtvec argvec
= rtvec_alloc (ninputs
);
3100 rtvec constraintvec
= rtvec_alloc (ninputs
);
3101 rtvec labelvec
= rtvec_alloc (nlabels
);
3103 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3104 : GET_MODE (output_rvec
[0])),
3105 ggc_strdup (gimple_asm_string (stmt
)),
3106 empty_string
, 0, argvec
, constraintvec
,
3108 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3110 for (i
= 0; i
< ninputs
; ++i
)
3112 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3113 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3114 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3115 constraints
[i
+ noutputs
],
3119 /* Copy labels to the vector. */
3120 rtx_code_label
*fallthru_label
= NULL
;
3123 basic_block fallthru_bb
= NULL
;
3124 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3126 fallthru_bb
= fallthru
->dest
;
3128 for (i
= 0; i
< nlabels
; ++i
)
3130 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3132 /* If asm goto has any labels in the fallthru basic block, use
3133 a label that we emit immediately after the asm goto. Expansion
3134 may insert further instructions into the same basic block after
3135 asm goto and if we don't do this, insertion of instructions on
3136 the fallthru edge might misbehave. See PR58670. */
3137 if (fallthru_bb
&& label_to_block_fn (cfun
, label
) == fallthru_bb
)
3139 if (fallthru_label
== NULL_RTX
)
3140 fallthru_label
= gen_label_rtx ();
3144 r
= label_rtx (label
);
3145 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3149 /* Now, for each output, construct an rtx
3150 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3151 ARGVEC CONSTRAINTS OPNAMES))
3152 If there is more than one, put them inside a PARALLEL. */
3154 if (nlabels
> 0 && nclobbers
== 0)
3156 gcc_assert (noutputs
== 0);
3157 emit_jump_insn (body
);
3159 else if (noutputs
== 0 && nclobbers
== 0)
3161 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3164 else if (noutputs
== 1 && nclobbers
== 0)
3166 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3167 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3177 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3179 /* For each output operand, store a SET. */
3180 for (i
= 0; i
< noutputs
; ++i
)
3182 rtx src
, o
= output_rvec
[i
];
3185 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3190 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3191 ASM_OPERANDS_TEMPLATE (obody
),
3192 constraints
[i
], i
, argvec
,
3193 constraintvec
, labelvec
, locus
);
3194 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3196 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3199 /* If there are no outputs (but there are some clobbers)
3200 store the bare ASM_OPERANDS into the PARALLEL. */
3202 XVECEXP (body
, 0, i
++) = obody
;
3204 /* Store (clobber REG) for each clobbered register specified. */
3205 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3207 rtx clobbered_reg
= clobber_rvec
[j
];
3209 /* Do sanity check for overlap between clobbers and respectively
3210 input and outputs that hasn't been handled. Such overlap
3211 should have been detected and reported above. */
3212 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3214 /* We test the old body (obody) contents to avoid
3215 tripping over the under-construction body. */
3216 for (unsigned k
= 0; k
< noutputs
; ++k
)
3217 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3218 internal_error ("asm clobber conflict with output operand");
3220 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3221 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3222 internal_error ("asm clobber conflict with input operand");
3225 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3229 emit_jump_insn (body
);
3234 generating_concat_p
= old_generating_concat_p
;
3237 emit_label (fallthru_label
);
3240 emit_insn (after_md_seq
);
3242 emit_insn (after_rtl_seq
);
3245 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
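/* For illustration: a GIMPLE "goto *p_1;" reaches
   expand_computed_goto via its pointer operand, while "goto lab;"
   carries a LABEL_DECL and takes the expand_goto path above.  */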
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
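/* Worked sketch: returning a 'short' from a function whose ABI
   promotes return values to 'int' makes promote_function_mode yield
   SImode for the HImode decl, so VAL passes through convert_modes
   before the final move into the return register.  */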
3335 /* Generate RTL to evaluate the expression RETVAL and return it
3336 from the current function. */
3339 expand_return (tree retval
, tree bounds
)
3346 /* If function wants no value, give it none. */
3347 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3349 expand_normal (retval
);
3350 expand_null_return ();
3354 if (retval
== error_mark_node
)
3356 /* Treat this like a return of no value from a function that
3358 expand_null_return ();
3361 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3362 || TREE_CODE (retval
) == INIT_EXPR
)
3363 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3364 retval_rhs
= TREE_OPERAND (retval
, 1);
3366 retval_rhs
= retval
;
3368 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3370 /* Put returned bounds to the right place. */
3371 bounds_rtl
= DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl
));
3377 if (bounds
&& bounds
!= error_mark_node
)
3379 bnd
= expand_normal (bounds
);
3380 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3382 else if (REG_P (bounds_rtl
))
3385 bnd
= chkp_expand_zero_bounds ();
3388 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3389 addr
= gen_rtx_MEM (Pmode
, addr
);
3390 bnd
= targetm
.calls
.load_bounds_for_arg (addr
, NULL
, NULL
);
3393 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3399 gcc_assert (GET_CODE (bounds_rtl
) == PARALLEL
);
3402 bnd
= chkp_expand_zero_bounds ();
3405 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3406 addr
= gen_rtx_MEM (Pmode
, addr
);
3409 for (n
= 0; n
< XVECLEN (bounds_rtl
, 0); n
++)
3411 rtx slot
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 0);
3414 rtx offs
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 1);
3415 rtx from
= adjust_address (addr
, Pmode
, INTVAL (offs
));
3416 bnd
= targetm
.calls
.load_bounds_for_arg (from
, NULL
, NULL
);
3418 targetm
.calls
.store_returned_bounds (slot
, bnd
);
3422 else if (chkp_function_instrumented_p (current_function_decl
)
3423 && !BOUNDED_P (retval_rhs
)
3424 && chkp_type_has_pointer (TREE_TYPE (retval_rhs
))
3425 && TREE_CODE (retval_rhs
) != RESULT_DECL
)
3427 rtx addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3428 addr
= gen_rtx_MEM (Pmode
, addr
);
3430 gcc_assert (MEM_P (result_rtl
));
3432 chkp_copy_bounds_for_stack_parm (result_rtl
, addr
, TREE_TYPE (retval_rhs
));
3435 /* If we are returning the RESULT_DECL, then the value has already
3436 been stored into it, so we don't have to do anything special. */
3437 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3438 expand_value_return (result_rtl
);
3440 /* If the result is an aggregate that is being returned in one (or more)
3441 registers, load the registers here. */
3443 else if (retval_rhs
!= 0
3444 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3445 && REG_P (result_rtl
))
3447 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3450 /* Use the mode of the result value on the return register. */
3451 PUT_MODE (result_rtl
, GET_MODE (val
));
3452 expand_value_return (val
);
3455 expand_null_return ();
3457 else if (retval_rhs
!= 0
3458 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3459 && (REG_P (result_rtl
)
3460 || (GET_CODE (result_rtl
) == PARALLEL
)))
3462 /* Compute the return value into a temporary (usually a pseudo reg). */
3464 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3465 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3466 val
= force_not_mem (val
);
3467 expand_value_return (val
);
3471 /* No hard reg used; calculate value into hard return reg. */
3472 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3473 expand_value_return (result_rtl
);
3477 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3478 STMT that doesn't require special handling for outgoing edges. That
3479 is no tailcalls and no GIMPLE_COND. */
3482 expand_gimple_stmt_1 (gimple
*stmt
)
3486 set_curr_insn_location (gimple_location (stmt
));
3488 switch (gimple_code (stmt
))
3491 op0
= gimple_goto_dest (stmt
);
3492 if (TREE_CODE (op0
) == LABEL_DECL
)
3495 expand_computed_goto (op0
);
3498 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3501 case GIMPLE_PREDICT
:
3504 expand_case (as_a
<gswitch
*> (stmt
));
3507 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3510 expand_call_stmt (as_a
<gcall
*> (stmt
));
3515 tree bnd
= gimple_return_retbnd (as_a
<greturn
*> (stmt
));
3516 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3518 if (op0
&& op0
!= error_mark_node
)
3520 tree result
= DECL_RESULT (current_function_decl
);
3522 /* If we are not returning the current function's RESULT_DECL,
3523 build an assignment to it. */
3526 /* I believe that a function's RESULT_DECL is unique. */
3527 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3529 /* ??? We'd like to use simply expand_assignment here,
3530 but this fails if the value is of BLKmode but the return
3531 decl is a register. expand_return has special handling
3532 for this combination, which eventually should move
3533 to common code. See comments there. Until then, let's
3534 build a modify expression :-/ */
3535 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3538 /* Mark we have return statement with missing bounds. */
3539 if (!bnd
&& chkp_function_instrumented_p (cfun
->decl
))
3540 bnd
= error_mark_node
;
3544 expand_null_return ();
3546 expand_return (op0
, bnd
);
3552 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3553 tree lhs
= gimple_assign_lhs (assign_stmt
);
3555 /* Tree expand used to fiddle with |= and &= of two bitfield
3556 COMPONENT_REFs here. This can't happen with gimple, the LHS
3557 of binary assigns must be a gimple reg. */
3559 if (TREE_CODE (lhs
) != SSA_NAME
3560 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3561 == GIMPLE_SINGLE_RHS
)
3563 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3564 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3565 == GIMPLE_SINGLE_RHS
);
3566 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3567 /* Do not put locations on possibly shared trees. */
3568 && !is_gimple_min_invariant (rhs
))
3569 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
	  if (TREE_CLOBBER_P (rhs))
	    /* This is a clobber to mark the going out of scope for
	       this LHS.  */
	    ;
	  else
	    expand_assignment (lhs, rhs,
			       gimple_assign_nontemporal_move_p (
				 assign_stmt));
	}
      else
	{
	  rtx target, temp;
3582 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3583 struct separate_ops ops
;
3584 bool promoted
= false;
3586 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3587 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3590 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3591 ops
.type
= TREE_TYPE (lhs
);
3592 switch (get_gimple_rhs_class (ops
.code
))
3594 case GIMPLE_TERNARY_RHS
:
3595 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3597 case GIMPLE_BINARY_RHS
:
3598 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3600 case GIMPLE_UNARY_RHS
:
3601 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3606 ops
.location
= gimple_location (stmt
);
3608 /* If we want to use a nontemporal store, force the value to
3609 register first. If we store into a promoted register,
3610 don't directly expand to target. */
3611 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3612 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3619 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3620 /* If TEMP is a VOIDmode constant, use convert_modes to make
3621 sure that we properly convert it. */
3622 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3624 temp
= convert_modes (GET_MODE (target
),
3625 TYPE_MODE (ops
.type
),
3627 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3628 GET_MODE (target
), temp
, unsignedp
);
3631 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3633 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3637 temp
= force_operand (temp
, target
);
3639 emit_move_insn (target
, temp
);
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
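/* Sketch: for a statement in EH landing-pad region 3, every
   potentially-throwing insn emitted for it receives a REG_EH_REGION
   note of 3 above, which later drives the creation of EH edges.  */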
3701 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3702 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3703 generated a tail call (something that might be denied by the ABI
3704 rules governing the call; see calls.c).
3706 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3707 can still reach the rest of BB. The case here is __builtin_sqrt,
3708 where the NaN result goes through the external function (with a
3709 tailcall) and the normal result happens via a sqrt instruction. */
3712 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3714 rtx_insn
*last2
, *last
;
3720 last2
= last
= expand_gimple_stmt (stmt
);
3722 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3723 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3726 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3728 *can_fallthru
= true;
3732 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3733 Any instructions emitted here are about to be deleted. */
3734 do_pending_stack_adjust ();
3736 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3737 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3738 EH or abnormal edges, we shouldn't have created a tail call in
3739 the first place. So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */
3746 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3748 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3750 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3752 e
->dest
->count
-= e
->count
;
3753 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
3754 if (e
->dest
->count
< 0)
3756 if (e
->dest
->frequency
< 0)
3757 e
->dest
->frequency
= 0;
3760 probability
+= e
->probability
;
3767 /* This is somewhat ugly: the call_expr expander often emits instructions
3768 after the sibcall (to perform the function return). These confuse the
3769 find_many_sub_basic_blocks code, so we need to get rid of these. */
3770 last
= NEXT_INSN (last
);
3771 gcc_assert (BARRIER_P (last
));
3773 *can_fallthru
= false;
3774 while (NEXT_INSN (last
))
	 /* For instance, an sqrt builtin expander expands an if ()
	    with a sibcall in the then-arm and a label for the
	    else-arm.  */
3778 if (LABEL_P (NEXT_INSN (last
)))
3780 *can_fallthru
= true;
3783 delete_insn (NEXT_INSN (last
));
3786 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3788 e
->probability
+= probability
;
3791 update_bb_for_insn (bb
);
3793 if (NEXT_INSN (last
))
3795 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3798 if (BARRIER_P (last
))
3799 BB_END (bb
) = PREV_INSN (last
);
3802 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */
static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */
static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */
static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
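/* Worked example for floor_sdiv_adjust: truncating -7 / 2 gives -3
   with remainder MOD == -1; OP1 / MOD == 2 / -1 is negative, so the
   adjustment is -1 and floor (-7 / 2) == -3 + -1 == -4.  */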
3886 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3890 convert_debug_memory_address (machine_mode mode
, rtx x
,
3893 machine_mode xmode
= GET_MODE (x
);
3895 #ifndef POINTERS_EXTEND_UNSIGNED
3896 gcc_assert (mode
== Pmode
3897 || mode
== targetm
.addr_space
.address_mode (as
));
3898 gcc_assert (xmode
== mode
|| xmode
== VOIDmode
);
3902 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
3904 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
3907 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
3908 x
= lowpart_subreg (mode
, x
, xmode
);
3909 else if (POINTERS_EXTEND_UNSIGNED
> 0)
3910 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
3911 else if (!POINTERS_EXTEND_UNSIGNED
)
3912 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
3915 switch (GET_CODE (x
))
3918 if ((SUBREG_PROMOTED_VAR_P (x
)
3919 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
3920 || (GET_CODE (SUBREG_REG (x
)) == PLUS
3921 && REG_P (XEXP (SUBREG_REG (x
), 0))
3922 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
3923 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
3924 && GET_MODE (SUBREG_REG (x
)) == mode
)
3925 return SUBREG_REG (x
);
3928 temp
= gen_rtx_LABEL_REF (mode
, LABEL_REF_LABEL (x
));
3929 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
3932 temp
= shallow_copy_rtx (x
);
3933 PUT_MODE (temp
, mode
);
3936 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
3938 temp
= gen_rtx_CONST (mode
, temp
);
3942 if (CONST_INT_P (XEXP (x
, 1)))
3944 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
3946 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
  /* Don't know how to express ptr_extend as an operation in debug info.  */
3955 #endif /* POINTERS_EXTEND_UNSIGNED */
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
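/* Illustrative effect: when use_5 heads a TER chain deeper than the
   cutoff, a debug bind

     # DEBUG D#1 => use_5

   is inserted right after its definition, and deeper debug expressions
   refer to D#1, keeping TER-substituted debug trees shallow.  */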
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
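/* Sketch (notation illustrative): for a parameter that arrived in
   hard register 5, the first case above yields
   (entry_value:SI (reg:SI 5)), letting var-tracking refer to the
   value the register held on entry to the function.  */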
4057 /* Return an RTX equivalent to the value of the tree expression EXP. */
4060 expand_debug_expr (tree exp
)
4062 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
4063 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4064 machine_mode inner_mode
= VOIDmode
;
4065 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4068 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4070 case tcc_expression
:
4071 switch (TREE_CODE (exp
))
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      switch (TREE_CODE (exp))
	case WIDEN_LSHIFT_EXPR:
	  /* Ensure second operand isn't wider than the first one.  */
	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  if (SCALAR_INT_MODE_P (inner_mode))
	      machine_mode opmode = mode;
	      if (VECTOR_MODE_P (mode))
		opmode = GET_MODE_INNER (mode);
	      if (SCALAR_INT_MODE_P (opmode)
		  && (GET_MODE_PRECISION (opmode)
		      < GET_MODE_PRECISION (inner_mode)))
		op1 = lowpart_subreg (opmode, op1, inner_mode);
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
    case tcc_exceptional:
    case tcc_declaration:
  switch (TREE_CODE (exp))
      if (!lookup_constant_def (exp))
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
      /* Fall through...  */
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);
    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);
      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);
      op0 = DECL_RTL_IF_SET (exp);
      /* This decl was probably optimized away.  */
	  if (TREE_CODE (exp) != VAR_DECL
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	  op0 = make_decl_rtl_for_debug (exp);
	  || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	  || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	op0 = copy_rtx (op0);
      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but mode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
      inner_mode = GET_MODE (op0);
      if (mode == inner_mode)
      if (inner_mode == VOIDmode)
	  if (TREE_CODE (exp) == SSA_NAME)
	    inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  if (mode == inner_mode)
      if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	    op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	  else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	    op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
      else if (FLOAT_MODE_P (mode))
	  gcc_assert (TREE_CODE (exp) != SSA_NAME);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
      else if (FLOAT_MODE_P (inner_mode))
	    op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
      else if (CONSTANT_P (op0)
	       || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
	op0 = lowpart_subreg (mode, op0, inner_mode);
      else if (UNARY_CLASS_P (exp)
	       ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
	op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
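      /* To summarize the conversion cascade above: float -> float uses a
	 subreg at equal width, FLOAT_TRUNCATE when narrowing and
	 FLOAT_EXTEND when widening; int -> float uses FLOAT or
	 UNSIGNED_FLOAT; float -> int uses FIX or UNSIGNED_FIX; and
	 int -> int uses a lowpart subreg when narrowing (or for
	 constants) and SIGN_EXTEND/ZERO_EXTEND when widening, choosing
	 the extension from the signedness of the source type.  E.g.
	 converting a signed HImode value to SImode yields
	 (sign_extend:SI op0), while SImode -> HImode yields
	 (subreg:HI op0 0).  */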
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	    return expand_debug_expr (newexp);
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (TREE_CODE (exp) == MEM_REF)
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
      if (op0 == NULL_RTX)
      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);
    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
      if (op0 == NULL_RTX)
      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
	HOST_WIDE_INT bitsize, bitpos;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, false);
	orig_op0 = op0 = expand_debug_expr (tem);
	    machine_mode addrmode, offmode;
	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	    op1 = expand_debug_expr (offset);
	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));
	    if (addrmode != offmode)
	      op1 = lowpart_subreg (addrmode, op1, offmode);
	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       ...  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
	if (mode1 == VOIDmode)
	    mode1 = smallest_mode_for_size (bitsize, MODE_INT);
	if (bitpos >= BITS_PER_UNIT)
	    op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	    bitpos %= BITS_PER_UNIT;
	else if (bitpos < 0)
	      = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
	    op0 = adjust_address_nv (op0, mode1, units);
	    bitpos += units * BITS_PER_UNIT;
	else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
	  op0 = adjust_address_nv (op0, mode, 0);
	else if (GET_MODE (op0) != mode1)
	  op0 = adjust_address_nv (op0, mode1, 0);
	  op0 = copy_rtx (op0);
	if (op0 == orig_op0)
	  op0 = shallow_copy_rtx (op0);
	set_mem_attributes (op0, exp, 0);
	if (bitpos == 0 && mode == GET_MODE (op0))
	if (GET_MODE (op0) == BLKmode)
	if ((bitpos % BITS_PER_UNIT) == 0
	    && bitsize == GET_MODE_BITSIZE (mode1))
	    machine_mode opmode = GET_MODE (op0);
	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));
	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (bitpos >= GET_MODE_BITSIZE (opmode))
	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
	      return simplify_gen_subreg (mode, op0, opmode,
					  bitpos / BITS_PER_UNIT);
	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      return simplify_gen_unary (ABS, mode, op0, mode);
      return simplify_gen_unary (NEG, mode, op0, mode);
      return simplify_gen_unary (NOT, mode, op0, mode);
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	  if (GET_MODE_BITSIZE (GET_MODE (op0))
	      < GET_MODE_BITSIZE (GET_MODE (op1))
	      /* If OP0 is a partial mode, then we must truncate, even
		 if it has the same bitsize as OP1 as GCC's
		 representation of partial modes is opaque.  */
	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
		  && (GET_MODE_BITSIZE (GET_MODE (op0))
		      == GET_MODE_BITSIZE (GET_MODE (op1)))))
	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
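      /* E.g. on a hypothetical target with 64-bit Pmode pointers but a
	 32-bit sizetype, OP1 arrives here in SImode and must be widened
	 to DImode before the addition; the SIGN_EXTEND above performs
	 that widening, and the TRUNCATE above handles the opposite
	 mismatch.  */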
      return simplify_gen_binary (PLUS, mode, op0, op1);
      return simplify_gen_binary (MINUS, mode, op0, op1);
      return simplify_gen_binary (MULT, mode, op0, op1);
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
	return simplify_gen_binary (UDIV, mode, op0, op1);
	return simplify_gen_binary (DIV, mode, op0, op1);
    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
    case FLOOR_DIV_EXPR:
	return simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
    case FLOOR_MOD_EXPR:
	return simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
	  return simplify_gen_binary (PLUS, mode, mod, adj);
    case ROUND_DIV_EXPR:
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
    case ROUND_MOD_EXPR:
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
	  return simplify_gen_binary (PLUS, mode, mod, adj);
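      /* A worked example of the adjustment scheme above, for
	 FLOOR_DIV_EXPR in a signed mode: for -7 / 2, the truncating DIV
	 gives -3 with MOD -1.  Floor division wants -4, so
	 floor_sdiv_adjust yields -1 (the remainder is nonzero and its
	 sign differs from the divisor's) and the final PLUS produces
	 -3 + -1 = -4.  CEIL_* rounds the other way (unsigned 7 / 2 ->
	 3 + 1 = 4), and ROUND_* adjusts towards the nearest integer.
	 The *_MOD_EXPR variants rebuild the matching remainder as
	 mod + -(adj * op1), e.g. -1 + -(-1 * 2) = 1 for -7 mod 2 under
	 floor semantics.  */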
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
      return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
      return simplify_gen_binary (ROTATE, mode, op0, op1);
      return simplify_gen_binary (ROTATERT, mode, op0, op1);
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);
      return simplify_gen_binary (IOR, mode, op0, op1);
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);
    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
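      /* The short-circuit forms map directly onto conditional rtl:
	 for TRUTH_ANDIF_EXPR, "a && b" becomes (if_then_else a b 0),
	 so b only matters when a is nonzero, and for TRUTH_ORIF_EXPR,
	 "a || b" becomes (if_then_else a 1 b).  TRUTH_NOT_EXPR is
	 simply a comparison against zero.  */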
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   GET_MODE_INNER (mode)));
	  machine_mode imode = GET_MODE_INNER (mode);
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	      machine_mode ifmode = int_mode_for_mode (mode);
	      machine_mode ihmode = int_mode_for_mode (imode);
	      if (ifmode == BLKmode || ihmode == BLKmode)
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
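      /* In the non-CONCAT, non-MEM case above the complex value lives
	 in a single wide rtx, so the parts are carved out by bit
	 position: e.g. for an SCmode value viewed through DImode, the
	 real part is (zero_extract x 32 0) and the imaginary part is
	 (zero_extract x 32 32); conjugation then just wraps the
	 imaginary half in a NEG before re-CONCATing the two halves.  */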
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
					   &bitoffset, &bitsize, &maxsize);
	      if ((TREE_CODE (decl) == VAR_DECL
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && (!TREE_ADDRESSABLE (decl)
		      || target_for_debug_bind (decl))
		  && (bitoffset % BITS_PER_UNIT) == 0
		  && bitsize == maxsize)
		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
		  return plus_constant (mode, base,
					bitoffset / BITS_PER_UNIT);
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
		      || (GET_CODE (op0) == PLUS
			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
			  && CONST_INT_P (XEXP (op0, 1)))))
		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
		  if (!op1 || !CONST_INT_P (op1))
		  return plus_constant (mode, op0, INTVAL (op1));
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
      op0 = gen_rtx_CONCATN
	(mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
      for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	  op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
	  XVECEXP (op0, 0, i) = op1;
      if (TREE_CLOBBER_P (exp))
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	      op1 = expand_debug_expr (val);
	      XVECEXP (op0, 0, i) = op1;
	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	goto flag_unsupported;
      /* ??? Maybe handle some builtins?  */
	gimple *g = get_gimple_for_ssa_name (exp);
	    if (deep_ter_debug_map)
		tree *slot = deep_ter_debug_map->get (exp);
	    t = gimple_assign_rhs_to_tree (g);
	    op0 = expand_debug_expr (t);
	    /* If this is a reference to an incoming value of
	       parameter that is never used in the code or where the
	       incoming value is never used in the code, use
	       PARM_DECL's DECL_RTL if set.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
		&& SSA_NAME_VAR (exp)
		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
		&& has_zero_uses (exp))
		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
	      op0 = expand_debug_expr (SSA_NAME_VAR (exp));
	    int part = var_to_partition (SA.map, exp);
	    if (part == NO_PARTITION)
	    gcc_assert (part >= 0
			&& (unsigned) part < SA.map->num_partitions);
	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
      /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	    return simplify_gen_binary (MINUS, mode, op2, op0);
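      /* E.g. a WIDEN_MULT_EXPR of two unsigned HImode values into
	 SImode becomes (mult:SI (zero_extend:SI op0)
	 (zero_extend:SI op1)); the PLUS/MINUS variants then fold the
	 third operand in, with WIDEN_MULT_MINUS_EXPR computing
	 op2 - op0 * op1.  */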
    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

expand_debug_source_expr (tree exp)
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
      mode = DECL_MODE (exp);
      op0 = expand_debug_parm_decl (exp);
      /* See if this isn't an argument that has been completely
	 optimized out.  */
      if (!DECL_RTL_SET_P (exp)
	  && !DECL_INCOMING_RTL (exp)
	  && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  tree aexp = DECL_ORIGIN (exp);
	  if (DECL_CONTEXT (aexp)
	      == DECL_ABSTRACT_ORIGIN (current_function_decl))
	      vec<tree, va_gc> **debug_args;
	      debug_args = decl_debug_args_lookup (current_function_decl);
	      if (debug_args != NULL)
		  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
		      return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
  if (op0 == NULL_RTX)
  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
  else if (FLOAT_MODE_P (mode))
  else if (FLOAT_MODE_P (inner_mode))
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
  else if (CONSTANT_P (op0)
	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
  if (exp == NULL_RTX)
  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
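/* For instance, a location such as
   (plus (mult (plus (mult (reg) (reg)) (reg)) (reg)) (reg))
   nests more deeply than the recursion above allows, so the over-deep
   subexpression is pulled out into a fresh DEBUG_EXPR, bound to its
   value by a new debug insn emitted just before INSN, and the
   DEBUG_EXPR is left behind in the original location.  This keeps
   every location expression boundedly small for later var-tracking.  */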
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

expand_debug_locations (void)
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
	tree value = (tree) INSN_VAR_LOCATION_LOC (insn);
	rtx_insn *prev_insn, *insn2;

	if (value == NULL_TREE)
	if (INSN_VAR_LOCATION_STATUS (insn)
	    == VAR_INIT_STATUS_UNINITIALIZED)
	  val = expand_debug_source_expr (value);
	/* The avoid_deep_ter_for_debug function inserts
	   debug bind stmts after SSA_NAME definition, with the
	   SSA_NAME as the whole bind location.  Disable temporarily
	   expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	   being defined in this DEBUG_INSN.  */
	else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	    tree *slot = deep_ter_debug_map->get (value);
	    if (*slot == INSN_VAR_LOCATION_DECL (insn))
	    val = expand_debug_expr (value);
	    *slot = INSN_VAR_LOCATION_DECL (insn);
	  val = expand_debug_expr (value);
	  gcc_assert (last == get_last_insn ());
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);

  flag_strict_aliasing = save_strict_alias;
/* Swap the operands of commutative operations so that the more
   expensive one is expanded first.  */

reorder_operands (basic_block bb)
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
      cost = estimate_num_insns (stmt, &eni_size_weights);
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	  tree use = USE_FROM_PTR (use_p);
	  if (TREE_CODE (use) != SSA_NAME)
	  def_stmt = get_gimple_for_ssa_name (use);
	  lattice[i] += lattice[gimple_uid (def_stmt)];
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Swap operands in stmt:\n");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		   def0 ? lattice[gimple_uid (def0)] : 0,
		   lattice[gimple_uid (def1)]);
      swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			 gimple_assign_rhs2_ptr (stmt));
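/* As an illustration: in "x = a + t" where a is a cheap load but t is
   the root of a long TERed computation chain, the lattice cost of t's
   definition exceeds a's, so the statement is rewritten as "x = t + a"
   before expansion, letting the expensive subtree be expanded first.  */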
/* Expand basic block BB from GIMPLE trees to RTL.  */

expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
  gimple_stmt_iterator gsi;
  gimple *stmt = NULL;

    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
      rtx_code_label **elt = lab_rtx_for_bb->get (bb);
      last = get_last_insn ();
	  expand_gimple_stmt (stmt);
      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 contained a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	 ...  */
      if (MAY_HAVE_DEBUG_INSNS
	  && !is_gimple_debug (stmt))
	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
			have_debug_uses = true;

		if (have_debug_uses)
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		      mode = DECL_MODE (value);
		      mode = TYPE_MODE (TREE_TYPE (value));
		    DECL_MODE (vexpr) = mode;

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx) value,
			 VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
			if (!gimple_debug_bind_p (debugstmt))
			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);
			update_stmt (debugstmt);

	  set_curr_insn_location (sloc);

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
      else if (gimple_debug_bind_p (stmt))
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;
	      tree var = gimple_debug_bind_get_var (stmt);

	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
		  && TREE_CODE (var) != LABEL_DECL
		  && !target_for_debug_bind (var))
		goto delink_debug_stmt;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

		mode = DECL_MODE (var);
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		  (mode, var, (rtx) value, VAR_INIT_STATUS_INITIALIZED);

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  PAT_VAR_LOCATION_LOC (val) = (rtx) value;

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      if (gsi_end_p (nsi))
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))

	  set_curr_insn_location (sloc);
      else if (gimple_debug_source_bind_p (stmt))
	  location_t sloc = curr_insn_location ();
	  tree var = gimple_debug_source_bind_get_var (stmt);
	  tree value = gimple_debug_source_bind_get_value (stmt);

	  last = get_last_insn ();

	  set_curr_insn_location (gimple_location (stmt));

	  mode = DECL_MODE (var);

	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx) value,
				      VAR_INIT_STATUS_UNINITIALIZED);

	  emit_debug_insn (val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	      /* We can't dump the insn with a TREE where an RTX
		 is expected.  */
	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	      PAT_VAR_LOCATION_LOC (val) = (rtx) value;

	  set_curr_insn_location (sloc);
	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_tail_p (call_stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (call_stmt, false);

	  if (call_stmt && gimple_call_tail_p (call_stmt))
	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  && bitmap_bit_p (SA.values,
				   SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;

  /* Expanded RTL can create a jump in the last instruction of block.
     This later might be assumed to be a jump to successor and break edge
     insertion.  We need to insert dummy move to prevent this.  PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));

  update_bb_for_insn (bb);
/* Create a basic block for initialization code.  */

construct_init_block (void)
  basic_block init_block, first_block;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't
     need a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  update_bb_for_insn (init_block);

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

set_block_levels (tree block, int level)
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
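/* E.g. for "void f () { { int a; { int b; } } }", the outermost BLOCK
   of DECL_INITIAL (f) gets BLOCK_NUMBER 0, the scope holding `a' gets
   1, and the inner scope holding `b' gets 2; sibling scopes reached
   through BLOCK_CHAIN share the same level.  */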
/* Create a block containing landing pads and similar stuff.  */

construct_exit_block (void)
  rtx_insn *head = get_last_insn ();
  basic_block exit_block;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();

  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb frequency counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
      while (NEXT_INSN (head) != return_label)
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
  if (IS_TYPE_OR_DECL_P (t))
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
      while (((TREE_CODE (t) == ARRAY_REF
	       || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	  t = get_base_address (t);
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

discover_nonconstant_array_refs (void)
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

expand_stack_alignment (void)
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     ...  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     realignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();

expand_main_function (void)
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

stack_protect_prologue (void)
  tree guard_decl = targetm.stack_protect_guard ();

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
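/* Here X is the function's own guard slot and Y the global (or TLS)
   canary value; using the target's stack_protect_set pattern, when one
   exists, lets the copy be done without leaving the canary value live
   in a scratch register where a later spill could expose it.  */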
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

const pass_data pass_data_expand =
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lva ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */

class pass_expand : public rtl_opt_pass
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

pass_expand::execute (function *fun)
  basic_block bb, init_block;
  rtx_insn *var_seq, *var_ret_seq;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);

  /* Make sure all values used by the optimization passes have sane
     defaults.  */

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  if (chkp_function_instrumented_p (current_function_decl))
    chkp_reset_rtl_bounds ();

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
	set_curr_insn_location (fun->function_start_locus);
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  var_ret_seq = expand_used_vars ();
  var_seq = get_insns ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  for (i = 1; i < num_ssa_names; i++)
      tree name = ssa_name (i);

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statements, and won't be part of the partitioning,
	 ...  */
	  || !SSA_NAME_DEF_STMT (name))

      adjust_one_expanded_partition_var (name);

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  for (i = 1; i < num_ssa_names; i++)
      tree name = ssa_name (i);

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statements, and won't be part of the partitioning,
	 ...  */
	  || !SSA_NAME_DEF_STMT (name))
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	  rtx in = DECL_RTL_IF_SET (var);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
      emit_insn_after (var_ret_seq, after);

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSNS_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		  rtx_insn *insns = e->insns.r;

		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		commit_one_edge_insertion (e);

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))

  blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

	     "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
  /* And the pass manager will dump RTL for us.  */

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
      for (parent = DECL_CONTEXT (current_function_decl);
	   parent != NULL_TREE;
	   parent = get_containing_scope (parent))
	if (TREE_CODE (parent) == FUNCTION_DECL)
	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

make_pass_expand (gcc::context *ctxt)
  return new pass_expand (ctxt);