/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "value-prof.h"
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    t = gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();

  return t;
}
/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
   statement STMT.  */

tree
gimple_cond_pred_to_tree (gimple stmt)
{
  return build2 (gimple_cond_code (stmt), boolean_type_node,
                 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
}
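
/* Illustrative example (not part of the original source): for a GIMPLE_COND
   such as "if (a_1 < b_2)", the call above builds the two-operand tree
   LT_EXPR <a_1, b_2> with boolean_type_node as its type.  */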
/* Helper for gimple_to_tree.  Set EXPR_LOCATION for every expression
   inside *TP.  DATA is the location to set.  */

static tree
set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
{
  location_t *loc = (location_t *) data;

  SET_EXPR_LOCATION (*tp, *loc);

  return NULL_TREE;
}
/* RTL expansion has traditionally been done on trees, so the
   transition to doing it on GIMPLE tuples is very invasive to the RTL
   expander.  To facilitate the transition, this function takes a
   GIMPLE tuple STMT and returns the same statement in the form of a
   tree.  */

static tree
gimple_to_tree (gimple stmt)
{
  tree t;
  tree_ann_common_t ann;

  switch (gimple_code (stmt))
    {
116 tree lhs
= gimple_assign_lhs (stmt
);
118 t
= gimple_assign_rhs_to_tree (stmt
);
119 t
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, t
);
120 if (gimple_assign_nontemporal_move_p (stmt
))
121 MOVE_NONTEMPORAL (t
) = true;
126 t
= gimple_cond_pred_to_tree (stmt
);
127 t
= build3 (COND_EXPR
, void_type_node
, t
, NULL_TREE
, NULL_TREE
);
131 t
= build1 (GOTO_EXPR
, void_type_node
, gimple_goto_dest (stmt
));
135 t
= build1 (LABEL_EXPR
, void_type_node
, gimple_label_label (stmt
));
140 tree retval
= gimple_return_retval (stmt
);
142 if (retval
&& retval
!= error_mark_node
)
144 tree result
= DECL_RESULT (current_function_decl
);
146 /* If we are not returning the current function's RESULT_DECL,
147 build an assignment to it. */
148 if (retval
!= result
)
150 /* I believe that a function's RESULT_DECL is unique. */
151 gcc_assert (TREE_CODE (retval
) != RESULT_DECL
);
153 retval
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
157 t
= build1 (RETURN_EXPR
, void_type_node
, retval
);
168 n
= gimple_asm_noutputs (stmt
);
171 t
= out
= gimple_asm_output_op (stmt
, 0);
172 for (i
= 1; i
< n
; i
++)
174 TREE_CHAIN (t
) = gimple_asm_output_op (stmt
, i
);
175 t
= gimple_asm_output_op (stmt
, i
);
180 n
= gimple_asm_ninputs (stmt
);
183 t
= in
= gimple_asm_input_op (stmt
, 0);
184 for (i
= 1; i
< n
; i
++)
186 TREE_CHAIN (t
) = gimple_asm_input_op (stmt
, i
);
187 t
= gimple_asm_input_op (stmt
, i
);
192 n
= gimple_asm_nclobbers (stmt
);
195 t
= cl
= gimple_asm_clobber_op (stmt
, 0);
196 for (i
= 1; i
< n
; i
++)
198 TREE_CHAIN (t
) = gimple_asm_clobber_op (stmt
, i
);
199 t
= gimple_asm_clobber_op (stmt
, i
);
203 s
= gimple_asm_string (stmt
);
204 t
= build4 (ASM_EXPR
, void_type_node
, build_string (strlen (s
), s
),
206 ASM_VOLATILE_P (t
) = gimple_asm_volatile_p (stmt
);
207 ASM_INPUT_P (t
) = gimple_asm_input_p (stmt
);
215 tree_ann_common_t ann
;
217 t
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
219 CALL_EXPR_FN (t
) = gimple_call_fn (stmt
);
220 TREE_TYPE (t
) = gimple_call_return_type (stmt
);
221 CALL_EXPR_STATIC_CHAIN (t
) = gimple_call_chain (stmt
);
223 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
224 CALL_EXPR_ARG (t
, i
) = gimple_call_arg (stmt
, i
);
226 if (!(gimple_call_flags (stmt
) & (ECF_CONST
| ECF_PURE
)))
227 TREE_SIDE_EFFECTS (t
) = 1;
229 if (gimple_call_flags (stmt
) & ECF_NOTHROW
)
230 TREE_NOTHROW (t
) = 1;
232 CALL_EXPR_TAILCALL (t
) = gimple_call_tail_p (stmt
);
233 CALL_EXPR_RETURN_SLOT_OPT (t
) = gimple_call_return_slot_opt_p (stmt
);
234 CALL_FROM_THUNK_P (t
) = gimple_call_from_thunk_p (stmt
);
235 CALL_CANNOT_INLINE_P (t
) = gimple_call_cannot_inline_p (stmt
);
236 CALL_EXPR_VA_ARG_PACK (t
) = gimple_call_va_arg_pack_p (stmt
);
238 /* If the call has a LHS then create a MODIFY_EXPR to hold it. */
240 tree lhs
= gimple_call_lhs (stmt
);
243 t
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, t
);
246 /* Record the original call statement, as it may be used
247 to retrieve profile information during expansion. */
249 if ((fn
= gimple_call_fndecl (stmt
)) != NULL_TREE
250 && DECL_BUILT_IN (fn
))
252 ann
= get_tree_common_ann (t
);
262 tree elt
= gimple_switch_label (stmt
, 0);
264 label_vec
= make_tree_vec (gimple_switch_num_labels (stmt
));
266 if (!CASE_LOW (elt
) && !CASE_HIGH (elt
))
268 for (i
= 1; i
< gimple_switch_num_labels (stmt
); i
++)
269 TREE_VEC_ELT (label_vec
, i
- 1) = gimple_switch_label (stmt
, i
);
271 /* The default case in a SWITCH_EXPR must be at the end of
273 TREE_VEC_ELT (label_vec
, i
- 1) = gimple_switch_label (stmt
, 0);
277 for (i
= 0; i
< gimple_switch_num_labels (stmt
); i
++)
278 TREE_VEC_ELT (label_vec
, i
) = gimple_switch_label (stmt
, i
);
281 t
= build3 (SWITCH_EXPR
, void_type_node
, gimple_switch_index (stmt
),
288 t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
292 t
= build_resx (gimple_resx_region (stmt
));
298 error ("Unrecognized GIMPLE statement during RTL expansion");
299 print_gimple_stmt (stderr
, stmt
, 4, 0);
304 /* Ignore any bad gimple codes if we're going to die anyhow,
305 so we can at least set TREE_ASM_WRITTEN and have the rest
306 of compilation advance without sudden ICE death. */
307 t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
312 /* If STMT is inside an exception region, record it in the generated
314 rn
= lookup_stmt_eh_region (stmt
);
317 tree call
= get_call_expr_in (t
);
319 ann
= get_tree_common_ann (t
);
      /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
         the CALL_EXPR, not the assignment statement, for the EH region number.  */
324 if (call
&& call
!= t
)
326 ann
= get_tree_common_ann (call
);
331 /* Set EXPR_LOCATION in all the embedded expressions. */
332 loc
= gimple_location (stmt
);
333 walk_tree (&t
, set_expr_location_r
, (void *) &loc
, NULL
);
335 TREE_BLOCK (t
) = gimple_block (stmt
);
/* Release back to GC memory allocated by gimple_to_tree.  */

static void
release_stmt_tree (gimple stmt, tree stmt_tree)
{
  tree_ann_common_t ann;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
        ggc_free (TREE_OPERAND (stmt_tree, 1));
      break;
    case GIMPLE_COND:
      ggc_free (COND_EXPR_COND (stmt_tree));
      break;
    case GIMPLE_RETURN:
      if (TREE_OPERAND (stmt_tree, 0)
          && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
        ggc_free (TREE_OPERAND (stmt_tree, 0));
      break;
    case GIMPLE_CALL:
      if (gimple_call_lhs (stmt))
        {
          ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
          if (ann)
            ggc_free (ann);
          ggc_free (TREE_OPERAND (stmt_tree, 1));
        }
      break;
    default:
      break;
    }

  ann = tree_common_ann (stmt_tree);
  if (ann)
    ggc_free (ann);
  ggc_free (stmt_tree);
}
/* Verify that there is exactly one jump instruction since LAST and attach a
   REG_BR_PROB note specifying the probability.
   ??? We really ought to pass the probability down to the RTL expanders and
   let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */

static void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    {
      /* It is common to emit condjump-around-jump sequence when we don't know
         how to reverse the conditional.  Special case this.  */
      if (!any_condjump_p (last)
          || !JUMP_P (NEXT_INSN (last))
          || !simplejump_p (NEXT_INSN (last))
          || !NEXT_INSN (NEXT_INSN (last))
          || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
          || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
          || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
          || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
        goto failed;
      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
      add_reg_note (last, REG_BR_PROB,
                    GEN_INT (REG_BR_PROB_BASE - probability));
      return;
    }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;

  return align / BITS_PER_UNIT;
}
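
/* Illustrative example (not part of the original source): for a decl whose
   DECL_ALIGN is 64 bits on a target where MAX_SUPPORTED_STACK_ALIGNMENT is
   at least 64, get_decl_align_unit returns 64 / BITS_PER_UNIT == 8 bytes,
   and crtl->stack_alignment_needed is raised to 64 if it was smaller.  */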
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
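
/* Worked example (illustrative, not part of the original source): with
   FRAME_GROWS_DOWNWARD, frame_offset == -20, frame_phase == 0, SIZE == 8 and
   ALIGN == 8, new_frame_offset becomes -28, the "&= -8" rounds it down to
   -32, so the allocation is at offset -32 and frame_offset becomes -32.  */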
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}
/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  return (i * (i + 1)) / 2 + j;
}
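
/* Illustrative note (not part of the original source): for i >= j the pair
   (i, j) maps to i*(i+1)/2 + j, e.g. (0,0)->0, (1,0)->1, (1,1)->2,
   (2,0)->3, (2,1)->4, (2,2)->5, so a conflict bitmap over N variables needs
   only N*(N+1)/2 entries instead of N*N.  */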
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for unions containing structures.  Otherwise RTL alias analysis comes
   along, and due to type-based aliasing rules decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;

  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;

  return 0;
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        ...
        Look for the largest non-conflicting object B with size <= S.
        ...  */

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);
        }
    }
}
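
/* Illustrative walk-through (not part of the original source): if a 32-byte
   array, a 16-byte array and an 8-byte array never conflict, the two smaller
   ones can be packed at distinct offsets inside the 32-byte representative's
   slot, reducing the frame space used from 56 bytes to 32.  */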
835 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
838 dump_stack_var_partition (void)
840 size_t si
, i
, j
, n
= stack_vars_num
;
842 for (si
= 0; si
< n
; ++si
)
844 i
= stack_vars_sorted
[si
];
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars
[i
].representative
!= i
)
850 fprintf (dump_file
, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i
, stack_vars
[i
].size
,
852 stack_vars
[i
].alignb
);
854 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
856 fputc ('\t', dump_file
);
857 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
858 fprintf (dump_file
, ", offset " HOST_WIDE_INT_PRINT_DEC
"\n",
859 stack_vars
[j
].offset
);
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
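
/* Illustrative note (not part of the original source): "offset & -offset"
   isolates the lowest set bit, i.e. the largest power of two dividing the
   offset.  For example, an offset of 24 bytes gives 24 & -24 == 8, so the
   decl is known to be 8-byte aligned relative to the frame base.  */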
891 /* A subroutine of expand_used_vars. Give each partition representative
892 a unique location within the stack frame. Update each partition member
893 with that location. */
896 expand_stack_vars (bool (*pred
) (tree
))
898 size_t si
, i
, j
, n
= stack_vars_num
;
900 for (si
= 0; si
< n
; ++si
)
902 HOST_WIDE_INT offset
;
904 i
= stack_vars_sorted
[si
];
906 /* Skip variables that aren't partition representatives, for now. */
907 if (stack_vars
[i
].representative
!= i
)
910 /* Skip variables that have already had rtl assigned. See also
911 add_stack_var where we perpetrate this pc_rtx hack. */
912 if (DECL_RTL (stack_vars
[i
].decl
) != pc_rtx
)
915 /* Check the predicate to see whether this variable should be
916 allocated in this pass. */
917 if (pred
&& !pred (stack_vars
[i
].decl
))
920 offset
= alloc_stack_frame_space (stack_vars
[i
].size
,
921 stack_vars
[i
].alignb
);
923 /* Create rtl for each variable based on their location within the
925 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
927 gcc_assert (stack_vars
[j
].offset
<= stack_vars
[i
].size
);
928 expand_one_stack_var_at (stack_vars
[j
].decl
,
929 stack_vars
[j
].offset
+ offset
);
934 /* Take into account all sizes of partitions and reset DECL_RTLs. */
936 account_stack_vars (void)
938 size_t si
, j
, i
, n
= stack_vars_num
;
939 HOST_WIDE_INT size
= 0;
941 for (si
= 0; si
< n
; ++si
)
943 i
= stack_vars_sorted
[si
];
945 /* Skip variables that aren't partition representatives, for now. */
946 if (stack_vars
[i
].representative
!= i
)
949 size
+= stack_vars
[i
].size
;
950 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
951 SET_DECL_RTL (stack_vars
[j
].decl
, NULL
);
956 /* A subroutine of expand_one_var. Called to immediately assign rtl
957 to a variable to be allocated in the stack frame. */
960 expand_one_stack_var (tree var
)
962 HOST_WIDE_INT size
, offset
, align
;
964 size
= tree_low_cst (DECL_SIZE_UNIT (var
), 1);
965 align
= get_decl_align_unit (var
);
966 offset
= alloc_stack_frame_space (size
, align
);
968 expand_one_stack_var_at (var
, offset
);
971 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
972 that will reside in a hard register. */
975 expand_one_hard_reg_var (tree var
)
977 rest_of_decl_compilation (var
, 0, 0);
980 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
981 that will reside in a pseudo register. */
984 expand_one_register_var (tree var
)
986 tree type
= TREE_TYPE (var
);
987 int unsignedp
= TYPE_UNSIGNED (type
);
988 enum machine_mode reg_mode
989 = promote_mode (type
, DECL_MODE (var
), &unsignedp
, 0);
990 rtx x
= gen_reg_rtx (reg_mode
);
992 SET_DECL_RTL (var
, x
);
994 /* Note if the object is a user variable. */
995 if (!DECL_ARTIFICIAL (var
))
998 if (POINTER_TYPE_P (type
))
999 mark_reg_pointer (x
, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var
))));
1002 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1003 has some associated error, e.g. its type is error-mark. We just need
1004 to pick something that won't crash the rest of the compiler. */
1007 expand_one_error_var (tree var
)
1009 enum machine_mode mode
= DECL_MODE (var
);
1012 if (mode
== BLKmode
)
1013 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1014 else if (mode
== VOIDmode
)
1017 x
= gen_reg_rtx (mode
);
1019 SET_DECL_RTL (var
, x
);
1022 /* A subroutine of expand_one_var. VAR is a variable that will be
1023 allocated to the local stack frame. Return true if we wish to
1024 add VAR to STACK_VARS so that it will be coalesced with other
1025 variables. Return false to allocate VAR immediately.
1027 This function is used to reduce the number of variables considered
1028 for coalescing, which reduces the size of the quadratic problem. */
1031 defer_stack_allocation (tree var
, bool toplevel
)
1033 /* If stack protection is enabled, *all* stack variables must be deferred,
1034 so that we can re-order the strings to the top of the frame. */
1035 if (flag_stack_protect
)
1038 /* Variables in the outermost scope automatically conflict with
1039 every other variable. The only reason to want to defer them
1040 at all is that, after sorting, we can more efficiently pack
1041 small variables in the stack frame. Continue to defer at -O2. */
1042 if (toplevel
&& optimize
< 2)
1045 /* Without optimization, *most* variables are allocated from the
1046 stack, which makes the quadratic problem large exactly when we
1047 want compilation to proceed as quickly as possible. On the
1048 other hand, we don't want the function's stack frame size to
1049 get completely out of hand. So we avoid adding scalars and
1050 "small" aggregates to the list at all. */
1051 if (optimize
== 0 && tree_low_cst (DECL_SIZE_UNIT (var
), 1) < 32)
1057 /* A subroutine of expand_used_vars. Expand one variable according to
1058 its flavor. Variables to be placed on the stack are not actually
1059 expanded yet, merely recorded.
1060 When REALLY_EXPAND is false, only add stack values to be allocated.
1061 Return stack usage this variable is supposed to take.
1064 static HOST_WIDE_INT
1065 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1067 if (SUPPORTS_STACK_ALIGNMENT
1068 && TREE_TYPE (var
) != error_mark_node
1069 && TREE_CODE (var
) == VAR_DECL
)
1073 /* Because we don't know if VAR will be in register or on stack,
1074 we conservatively assume it will be on stack even if VAR is
1075 eventually put into register after RA pass. For non-automatic
1076 variables, which won't be on stack, we collect alignment of
1077 type and ignore user specified alignment. */
1078 if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1079 align
= TYPE_ALIGN (TREE_TYPE (var
));
1081 align
= DECL_ALIGN (var
);
1083 if (crtl
->stack_alignment_estimated
< align
)
1085 /* stack_alignment_estimated shouldn't change after stack
1086 realign decision made */
1087 gcc_assert(!crtl
->stack_realign_processed
);
1088 crtl
->stack_alignment_estimated
= align
;
1092 if (TREE_CODE (var
) != VAR_DECL
)
1094 else if (DECL_EXTERNAL (var
))
1096 else if (DECL_HAS_VALUE_EXPR_P (var
))
1098 else if (TREE_STATIC (var
))
1100 else if (DECL_RTL_SET_P (var
))
1102 else if (TREE_TYPE (var
) == error_mark_node
)
1105 expand_one_error_var (var
);
1107 else if (DECL_HARD_REGISTER (var
))
1110 expand_one_hard_reg_var (var
);
1112 else if (use_register_for_decl (var
))
1115 expand_one_register_var (var
);
1117 else if (defer_stack_allocation (var
, toplevel
))
1118 add_stack_var (var
);
1122 expand_one_stack_var (var
);
1123 return tree_low_cst (DECL_SIZE_UNIT (var
), 1);
1128 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1129 expanding variables. Those variables that can be put into registers
1130 are allocated pseudos; those that can't are put on the stack.
1132 TOPLEVEL is true if this is the outermost BLOCK. */
1135 expand_used_vars_for_block (tree block
, bool toplevel
)
1137 size_t i
, j
, old_sv_num
, this_sv_num
, new_sv_num
;
1140 old_sv_num
= toplevel
? 0 : stack_vars_num
;
1142 /* Expand all variables at this level. */
1143 for (t
= BLOCK_VARS (block
); t
; t
= TREE_CHAIN (t
))
1145 expand_one_var (t
, toplevel
, true);
1147 this_sv_num
= stack_vars_num
;
1149 /* Expand all variables at containing levels. */
1150 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1151 expand_used_vars_for_block (t
, false);
1153 /* Since we do not track exact variable lifetimes (which is not even
1154 possible for variables whose address escapes), we mirror the block
1155 tree in the interference graph. Here we cause all variables at this
1156 level, and all sublevels, to conflict. Do make certain that a
1157 variable conflicts with itself. */
1158 if (old_sv_num
< this_sv_num
)
1160 new_sv_num
= stack_vars_num
;
1161 resize_stack_vars_conflict (new_sv_num
);
1163 for (i
= old_sv_num
; i
< new_sv_num
; ++i
)
1164 for (j
= i
< this_sv_num
? i
+1 : this_sv_num
; j
-- > old_sv_num
;)
1165 add_stack_var_conflict (i
, j
);
1169 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1170 and clear TREE_USED on all local variables. */
1173 clear_tree_used (tree block
)
1177 for (t
= BLOCK_VARS (block
); t
; t
= TREE_CHAIN (t
))
1178 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1181 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1182 clear_tree_used (t
);
1185 /* Examine TYPE and determine a bit mask of the following features. */
1187 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1188 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1189 #define SPCT_HAS_ARRAY 4
1190 #define SPCT_HAS_AGGREGATE 8
1193 stack_protect_classify_type (tree type
)
1195 unsigned int ret
= 0;
1198 switch (TREE_CODE (type
))
1201 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1202 if (t
== char_type_node
1203 || t
== signed_char_type_node
1204 || t
== unsigned_char_type_node
)
1206 unsigned HOST_WIDE_INT max
= PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
);
1207 unsigned HOST_WIDE_INT len
;
1209 if (!TYPE_SIZE_UNIT (type
)
1210 || !host_integerp (TYPE_SIZE_UNIT (type
), 1))
1213 len
= tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
1216 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1218 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1221 ret
= SPCT_HAS_ARRAY
;
1225 case QUAL_UNION_TYPE
:
1227 ret
= SPCT_HAS_AGGREGATE
;
1228 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1229 if (TREE_CODE (t
) == FIELD_DECL
)
1230 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
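
/* Illustrative example (not part of the original source): with the default
   --param ssp-buffer-size=8, a local "char buf[64]" is classified as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, while "char buf[4]" is
   classified as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY.  */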
1240 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1241 part of the local stack frame. Remember if we ever return nonzero for
1242 any variable in this function. The return value is the phase number in
1243 which the variable should be allocated. */
1246 stack_protect_decl_phase (tree decl
)
1248 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1251 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1252 has_short_buffer
= true;
1254 if (flag_stack_protect
== 2)
1256 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1257 && !(bits
& SPCT_HAS_AGGREGATE
))
1259 else if (bits
& SPCT_HAS_ARRAY
)
1263 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1266 has_protected_decls
= true;
1271 /* Two helper routines that check for phase 1 and phase 2. These are used
1272 as callbacks for expand_stack_vars. */
1275 stack_protect_decl_phase_1 (tree decl
)
1277 return stack_protect_decl_phase (decl
) == 1;
1281 stack_protect_decl_phase_2 (tree decl
)
1283 return stack_protect_decl_phase (decl
) == 2;
1286 /* Ensure that variables in different stack protection phases conflict
1287 so that they are not merged and share the same stack slot. */
1290 add_stack_protection_conflicts (void)
1292 size_t i
, j
, n
= stack_vars_num
;
1293 unsigned char *phase
;
1295 phase
= XNEWVEC (unsigned char, n
);
1296 for (i
= 0; i
< n
; ++i
)
1297 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1299 for (i
= 0; i
< n
; ++i
)
1301 unsigned char ph_i
= phase
[i
];
1302 for (j
= 0; j
< i
; ++j
)
1303 if (ph_i
!= phase
[j
])
1304 add_stack_var_conflict (i
, j
);
1310 /* Create a decl for the guard at the top of the stack frame. */
1313 create_stack_guard (void)
1315 tree guard
= build_decl (VAR_DECL
, NULL
, ptr_type_node
);
1316 TREE_THIS_VOLATILE (guard
) = 1;
1317 TREE_USED (guard
) = 1;
1318 expand_one_stack_var (guard
);
1319 crtl
->stack_protect_guard
= guard
;
1322 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1323 expanding variables. Those variables that can be put into registers
1324 are allocated pseudos; those that can't are put on the stack.
1326 TOPLEVEL is true if this is the outermost BLOCK. */
1328 static HOST_WIDE_INT
1329 account_used_vars_for_block (tree block
, bool toplevel
)
1331 size_t i
, j
, old_sv_num
, this_sv_num
, new_sv_num
;
1333 HOST_WIDE_INT size
= 0;
1335 old_sv_num
= toplevel
? 0 : stack_vars_num
;
1337 /* Expand all variables at this level. */
1338 for (t
= BLOCK_VARS (block
); t
; t
= TREE_CHAIN (t
))
1340 size
+= expand_one_var (t
, toplevel
, false);
1342 this_sv_num
= stack_vars_num
;
1344 /* Expand all variables at containing levels. */
1345 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1346 size
+= account_used_vars_for_block (t
, false);
1348 /* Since we do not track exact variable lifetimes (which is not even
1349 possible for variables whose address escapes), we mirror the block
1350 tree in the interference graph. Here we cause all variables at this
1351 level, and all sublevels, to conflict. Do make certain that a
1352 variable conflicts with itself. */
1353 if (old_sv_num
< this_sv_num
)
1355 new_sv_num
= stack_vars_num
;
1356 resize_stack_vars_conflict (new_sv_num
);
1358 for (i
= old_sv_num
; i
< new_sv_num
; ++i
)
1359 for (j
= i
< this_sv_num
? i
+1 : this_sv_num
; j
-- > old_sv_num
;)
1360 add_stack_var_conflict (i
, j
);
1365 /* Prepare for expanding variables. */
1367 init_vars_expansion (void)
1370 /* Set TREE_USED on all variables in the local_decls. */
1371 for (t
= cfun
->local_decls
; t
; t
= TREE_CHAIN (t
))
1372 TREE_USED (TREE_VALUE (t
)) = 1;
1374 /* Clear TREE_USED on all variables associated with a block scope. */
1375 clear_tree_used (DECL_INITIAL (current_function_decl
));
1377 /* Initialize local stack smashing state. */
1378 has_protected_decls
= false;
1379 has_short_buffer
= false;
1382 /* Free up stack variable graph data. */
1384 fini_vars_expansion (void)
1386 XDELETEVEC (stack_vars
);
1387 XDELETEVEC (stack_vars_sorted
);
1388 XDELETEVEC (stack_vars_conflict
);
1390 stack_vars_alloc
= stack_vars_num
= 0;
1391 stack_vars_conflict
= NULL
;
1392 stack_vars_conflict_alloc
= 0;
1396 estimated_stack_frame_size (void)
1398 HOST_WIDE_INT size
= 0;
1399 tree t
, outer_block
= DECL_INITIAL (current_function_decl
);
1401 init_vars_expansion ();
1403 /* At this point all variables on the local_decls with TREE_USED
1404 set are not associated with any block scope. Lay them out. */
1405 for (t
= cfun
->local_decls
; t
; t
= TREE_CHAIN (t
))
1407 tree var
= TREE_VALUE (t
);
1409 if (TREE_USED (var
))
1410 size
+= expand_one_var (var
, true, false);
1411 TREE_USED (var
) = 1;
1413 size
+= account_used_vars_for_block (outer_block
, true);
1414 if (stack_vars_num
> 0)
1416 /* Due to the way alias sets work, no variables with non-conflicting
1417 alias sets may be assigned the same address. Add conflicts to
1419 add_alias_set_conflicts ();
1421 /* If stack protection is enabled, we don't share space between
1422 vulnerable data and non-vulnerable data. */
1423 if (flag_stack_protect
)
1424 add_stack_protection_conflicts ();
1426 /* Now that we have collected all stack variables, and have computed a
1427 minimal interference graph, attempt to save some stack space. */
1428 partition_stack_vars ();
1430 dump_stack_var_partition ();
1432 size
+= account_stack_vars ();
1433 fini_vars_expansion ();
1438 /* Expand all variables used in the function. */
1441 expand_used_vars (void)
1443 tree t
, outer_block
= DECL_INITIAL (current_function_decl
);
1445 /* Compute the phase of the stack frame for this function. */
1447 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1448 int off
= STARTING_FRAME_OFFSET
% align
;
1449 frame_phase
= off
? align
- off
: 0;
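
/* Illustrative example (not part of the original source): with
   PREFERRED_STACK_BOUNDARY == 128 bits, align is 16 bytes; if
   STARTING_FRAME_OFFSET were 4, off would be 4 and frame_phase 12, so
   (frame_offset + 12) is what must be a multiple of 16.  */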
1452 init_vars_expansion ();
1454 /* At this point all variables on the local_decls with TREE_USED
1455 set are not associated with any block scope. Lay them out. */
1456 for (t
= cfun
->local_decls
; t
; t
= TREE_CHAIN (t
))
1458 tree var
= TREE_VALUE (t
);
1459 bool expand_now
= false;
1461 /* We didn't set a block for static or extern because it's hard
1462 to tell the difference between a global variable (re)declared
1463 in a local scope, and one that's really declared there to
1464 begin with. And it doesn't really matter much, since we're
1465 not giving them stack space. Expand them now. */
1466 if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1469 /* Any variable that could have been hoisted into an SSA_NAME
1470 will have been propagated anywhere the optimizers chose,
1471 i.e. not confined to their original block. Allocate them
1472 as if they were defined in the outermost scope. */
1473 else if (is_gimple_reg (var
))
1476 /* If the variable is not associated with any block, then it
1477 was created by the optimizers, and could be live anywhere
1479 else if (TREE_USED (var
))
1482 /* Finally, mark all variables on the list as used. We'll use
1483 this in a moment when we expand those associated with scopes. */
1484 TREE_USED (var
) = 1;
1487 expand_one_var (var
, true, true);
1489 cfun
->local_decls
= NULL_TREE
;
1491 /* At this point, all variables within the block tree with TREE_USED
1492 set are actually used by the optimized function. Lay them out. */
1493 expand_used_vars_for_block (outer_block
, true);
1495 if (stack_vars_num
> 0)
1497 /* Due to the way alias sets work, no variables with non-conflicting
1498 alias sets may be assigned the same address. Add conflicts to
1500 add_alias_set_conflicts ();
1502 /* If stack protection is enabled, we don't share space between
1503 vulnerable data and non-vulnerable data. */
1504 if (flag_stack_protect
)
1505 add_stack_protection_conflicts ();
1507 /* Now that we have collected all stack variables, and have computed a
1508 minimal interference graph, attempt to save some stack space. */
1509 partition_stack_vars ();
1511 dump_stack_var_partition ();
1514 /* There are several conditions under which we should create a
1515 stack guard: protect-all, alloca used, protected decls present. */
1516 if (flag_stack_protect
== 2
1517 || (flag_stack_protect
1518 && (cfun
->calls_alloca
|| has_protected_decls
)))
1519 create_stack_guard ();
1521 /* Assign rtl to each variable based on these partitions. */
1522 if (stack_vars_num
> 0)
1524 /* Reorder decls to be protected by iterating over the variables
1525 array multiple times, and allocating out of each phase in turn. */
1526 /* ??? We could probably integrate this into the qsort we did
1527 earlier, such that we naturally see these variables first,
1528 and thus naturally allocate things in the right order. */
1529 if (has_protected_decls
)
1531 /* Phase 1 contains only character arrays. */
1532 expand_stack_vars (stack_protect_decl_phase_1
);
1534 /* Phase 2 contains other kinds of arrays. */
1535 if (flag_stack_protect
== 2)
1536 expand_stack_vars (stack_protect_decl_phase_2
);
1539 expand_stack_vars (NULL
);
1541 fini_vars_expansion ();
1544 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1545 if (STACK_ALIGNMENT_NEEDED
)
1547 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1548 if (!FRAME_GROWS_DOWNWARD
)
1549 frame_offset
+= align
- 1;
1550 frame_offset
&= -align
;
1555 /* If we need to produce a detailed dump, print the tree representation
1556 for STMT to the dump file. SINCE is the last RTX after which the RTL
1557 generated for STMT should have been appended. */
1560 maybe_dump_rtl_for_gimple_stmt (gimple stmt
, rtx since
)
1562 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1564 fprintf (dump_file
, "\n;; ");
1565 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1566 fprintf (dump_file
, "\n");
1568 print_rtl (dump_file
, since
? NEXT_INSN (since
) : since
);
1572 /* Maps the blocks that do not contain tree labels to rtx labels. */
1574 static struct pointer_map_t
*lab_rtx_for_bb
;
1576 /* Returns the label_rtx expression for a label starting basic block BB. */
1579 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED
)
1581 gimple_stmt_iterator gsi
;
1586 if (bb
->flags
& BB_RTL
)
1587 return block_label (bb
);
1589 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
1593 /* Find the tree label if it is present. */
1595 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1597 lab_stmt
= gsi_stmt (gsi
);
1598 if (gimple_code (lab_stmt
) != GIMPLE_LABEL
)
1601 lab
= gimple_label_label (lab_stmt
);
1602 if (DECL_NONLOCAL (lab
))
1605 return label_rtx (lab
);
1608 elt
= pointer_map_insert (lab_rtx_for_bb
, bb
);
1609 *elt
= gen_label_rtx ();
1614 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1615 Returns a new basic block if we've terminated the current basic
1616 block and created a new one. */
1619 expand_gimple_cond (basic_block bb
, gimple stmt
)
1621 basic_block new_bb
, dest
;
1625 tree pred
= gimple_cond_pred_to_tree (stmt
);
1628 last2
= last
= get_last_insn ();
1630 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
1631 if (gimple_has_location (stmt
))
1633 set_curr_insn_source_location (gimple_location (stmt
));
1634 set_curr_insn_block (gimple_block (stmt
));
1637 /* These flags have no purpose in RTL land. */
1638 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
1639 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
1641 /* We can either have a pure conditional jump with one fallthru edge or
1642 two-way jump that needs to be decomposed into two basic blocks. */
1643 if (false_edge
->dest
== bb
->next_bb
)
1645 jumpif (pred
, label_rtx_for_bb (true_edge
->dest
));
1646 add_reg_br_prob_note (last
, true_edge
->probability
);
1647 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1648 if (true_edge
->goto_locus
)
1649 set_curr_insn_source_location (true_edge
->goto_locus
);
1650 false_edge
->flags
|= EDGE_FALLTHRU
;
1654 if (true_edge
->dest
== bb
->next_bb
)
1656 jumpifnot (pred
, label_rtx_for_bb (false_edge
->dest
));
1657 add_reg_br_prob_note (last
, false_edge
->probability
);
1658 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1659 if (false_edge
->goto_locus
)
1660 set_curr_insn_source_location (false_edge
->goto_locus
);
1661 true_edge
->flags
|= EDGE_FALLTHRU
;
1666 jumpif (pred
, label_rtx_for_bb (true_edge
->dest
));
1667 add_reg_br_prob_note (last
, true_edge
->probability
);
1668 last
= get_last_insn ();
1669 emit_jump (label_rtx_for_bb (false_edge
->dest
));
1672 if (BARRIER_P (BB_END (bb
)))
1673 BB_END (bb
) = PREV_INSN (BB_END (bb
));
1674 update_bb_for_insn (bb
);
1676 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
1677 dest
= false_edge
->dest
;
1678 redirect_edge_succ (false_edge
, new_bb
);
1679 false_edge
->flags
|= EDGE_FALLTHRU
;
1680 new_bb
->count
= false_edge
->count
;
1681 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
1682 new_edge
= make_edge (new_bb
, dest
, 0);
1683 new_edge
->probability
= REG_BR_PROB_BASE
;
1684 new_edge
->count
= new_bb
->count
;
1685 if (BARRIER_P (BB_END (new_bb
)))
1686 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
1687 update_bb_for_insn (new_bb
);
1689 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
1691 if (false_edge
->goto_locus
)
1692 set_curr_insn_source_location (false_edge
->goto_locus
);
1698 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
1699 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1700 generated a tail call (something that might be denied by the ABI
1701 rules governing the call; see calls.c).
1703 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1704 can still reach the rest of BB. The case here is __builtin_sqrt,
1705 where the NaN result goes through the external function (with a
1706 tailcall) and the normal result happens via a sqrt instruction. */
1709 expand_gimple_tailcall (basic_block bb
, gimple stmt
, bool *can_fallthru
)
1716 tree stmt_tree
= gimple_to_tree (stmt
);
1718 last2
= last
= get_last_insn ();
1720 expand_expr_stmt (stmt_tree
);
1722 release_stmt_tree (stmt
, stmt_tree
);
1724 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
1725 if (CALL_P (last
) && SIBLING_CALL_P (last
))
1728 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
1730 *can_fallthru
= true;
1734 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1735 Any instructions emitted here are about to be deleted. */
1736 do_pending_stack_adjust ();
1738 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1739 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1740 EH or abnormal edges, we shouldn't have created a tail call in
1741 the first place. So it seems to me we should just be removing
1742 all edges here, or redirecting the existing fallthru edge to
1748 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
1750 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
1752 if (e
->dest
!= EXIT_BLOCK_PTR
)
1754 e
->dest
->count
-= e
->count
;
1755 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
1756 if (e
->dest
->count
< 0)
1758 if (e
->dest
->frequency
< 0)
1759 e
->dest
->frequency
= 0;
1762 probability
+= e
->probability
;
1769 /* This is somewhat ugly: the call_expr expander often emits instructions
1770 after the sibcall (to perform the function return). These confuse the
1771 find_many_sub_basic_blocks code, so we need to get rid of these. */
1772 last
= NEXT_INSN (last
);
1773 gcc_assert (BARRIER_P (last
));
1775 *can_fallthru
= false;
1776 while (NEXT_INSN (last
))
      /* For instance, the sqrt builtin expander expands an if with a
         sibcall in the THEN branch and a label in the ELSE branch.  */
1780 if (LABEL_P (NEXT_INSN (last
)))
1782 *can_fallthru
= true;
1785 delete_insn (NEXT_INSN (last
));
1788 e
= make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_ABNORMAL
| EDGE_SIBCALL
);
1789 e
->probability
+= probability
;
1792 update_bb_for_insn (bb
);
1794 if (NEXT_INSN (last
))
1796 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
1799 if (BARRIER_P (last
))
1800 BB_END (bb
) = PREV_INSN (last
);
1803 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
1808 /* Expand basic block BB from GIMPLE trees to RTL. */
1811 expand_gimple_basic_block (basic_block bb
)
1813 gimple_stmt_iterator gsi
;
1822 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
1825 /* Note that since we are now transitioning from GIMPLE to RTL, we
1826 cannot use the gsi_*_bb() routines because they expect the basic
1827 block to be in GIMPLE, instead of RTL. Therefore, we need to
1828 access the BB sequence directly. */
1829 stmts
= bb_seq (bb
);
1830 bb
->il
.gimple
= NULL
;
1831 rtl_profile_for_bb (bb
);
1832 init_rtl_bb_info (bb
);
1833 bb
->flags
|= BB_RTL
;
1835 /* Remove the RETURN_EXPR if we may fall though to the exit
1837 gsi
= gsi_last (stmts
);
1838 if (!gsi_end_p (gsi
)
1839 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
1841 gimple ret_stmt
= gsi_stmt (gsi
);
1843 gcc_assert (single_succ_p (bb
));
1844 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
1846 if (bb
->next_bb
== EXIT_BLOCK_PTR
1847 && !gimple_return_retval (ret_stmt
))
1849 gsi_remove (&gsi
, false);
1850 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
1854 gsi
= gsi_start (stmts
);
1855 if (!gsi_end_p (gsi
))
1857 stmt
= gsi_stmt (gsi
);
1858 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1862 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
1866 last
= get_last_insn ();
1870 tree stmt_tree
= gimple_to_tree (stmt
);
1871 expand_expr_stmt (stmt_tree
);
1872 release_stmt_tree (stmt
, stmt_tree
);
1877 emit_label ((rtx
) *elt
);
          /* Java emits line number notes at the top of labels.
             ??? Make this go away once line number notes are obsoleted.  */
1881 BB_HEAD (bb
) = NEXT_INSN (last
);
1882 if (NOTE_P (BB_HEAD (bb
)))
1883 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
1884 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
1886 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1889 note
= BB_HEAD (bb
) = emit_note (NOTE_INSN_BASIC_BLOCK
);
1891 NOTE_BASIC_BLOCK (note
) = bb
;
1893 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
1895 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1896 e
->flags
&= ~EDGE_EXECUTABLE
;
1898 /* At the moment not all abnormal edges match the RTL representation.
1899 It is safe to remove them here as find_many_sub_basic_blocks will
1900 rediscover them. In the future we should get this fixed properly. */
1901 if (e
->flags
& EDGE_ABNORMAL
)
1907 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
1909 gimple stmt
= gsi_stmt (gsi
);
1912 /* Expand this statement, then evaluate the resulting RTL and
1913 fixup the CFG accordingly. */
1914 if (gimple_code (stmt
) == GIMPLE_COND
)
1916 new_bb
= expand_gimple_cond (bb
, stmt
);
1922 if (is_gimple_call (stmt
) && gimple_call_tail_p (stmt
))
1925 new_bb
= expand_gimple_tailcall (bb
, stmt
, &can_fallthru
);
1936 tree stmt_tree
= gimple_to_tree (stmt
);
1937 last
= get_last_insn ();
1938 expand_expr_stmt (stmt_tree
);
1939 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1940 release_stmt_tree (stmt
, stmt_tree
);
1945 /* Expand implicit goto. */
1946 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1948 if (e
->flags
& EDGE_FALLTHRU
)
1952 if (e
&& e
->dest
!= bb
->next_bb
)
1954 emit_jump (label_rtx_for_bb (e
->dest
));
1956 set_curr_insn_source_location (e
->goto_locus
);
1957 e
->flags
&= ~EDGE_FALLTHRU
;
1960 do_pending_stack_adjust ();
1962 /* Find the block tail. The last insn in the block is the insn
1963 before a barrier and/or table jump insn. */
1964 last
= get_last_insn ();
1965 if (BARRIER_P (last
))
1966 last
= PREV_INSN (last
);
1967 if (JUMP_TABLE_DATA_P (last
))
1968 last
= PREV_INSN (PREV_INSN (last
));
1971 update_bb_for_insn (bb
);
1977 /* Create a basic block for initialization code. */
1980 construct_init_block (void)
1982 basic_block init_block
, first_block
;
1986 /* Multiple entry points not supported yet. */
1987 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR
->succs
) == 1);
1988 init_rtl_bb_info (ENTRY_BLOCK_PTR
);
1989 init_rtl_bb_info (EXIT_BLOCK_PTR
);
1990 ENTRY_BLOCK_PTR
->flags
|= BB_RTL
;
1991 EXIT_BLOCK_PTR
->flags
|= BB_RTL
;
1993 e
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0);
1995 /* When entry edge points to first basic block, we don't need jump,
1996 otherwise we have to jump into proper target. */
1997 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR
->next_bb
)
1999 tree label
= gimple_block_label (e
->dest
);
2001 emit_jump (label_rtx (label
));
2005 flags
= EDGE_FALLTHRU
;
2007 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
2010 init_block
->frequency
= ENTRY_BLOCK_PTR
->frequency
;
2011 init_block
->count
= ENTRY_BLOCK_PTR
->count
;
2014 first_block
= e
->dest
;
2015 redirect_edge_succ (e
, init_block
);
2016 e
= make_edge (init_block
, first_block
, flags
);
2019 e
= make_edge (init_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
2020 e
->probability
= REG_BR_PROB_BASE
;
2021 e
->count
= ENTRY_BLOCK_PTR
->count
;
2023 update_bb_for_insn (init_block
);
2027 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2028 found in the block tree. */
2031 set_block_levels (tree block
, int level
)
2035 BLOCK_NUMBER (block
) = level
;
2036 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
2037 block
= BLOCK_CHAIN (block
);
2041 /* Create a block containing landing pads and similar stuff. */
2044 construct_exit_block (void)
2046 rtx head
= get_last_insn ();
2048 basic_block exit_block
;
2052 rtx orig_end
= BB_END (EXIT_BLOCK_PTR
->prev_bb
);
2054 rtl_profile_for_bb (EXIT_BLOCK_PTR
);
2056 /* Make sure the locus is set to the end of the function, so that
2057 epilogue line numbers and warnings are set properly. */
2058 if (cfun
->function_end_locus
!= UNKNOWN_LOCATION
)
2059 input_location
= cfun
->function_end_locus
;
2061 /* The following insns belong to the top scope. */
2062 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
2064 /* Generate rtl for function exit. */
2065 expand_function_end ();
2067 end
= get_last_insn ();
  /* While emitting the function end we could move the end of the last basic
     block.  */
2072 BB_END (EXIT_BLOCK_PTR
->prev_bb
) = orig_end
;
2073 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
2074 head
= NEXT_INSN (head
);
2075 exit_block
= create_basic_block (NEXT_INSN (head
), end
,
2076 EXIT_BLOCK_PTR
->prev_bb
);
2077 exit_block
->frequency
= EXIT_BLOCK_PTR
->frequency
;
2078 exit_block
->count
= EXIT_BLOCK_PTR
->count
;
2081 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR
->preds
))
2083 e
= EDGE_PRED (EXIT_BLOCK_PTR
, ix
);
2084 if (!(e
->flags
& EDGE_ABNORMAL
))
2085 redirect_edge_succ (e
, exit_block
);
2090 e
= make_edge (exit_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
2091 e
->probability
= REG_BR_PROB_BASE
;
2092 e
->count
= EXIT_BLOCK_PTR
->count
;
2093 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR
->preds
)
2096 e
->count
-= e2
->count
;
2097 exit_block
->count
-= e2
->count
;
2098 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
2102 if (exit_block
->count
< 0)
2103 exit_block
->count
= 0;
2104 if (exit_block
->frequency
< 0)
2105 exit_block
->frequency
= 0;
2106 update_bb_for_insn (exit_block
);
2109 /* Helper function for discover_nonconstant_array_refs.
2110 Look for ARRAY_REF nodes with non-constant indexes and mark them
2114 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
2115 void *data ATTRIBUTE_UNUSED
)
2119 if (IS_TYPE_OR_DECL_P (t
))
2121 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2123 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2124 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
2125 && (!TREE_OPERAND (t
, 2)
2126 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
2127 || (TREE_CODE (t
) == COMPONENT_REF
2128 && (!TREE_OPERAND (t
,2)
2129 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
2130 || TREE_CODE (t
) == BIT_FIELD_REF
2131 || TREE_CODE (t
) == REALPART_EXPR
2132 || TREE_CODE (t
) == IMAGPART_EXPR
2133 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
2134 || CONVERT_EXPR_P (t
))
2135 t
= TREE_OPERAND (t
, 0);
2137 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2139 t
= get_base_address (t
);
2140 if (t
&& DECL_P (t
))
2141 TREE_ADDRESSABLE (t
) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
2156 discover_nonconstant_array_refs (void)
2159 gimple_stmt_iterator gsi
;
2162 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2164 gimple stmt
= gsi_stmt (gsi
);
2165 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary, incoming_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  /* Update stack boundary if needed.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  if (crtl->parm_stack_boundary > INCOMING_STACK_BOUNDARY)
    incoming_stack_boundary = crtl->parm_stack_boundary;
  else
    incoming_stack_boundary = INCOMING_STACK_BOUNDARY;

  crtl->stack_realign_needed
    = incoming_stack_boundary < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
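
/* A sketch of when DRAP matters (illustration only; the exact
   conditions are target specific).  On a target that supports stack
   realignment, a function such as

       void g (int n, ...)
       {
         __attribute__ ((aligned (32))) char buf[64];
         ...
       }

   needs its frame realigned for BUF, yet the anonymous arguments must
   still be located relative to the incoming stack pointer.  The
   target's get_drap_rtx hook then returns a register to act as the
   dynamic realign argument pointer, installed above as
   crtl->args.internal_arg_pointer; it returns NULL whenever no such
   register is needed.  */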
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}
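
/* Rough picture of what a single statement goes through in the pass
   above (illustration only; the exact insns depend on the target).
   A GIMPLE assignment such as

       a_1 = b_2 + c_3;

   is rebuilt as the tree 'a = b + c' (see gimple_assign_rhs_to_tree)
   inside expand_gimple_basic_block and then expanded into RTL along
   the lines of

       (insn (set (reg:SI 60 [ a ])
                  (plus:SI (reg:SI 61 [ b ]) (reg:SI 62 [ c ]))))

   with each basic block converted in place, which is why the CFG
   temporarily holds a mix of GIMPLE and RTL blocks as the comment
   before gimple_expand_cfg warns.  */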
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_trees,                           /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
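
/* The property fields above are how the pass manager slots expansion
   into the pipeline: the pass requires lowered GIMPLE with a CFG
   (PROP_gimple_leh | PROP_cfg), provides PROP_rtl, and destroys
   PROP_trees once it has run.  TODO_dump_func asks the pass manager to
   dump the freshly generated RTL (the "expand" dump, enabled with
   -fdump-rtl-expand).  */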