gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
47 /* Return an expression tree corresponding to the RHS of GIMPLE
48 statement STMT. */
50 tree
51 gimple_assign_rhs_to_tree (gimple stmt)
53 tree t;
54 enum gimple_rhs_class grhs_class;
56 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
58 if (grhs_class == GIMPLE_BINARY_RHS)
59 t = build2 (gimple_assign_rhs_code (stmt),
60 TREE_TYPE (gimple_assign_lhs (stmt)),
61 gimple_assign_rhs1 (stmt),
62 gimple_assign_rhs2 (stmt));
63 else if (grhs_class == GIMPLE_UNARY_RHS)
64 t = build1 (gimple_assign_rhs_code (stmt),
65 TREE_TYPE (gimple_assign_lhs (stmt)),
66 gimple_assign_rhs1 (stmt));
67 else if (grhs_class == GIMPLE_SINGLE_RHS)
68 t = gimple_assign_rhs1 (stmt);
69 else
70 gcc_unreachable ();
72 return t;
75 /* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
76 statement STMT. */
78 static tree
79 gimple_cond_pred_to_tree (gimple stmt)
81 return build2 (gimple_cond_code (stmt), boolean_type_node,
82 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
85 /* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
86 inside *TP. DATA is the location to set. */
88 static tree
89 set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
91 location_t *loc = (location_t *) data;
92 if (EXPR_P (*tp))
93 SET_EXPR_LOCATION (*tp, *loc);
95 return NULL_TREE;
99 /* RTL expansion has traditionally been done on trees, so the
100 transition to doing it on GIMPLE tuples is very invasive to the RTL
101 expander. To facilitate the transition, this function takes a
102 GIMPLE tuple STMT and returns the same statement in the form of a
103 tree. */
105 static tree
106 gimple_to_tree (gimple stmt)
108 tree t;
109 int rn;
110 tree_ann_common_t ann;
111 location_t loc;
113 switch (gimple_code (stmt))
115 case GIMPLE_ASSIGN:
117 tree lhs = gimple_assign_lhs (stmt);
119 t = gimple_assign_rhs_to_tree (stmt);
120 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
121 if (gimple_assign_nontemporal_move_p (stmt))
122 MOVE_NONTEMPORAL (t) = true;
124 break;
126 case GIMPLE_COND:
127 t = gimple_cond_pred_to_tree (stmt);
128 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
129 break;
131 case GIMPLE_GOTO:
132 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
133 break;
135 case GIMPLE_LABEL:
136 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
137 break;
139 case GIMPLE_RETURN:
141 tree retval = gimple_return_retval (stmt);
143 if (retval && retval != error_mark_node)
145 tree result = DECL_RESULT (current_function_decl);
147 /* If we are not returning the current function's RESULT_DECL,
148 build an assignment to it. */
149 if (retval != result)
151 /* I believe that a function's RESULT_DECL is unique. */
152 gcc_assert (TREE_CODE (retval) != RESULT_DECL);
154 retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
155 result, retval);
158 t = build1 (RETURN_EXPR, void_type_node, retval);
160 break;
162 case GIMPLE_ASM:
164 size_t i, n;
165 tree out, in, cl;
166 const char *s;
168 out = NULL_TREE;
169 n = gimple_asm_noutputs (stmt);
170 if (n > 0)
172 t = out = gimple_asm_output_op (stmt, 0);
173 for (i = 1; i < n; i++)
175 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
176 t = gimple_asm_output_op (stmt, i);
180 in = NULL_TREE;
181 n = gimple_asm_ninputs (stmt);
182 if (n > 0)
184 t = in = gimple_asm_input_op (stmt, 0);
185 for (i = 1; i < n; i++)
187 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
188 t = gimple_asm_input_op (stmt, i);
192 cl = NULL_TREE;
193 n = gimple_asm_nclobbers (stmt);
194 if (n > 0)
196 t = cl = gimple_asm_clobber_op (stmt, 0);
197 for (i = 1; i < n; i++)
199 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
200 t = gimple_asm_clobber_op (stmt, i);
204 s = gimple_asm_string (stmt);
205 t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
206 out, in, cl);
207 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
208 ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
210 break;
212 case GIMPLE_CALL:
214 size_t i;
215 tree fn;
216 tree_ann_common_t ann;
218 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
220 CALL_EXPR_FN (t) = gimple_call_fn (stmt);
221 TREE_TYPE (t) = gimple_call_return_type (stmt);
222 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
224 for (i = 0; i < gimple_call_num_args (stmt); i++)
225 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
227 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
228 TREE_SIDE_EFFECTS (t) = 1;
230 if (gimple_call_flags (stmt) & ECF_NOTHROW)
231 TREE_NOTHROW (t) = 1;
233 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
234 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
235 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
236 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
237 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
239 /* If the call has a LHS then create a MODIFY_EXPR to hold it. */
241 tree lhs = gimple_call_lhs (stmt);
243 if (lhs)
244 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
247 /* Record the original call statement, as it may be used
248 to retrieve profile information during expansion. */
250 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
251 && DECL_BUILT_IN (fn))
253 ann = get_tree_common_ann (t);
254 ann->stmt = stmt;
257 break;
259 case GIMPLE_SWITCH:
261 tree label_vec;
262 size_t i;
263 tree elt = gimple_switch_label (stmt, 0);
265 label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
267 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
269 for (i = 1; i < gimple_switch_num_labels (stmt); i++)
270 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
272 /* The default case in a SWITCH_EXPR must be at the end of
273 the label vector. */
274 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
276 else
278 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
279 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
282 t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
283 NULL, label_vec);
285 break;
287 case GIMPLE_NOP:
288 case GIMPLE_PREDICT:
289 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
290 break;
292 case GIMPLE_RESX:
293 t = build_resx (gimple_resx_region (stmt));
294 break;
296 default:
297 if (errorcount == 0)
299 error ("Unrecognized GIMPLE statement during RTL expansion");
300 print_gimple_stmt (stderr, stmt, 4, 0);
301 gcc_unreachable ();
303 else
305 /* Ignore any bad gimple codes if we're going to die anyhow,
306 so we can at least set TREE_ASM_WRITTEN and have the rest
307 of compilation advance without sudden ICE death. */
308 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
309 break;
313 /* If STMT is inside an exception region, record it in the generated
314 expression. */
315 rn = lookup_stmt_eh_region (stmt);
316 if (rn >= 0)
318 tree call = get_call_expr_in (t);
320 ann = get_tree_common_ann (t);
321 ann->rn = rn;
323 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
324 the CALL_EXPR, not the assignment statement, for the EH region number. */
325 if (call && call != t)
327 ann = get_tree_common_ann (call);
328 ann->rn = rn;
332 /* Set EXPR_LOCATION in all the embedded expressions. */
333 loc = gimple_location (stmt);
334 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
336 TREE_BLOCK (t) = gimple_block (stmt);
338 return t;
342 /* Release back to GC memory allocated by gimple_to_tree. */
344 static void
345 release_stmt_tree (gimple stmt, tree stmt_tree)
347 tree_ann_common_t ann;
349 switch (gimple_code (stmt))
351 case GIMPLE_ASSIGN:
352 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
353 ggc_free (TREE_OPERAND (stmt_tree, 1));
354 break;
355 case GIMPLE_COND:
356 ggc_free (COND_EXPR_COND (stmt_tree));
357 break;
358 case GIMPLE_RETURN:
359 if (TREE_OPERAND (stmt_tree, 0)
360 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
361 ggc_free (TREE_OPERAND (stmt_tree, 0));
362 break;
363 case GIMPLE_CALL:
364 if (gimple_call_lhs (stmt))
366 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
367 if (ann)
368 ggc_free (ann);
369 ggc_free (TREE_OPERAND (stmt_tree, 1));
371 break;
372 default:
373 break;
375 ann = tree_common_ann (stmt_tree);
376 if (ann)
377 ggc_free (ann);
378 ggc_free (stmt_tree);
382 /* Verify that exactly one jump instruction has been emitted since LAST and
383 attach a REG_BR_PROB note specifying its probability.
384 ??? We really ought to pass the probability down to RTL expanders and let it
385 re-distribute it when the conditional expands into multiple conditionals.
386 This is however difficult to do. */
387 void
388 add_reg_br_prob_note (rtx last, int probability)
390 if (profile_status == PROFILE_ABSENT)
391 return;
392 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
393 if (JUMP_P (last))
395 /* It is common to emit a condjump-around-jump sequence when we don't know
396 how to reverse the conditional. Special-case this. */
397 if (!any_condjump_p (last)
398 || !JUMP_P (NEXT_INSN (last))
399 || !simplejump_p (NEXT_INSN (last))
400 || !NEXT_INSN (NEXT_INSN (last))
401 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
402 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
403 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
404 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
405 goto failed;
406 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
407 add_reg_note (last, REG_BR_PROB,
408 GEN_INT (REG_BR_PROB_BASE - probability));
409 return;
411 if (!last || !JUMP_P (last) || !any_condjump_p (last))
412 goto failed;
413 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
414 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
415 return;
416 failed:
417 if (dump_file)
418 fprintf (dump_file, "Failed to add probability note\n");
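/* Illustrative numbers: with REG_BR_PROB_BASE == 10000, a branch the profile
   predicts taken 90% of the time arrives here with PROBABILITY == 9000.  In
   the condjump-around-jump case the note is attached to the inverted
   conditional jump, so it is recorded as 10000 - 9000 == 1000.  */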
422 #ifndef STACK_ALIGNMENT_NEEDED
423 #define STACK_ALIGNMENT_NEEDED 1
424 #endif
427 /* This structure holds data relevant to one variable that will be
428 placed in a stack slot. */
429 struct stack_var
431 /* The variable. */
432 tree decl;
434 /* The offset of the variable. During partitioning, this is the
435 offset relative to the partition. After partitioning, this
436 is relative to the stack frame. */
437 HOST_WIDE_INT offset;
439 /* Initially, the size of the variable. Later, the size of the partition,
440 if this variable becomes its partition's representative. */
441 HOST_WIDE_INT size;
443 /* The *byte* alignment required for this variable. Or, as with the
444 size, the alignment for this partition. */
445 unsigned int alignb;
447 /* The partition representative. */
448 size_t representative;
450 /* The next stack variable in the partition, or EOC. */
451 size_t next;
454 #define EOC ((size_t)-1)
456 /* We have an array of such objects while deciding allocation. */
457 static struct stack_var *stack_vars;
458 static size_t stack_vars_alloc;
459 static size_t stack_vars_num;
461 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
462 is non-decreasing. */
463 static size_t *stack_vars_sorted;
465 /* We have an interference graph between such objects. This graph
466 is lower triangular. */
467 static bool *stack_vars_conflict;
468 static size_t stack_vars_conflict_alloc;
470 /* The phase of the stack frame. This is the known misalignment of
471 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
472 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
473 static int frame_phase;
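/* Worked example (hypothetical target values): with PREFERRED_STACK_BOUNDARY
   == 128 (16 bytes) and STARTING_FRAME_OFFSET == 8, expand_used_vars below
   computes frame_phase = 16 - 8 == 8, so frame_offset + frame_phase starts
   out at 16, a multiple of the 16-byte boundary as the invariant requires.  */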
475 /* Used during expand_used_vars to remember if we saw any decls for
476 which we'd like to enable stack smashing protection. */
477 static bool has_protected_decls;
479 /* Used during expand_used_vars. Remember if we saw a character buffer
480 smaller than our cutoff threshold. Used for -Wstack-protector. */
481 static bool has_short_buffer;
483 /* Discover the byte alignment to use for DECL. Ignore any alignment
484 we cannot honor given the expected alignment of the stack boundary. */
486 static unsigned int
487 get_decl_align_unit (tree decl)
489 unsigned int align;
491 align = LOCAL_DECL_ALIGNMENT (decl);
493 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
494 align = MAX_SUPPORTED_STACK_ALIGNMENT;
496 if (SUPPORTS_STACK_ALIGNMENT)
498 if (crtl->stack_alignment_estimated < align)
500 gcc_assert(!crtl->stack_realign_processed);
501 crtl->stack_alignment_estimated = align;
505 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
506 So here we only make sure stack_alignment_needed >= align. */
507 if (crtl->stack_alignment_needed < align)
508 crtl->stack_alignment_needed = align;
509 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
510 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
512 return align / BITS_PER_UNIT;
515 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
516 Return the frame offset. */
518 static HOST_WIDE_INT
519 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
521 HOST_WIDE_INT offset, new_frame_offset;
523 new_frame_offset = frame_offset;
524 if (FRAME_GROWS_DOWNWARD)
526 new_frame_offset -= size + frame_phase;
527 new_frame_offset &= -align;
528 new_frame_offset += frame_phase;
529 offset = new_frame_offset;
531 else
533 new_frame_offset -= frame_phase;
534 new_frame_offset += align - 1;
535 new_frame_offset &= -align;
536 new_frame_offset += frame_phase;
537 offset = new_frame_offset;
538 new_frame_offset += size;
540 frame_offset = new_frame_offset;
542 if (frame_offset_overflow (frame_offset, cfun->decl))
543 frame_offset = offset = 0;
545 return offset;
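/* Illustration of the downward-growing case: with frame_phase == 0,
   frame_offset == -4 and a request for SIZE == 8 at ALIGN == 8, the new
   frame offset is (-4 - 8) & -8 == -16, so the variable occupies bytes
   [-16, -8) of the frame and frame_offset is left at -16.  */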
548 /* Accumulate DECL into STACK_VARS. */
550 static void
551 add_stack_var (tree decl)
553 if (stack_vars_num >= stack_vars_alloc)
555 if (stack_vars_alloc)
556 stack_vars_alloc = stack_vars_alloc * 3 / 2;
557 else
558 stack_vars_alloc = 32;
559 stack_vars
560 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
562 stack_vars[stack_vars_num].decl = decl;
563 stack_vars[stack_vars_num].offset = 0;
564 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
565 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
567 /* All variables are initially in their own partition. */
568 stack_vars[stack_vars_num].representative = stack_vars_num;
569 stack_vars[stack_vars_num].next = EOC;
571 /* Ensure that this decl doesn't get put onto the list twice. */
572 SET_DECL_RTL (decl, pc_rtx);
574 stack_vars_num++;
577 /* Compute the linear index of a lower-triangular coordinate (I, J). */
579 static size_t
580 triangular_index (size_t i, size_t j)
582 if (i < j)
584 size_t t;
585 t = i, i = j, j = t;
587 return (i * (i + 1)) / 2 + j;
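/* The lower triangle is numbered row by row, so (0,0) -> 0, (1,0) -> 1,
   (1,1) -> 2, (2,0) -> 3, and e.g. triangular_index (3, 1) == 3*4/2 + 1 == 7.
   The swap above makes the mapping symmetric in I and J.  */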
590 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
592 static void
593 resize_stack_vars_conflict (size_t n)
595 size_t size = triangular_index (n-1, n-1) + 1;
597 if (size <= stack_vars_conflict_alloc)
598 return;
600 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
601 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
602 (size - stack_vars_conflict_alloc) * sizeof (bool));
603 stack_vars_conflict_alloc = size;
606 /* Make the decls associated with LUIDs X and Y conflict. */
608 static void
609 add_stack_var_conflict (size_t x, size_t y)
611 size_t index = triangular_index (x, y);
612 gcc_assert (index < stack_vars_conflict_alloc);
613 stack_vars_conflict[index] = true;
616 /* Check whether the decls associated with LUIDs X and Y conflict. */
618 static bool
619 stack_var_conflict_p (size_t x, size_t y)
621 size_t index = triangular_index (x, y);
622 gcc_assert (index < stack_vars_conflict_alloc);
623 return stack_vars_conflict[index];
626 /* Returns true if TYPE is or contains a union type. */
628 static bool
629 aggregate_contains_union_type (tree type)
631 tree field;
633 if (TREE_CODE (type) == UNION_TYPE
634 || TREE_CODE (type) == QUAL_UNION_TYPE)
635 return true;
636 if (TREE_CODE (type) == ARRAY_TYPE)
637 return aggregate_contains_union_type (TREE_TYPE (type));
638 if (TREE_CODE (type) != RECORD_TYPE)
639 return false;
641 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
642 if (TREE_CODE (field) == FIELD_DECL)
643 if (aggregate_contains_union_type (TREE_TYPE (field)))
644 return true;
646 return false;
649 /* A subroutine of expand_used_vars. If two variables X and Y have alias
650 sets that do not conflict, then add a conflict for these variables
651 in the interference graph. We also need to make sure to add conflicts
652 for structures that contain unions. Otherwise RTL alias analysis,
653 applying the type-based aliasing rules, may decide that for two
654 overlapping union temporaries { short s; int i; } accesses to the same
655 memory through different types do not alias, and happily reorder stores
656 across the lifetime boundaries of the temporaries (see PR25654).
657 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
659 static void
660 add_alias_set_conflicts (void)
662 size_t i, j, n = stack_vars_num;
664 for (i = 0; i < n; ++i)
666 tree type_i = TREE_TYPE (stack_vars[i].decl);
667 bool aggr_i = AGGREGATE_TYPE_P (type_i);
668 bool contains_union;
670 contains_union = aggregate_contains_union_type (type_i);
671 for (j = 0; j < i; ++j)
673 tree type_j = TREE_TYPE (stack_vars[j].decl);
674 bool aggr_j = AGGREGATE_TYPE_P (type_j);
675 if (aggr_i != aggr_j
676 /* Either the objects conflict by means of type based
677 aliasing rules, or we need to add a conflict. */
678 || !objects_must_conflict_p (type_i, type_j)
679 /* In case the types do not conflict ensure that access
680 to elements will conflict. In case of unions we have
681 to be careful as type based aliasing rules may say
682 access to the same memory does not conflict. So play
683 safe and add a conflict in this case. */
684 || contains_union)
685 add_stack_var_conflict (i, j);
690 /* A subroutine of partition_stack_vars. A comparison function for qsort,
691 sorting an array of indices by the size of the object. */
693 static int
694 stack_var_size_cmp (const void *a, const void *b)
696 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
697 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
698 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
699 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
701 if (sa < sb)
702 return -1;
703 if (sa > sb)
704 return 1;
705 /* For stack variables of the same size use the uid of the decl
706 to make the sort stable. */
707 if (uida < uidb)
708 return -1;
709 if (uida > uidb)
710 return 1;
711 return 0;
714 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
715 partitioning algorithm. Partitions A and B are known to be non-conflicting.
716 Merge them into a single partition A.
718 At the same time, add OFFSET to all variables in partition B. At the end
719 of the partitioning process we'll have a nice block that is easy to lay out
720 within the stack frame. */
722 static void
723 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
725 size_t i, last;
727 /* Update each element of partition B with the given offset,
728 and merge them into partition A. */
729 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
731 stack_vars[i].offset += offset;
732 stack_vars[i].representative = a;
734 stack_vars[last].next = stack_vars[a].next;
735 stack_vars[a].next = b;
737 /* Update the required alignment of partition A to account for B. */
738 if (stack_vars[a].alignb < stack_vars[b].alignb)
739 stack_vars[a].alignb = stack_vars[b].alignb;
741 /* Update the interference graph and merge the conflicts. */
742 for (last = stack_vars_num, i = 0; i < last; ++i)
743 if (stack_var_conflict_p (b, i))
744 add_stack_var_conflict (a, i);
747 /* A subroutine of expand_used_vars. Binpack the variables into
748 partitions constrained by the interference graph. The overall
749 algorithm used is as follows:
751 Sort the objects by size.
752 For each object A {
753 S = size(A)
754 O = 0
755 loop {
756 Look for the largest non-conflicting object B with size <= S.
757 UNION (A, B)
758 offset(B) = O
759 O += size(B)
760 S -= size(B)
765 static void
766 partition_stack_vars (void)
768 size_t si, sj, n = stack_vars_num;
770 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
771 for (si = 0; si < n; ++si)
772 stack_vars_sorted[si] = si;
774 if (n == 1)
775 return;
777 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
779 /* Special case: detect when all variables conflict, and thus we can't
780 do anything during the partitioning loop. It isn't uncommon (with
781 C code at least) to declare all variables at the top of the function,
782 and if we're not inlining, then all variables will be in the same scope.
783 Take advantage of very fast libc routines for this scan. */
784 gcc_assert (sizeof(bool) == sizeof(char));
785 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
786 return;
788 for (si = 0; si < n; ++si)
790 size_t i = stack_vars_sorted[si];
791 HOST_WIDE_INT isize = stack_vars[i].size;
792 HOST_WIDE_INT offset = 0;
794 for (sj = si; sj-- > 0; )
796 size_t j = stack_vars_sorted[sj];
797 HOST_WIDE_INT jsize = stack_vars[j].size;
798 unsigned int jalign = stack_vars[j].alignb;
800 /* Ignore objects that aren't partition representatives. */
801 if (stack_vars[j].representative != j)
802 continue;
804 /* Ignore objects too large for the remaining space. */
805 if (isize < jsize)
806 continue;
808 /* Ignore conflicting objects. */
809 if (stack_var_conflict_p (i, j))
810 continue;
812 /* Refine the remaining space check to include alignment. */
813 if (offset & (jalign - 1))
815 HOST_WIDE_INT toff = offset;
816 toff += jalign - 1;
817 toff &= -(HOST_WIDE_INT)jalign;
818 if (isize - (toff - offset) < jsize)
819 continue;
821 isize -= toff - offset;
822 offset = toff;
825 /* UNION the objects, placing J at OFFSET. */
826 union_stack_vars (i, j, offset);
828 isize -= jsize;
829 if (isize == 0)
830 break;
835 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
837 static void
838 dump_stack_var_partition (void)
840 size_t si, i, j, n = stack_vars_num;
842 for (si = 0; si < n; ++si)
844 i = stack_vars_sorted[si];
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars[i].representative != i)
848 continue;
850 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i, stack_vars[i].size,
852 stack_vars[i].alignb);
854 for (j = i; j != EOC; j = stack_vars[j].next)
856 fputc ('\t', dump_file);
857 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
858 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
859 stack_vars[j].offset);
864 /* Assign rtl to DECL at frame offset OFFSET. */
866 static void
867 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
869 /* Alignment is unsigned. */
870 unsigned HOST_WIDE_INT align;
871 rtx x;
873 /* If this fails, we've overflowed the stack frame. Error nicely? */
874 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
876 x = plus_constant (virtual_stack_vars_rtx, offset);
877 x = gen_rtx_MEM (DECL_MODE (decl), x);
879 /* Set alignment we actually gave this decl. */
880 offset -= frame_phase;
881 align = offset & -offset;
882 align *= BITS_PER_UNIT;
883 if (align == 0)
884 align = STACK_BOUNDARY;
885 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
886 align = MAX_SUPPORTED_STACK_ALIGNMENT;
887 DECL_ALIGN (decl) = align;
888 DECL_USER_ALIGN (decl) = 0;
890 set_mem_attributes (x, decl, true);
891 SET_DECL_RTL (decl, x);
894 /* A subroutine of expand_used_vars. Give each partition representative
895 a unique location within the stack frame. Update each partition member
896 with that location. */
898 static void
899 expand_stack_vars (bool (*pred) (tree))
901 size_t si, i, j, n = stack_vars_num;
903 for (si = 0; si < n; ++si)
905 HOST_WIDE_INT offset;
907 i = stack_vars_sorted[si];
909 /* Skip variables that aren't partition representatives, for now. */
910 if (stack_vars[i].representative != i)
911 continue;
913 /* Skip variables that have already had rtl assigned. See also
914 add_stack_var where we perpetrate this pc_rtx hack. */
915 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
916 continue;
918 /* Check the predicate to see whether this variable should be
919 allocated in this pass. */
920 if (pred && !pred (stack_vars[i].decl))
921 continue;
923 offset = alloc_stack_frame_space (stack_vars[i].size,
924 stack_vars[i].alignb);
926 /* Create rtl for each variable based on their location within the
927 partition. */
928 for (j = i; j != EOC; j = stack_vars[j].next)
930 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
931 expand_one_stack_var_at (stack_vars[j].decl,
932 stack_vars[j].offset + offset);
937 /* Take into account all sizes of partitions and reset DECL_RTLs. */
938 static HOST_WIDE_INT
939 account_stack_vars (void)
941 size_t si, j, i, n = stack_vars_num;
942 HOST_WIDE_INT size = 0;
944 for (si = 0; si < n; ++si)
946 i = stack_vars_sorted[si];
948 /* Skip variables that aren't partition representatives, for now. */
949 if (stack_vars[i].representative != i)
950 continue;
952 size += stack_vars[i].size;
953 for (j = i; j != EOC; j = stack_vars[j].next)
954 SET_DECL_RTL (stack_vars[j].decl, NULL);
956 return size;
959 /* A subroutine of expand_one_var. Called to immediately assign rtl
960 to a variable to be allocated in the stack frame. */
962 static void
963 expand_one_stack_var (tree var)
965 HOST_WIDE_INT size, offset, align;
967 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
968 align = get_decl_align_unit (var);
969 offset = alloc_stack_frame_space (size, align);
971 expand_one_stack_var_at (var, offset);
974 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
975 that will reside in a hard register. */
977 static void
978 expand_one_hard_reg_var (tree var)
980 rest_of_decl_compilation (var, 0, 0);
983 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
984 that will reside in a pseudo register. */
986 static void
987 expand_one_register_var (tree var)
989 tree type = TREE_TYPE (var);
990 int unsignedp = TYPE_UNSIGNED (type);
991 enum machine_mode reg_mode
992 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
993 rtx x = gen_reg_rtx (reg_mode);
995 SET_DECL_RTL (var, x);
997 /* Note if the object is a user variable. */
998 if (!DECL_ARTIFICIAL (var))
999 mark_user_reg (x);
1001 if (POINTER_TYPE_P (type))
1002 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
1005 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1006 has some associated error, e.g. its type is error-mark. We just need
1007 to pick something that won't crash the rest of the compiler. */
1009 static void
1010 expand_one_error_var (tree var)
1012 enum machine_mode mode = DECL_MODE (var);
1013 rtx x;
1015 if (mode == BLKmode)
1016 x = gen_rtx_MEM (BLKmode, const0_rtx);
1017 else if (mode == VOIDmode)
1018 x = const0_rtx;
1019 else
1020 x = gen_reg_rtx (mode);
1022 SET_DECL_RTL (var, x);
1025 /* A subroutine of expand_one_var. VAR is a variable that will be
1026 allocated to the local stack frame. Return true if we wish to
1027 add VAR to STACK_VARS so that it will be coalesced with other
1028 variables. Return false to allocate VAR immediately.
1030 This function is used to reduce the number of variables considered
1031 for coalescing, which reduces the size of the quadratic problem. */
1033 static bool
1034 defer_stack_allocation (tree var, bool toplevel)
1036 /* If stack protection is enabled, *all* stack variables must be deferred,
1037 so that we can re-order the strings to the top of the frame. */
1038 if (flag_stack_protect)
1039 return true;
1041 /* Variables in the outermost scope automatically conflict with
1042 every other variable. The only reason to want to defer them
1043 at all is that, after sorting, we can more efficiently pack
1044 small variables in the stack frame. Continue to defer at -O2. */
1045 if (toplevel && optimize < 2)
1046 return false;
1048 /* Without optimization, *most* variables are allocated from the
1049 stack, which makes the quadratic problem large exactly when we
1050 want compilation to proceed as quickly as possible. On the
1051 other hand, we don't want the function's stack frame size to
1052 get completely out of hand. So we avoid adding scalars and
1053 "small" aggregates to the list at all. */
1054 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1055 return false;
1057 return true;
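/* For illustration: without -fstack-protector and at -O0, a block-scope
   16-byte struct is therefore allocated immediately, while a block-scope
   100-byte array is deferred and considered for packing.  */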
1060 /* A subroutine of expand_used_vars. Expand one variable according to
1061 its flavor. Variables to be placed on the stack are not actually
1062 expanded yet, merely recorded.
1063 When REALLY_EXPAND is false, only record the stack variables to be allocated.
1064 Return the stack usage this variable is expected to take. */
1067 static HOST_WIDE_INT
1068 expand_one_var (tree var, bool toplevel, bool really_expand)
1070 if (SUPPORTS_STACK_ALIGNMENT
1071 && TREE_TYPE (var) != error_mark_node
1072 && TREE_CODE (var) == VAR_DECL)
1074 unsigned int align;
1076 /* Because we don't know if VAR will be in register or on stack,
1077 we conservatively assume it will be on stack even if VAR is
1078 eventually put into register after RA pass. For non-automatic
1079 variables, which won't be on stack, we collect alignment of
1080 type and ignore user specified alignment. */
1081 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1082 align = TYPE_ALIGN (TREE_TYPE (var));
1083 else
1084 align = DECL_ALIGN (var);
1086 if (crtl->stack_alignment_estimated < align)
1088 /* stack_alignment_estimated shouldn't change after the stack
1089 realignment decision has been made. */
1090 gcc_assert(!crtl->stack_realign_processed);
1091 crtl->stack_alignment_estimated = align;
1095 if (TREE_CODE (var) != VAR_DECL)
1097 else if (DECL_EXTERNAL (var))
1099 else if (DECL_HAS_VALUE_EXPR_P (var))
1101 else if (TREE_STATIC (var))
1103 else if (DECL_RTL_SET_P (var))
1105 else if (TREE_TYPE (var) == error_mark_node)
1107 if (really_expand)
1108 expand_one_error_var (var);
1110 else if (DECL_HARD_REGISTER (var))
1112 if (really_expand)
1113 expand_one_hard_reg_var (var);
1115 else if (use_register_for_decl (var))
1117 if (really_expand)
1118 expand_one_register_var (var);
1120 else if (defer_stack_allocation (var, toplevel))
1121 add_stack_var (var);
1122 else
1124 if (really_expand)
1125 expand_one_stack_var (var);
1126 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1128 return 0;
1131 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1132 expanding variables. Those variables that can be put into registers
1133 are allocated pseudos; those that can't are put on the stack.
1135 TOPLEVEL is true if this is the outermost BLOCK. */
1137 static void
1138 expand_used_vars_for_block (tree block, bool toplevel)
1140 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1141 tree t;
1143 old_sv_num = toplevel ? 0 : stack_vars_num;
1145 /* Expand all variables at this level. */
1146 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1147 if (TREE_USED (t))
1148 expand_one_var (t, toplevel, true);
1150 this_sv_num = stack_vars_num;
1152 /* Expand all variables at containing levels. */
1153 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1154 expand_used_vars_for_block (t, false);
1156 /* Since we do not track exact variable lifetimes (which is not even
1157 possible for variables whose address escapes), we mirror the block
1158 tree in the interference graph. Here we cause all variables at this
1159 level, and all sublevels, to conflict. Do make certain that a
1160 variable conflicts with itself. */
1161 if (old_sv_num < this_sv_num)
1163 new_sv_num = stack_vars_num;
1164 resize_stack_vars_conflict (new_sv_num);
1166 for (i = old_sv_num; i < new_sv_num; ++i)
1167 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1168 add_stack_var_conflict (i, j);
1172 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1173 and clear TREE_USED on all local variables. */
1175 static void
1176 clear_tree_used (tree block)
1178 tree t;
1180 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1181 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1182 TREE_USED (t) = 0;
1184 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1185 clear_tree_used (t);
1188 /* Examine TYPE and determine a bit mask of the following features. */
1190 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1191 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1192 #define SPCT_HAS_ARRAY 4
1193 #define SPCT_HAS_AGGREGATE 8
1195 static unsigned int
1196 stack_protect_classify_type (tree type)
1198 unsigned int ret = 0;
1199 tree t;
1201 switch (TREE_CODE (type))
1203 case ARRAY_TYPE:
1204 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1205 if (t == char_type_node
1206 || t == signed_char_type_node
1207 || t == unsigned_char_type_node)
1209 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1210 unsigned HOST_WIDE_INT len;
1212 if (!TYPE_SIZE_UNIT (type)
1213 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1214 len = max;
1215 else
1216 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1218 if (len < max)
1219 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1220 else
1221 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1223 else
1224 ret = SPCT_HAS_ARRAY;
1225 break;
1227 case UNION_TYPE:
1228 case QUAL_UNION_TYPE:
1229 case RECORD_TYPE:
1230 ret = SPCT_HAS_AGGREGATE;
1231 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1232 if (TREE_CODE (t) == FIELD_DECL)
1233 ret |= stack_protect_classify_type (TREE_TYPE (t));
1234 break;
1236 default:
1237 break;
1240 return ret;
1243 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1244 part of the local stack frame. Remember if we ever return nonzero for
1245 any variable in this function. The return value is the phase number in
1246 which the variable should be allocated. */
1248 static int
1249 stack_protect_decl_phase (tree decl)
1251 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1252 int ret = 0;
1254 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1255 has_short_buffer = true;
1257 if (flag_stack_protect == 2)
1259 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1260 && !(bits & SPCT_HAS_AGGREGATE))
1261 ret = 1;
1262 else if (bits & SPCT_HAS_ARRAY)
1263 ret = 2;
1265 else
1266 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1268 if (ret)
1269 has_protected_decls = true;
1271 return ret;
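/* For example, under -fstack-protector-all (flag_stack_protect == 2) a plain
   `char buf[N]' lands in phase 1 and an `int a[N]' in phase 2, while under
   plain -fstack-protector only character arrays of at least
   --param ssp-buffer-size bytes are segregated (phase 1).  */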
1274 /* Two helper routines that check for phase 1 and phase 2. These are used
1275 as callbacks for expand_stack_vars. */
1277 static bool
1278 stack_protect_decl_phase_1 (tree decl)
1280 return stack_protect_decl_phase (decl) == 1;
1283 static bool
1284 stack_protect_decl_phase_2 (tree decl)
1286 return stack_protect_decl_phase (decl) == 2;
1289 /* Ensure that variables in different stack protection phases conflict
1290 so that they are not merged and share the same stack slot. */
1292 static void
1293 add_stack_protection_conflicts (void)
1295 size_t i, j, n = stack_vars_num;
1296 unsigned char *phase;
1298 phase = XNEWVEC (unsigned char, n);
1299 for (i = 0; i < n; ++i)
1300 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1302 for (i = 0; i < n; ++i)
1304 unsigned char ph_i = phase[i];
1305 for (j = 0; j < i; ++j)
1306 if (ph_i != phase[j])
1307 add_stack_var_conflict (i, j);
1310 XDELETEVEC (phase);
1313 /* Create a decl for the guard at the top of the stack frame. */
1315 static void
1316 create_stack_guard (void)
1318 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
1319 TREE_THIS_VOLATILE (guard) = 1;
1320 TREE_USED (guard) = 1;
1321 expand_one_stack_var (guard);
1322 crtl->stack_protect_guard = guard;
1325 /* A subroutine of estimated_stack_frame_size. Walk down through the
1326 BLOCK tree accounting for the stack usage of the variables without
1327 actually expanding them.
1329 TOPLEVEL is true if this is the outermost BLOCK. */
1331 static HOST_WIDE_INT
1332 account_used_vars_for_block (tree block, bool toplevel)
1334 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1335 tree t;
1336 HOST_WIDE_INT size = 0;
1338 old_sv_num = toplevel ? 0 : stack_vars_num;
1340 /* Expand all variables at this level. */
1341 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1342 if (TREE_USED (t))
1343 size += expand_one_var (t, toplevel, false);
1345 this_sv_num = stack_vars_num;
1347 /* Expand all variables at containing levels. */
1348 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1349 size += account_used_vars_for_block (t, false);
1351 /* Since we do not track exact variable lifetimes (which is not even
1352 possible for variables whose address escapes), we mirror the block
1353 tree in the interference graph. Here we cause all variables at this
1354 level, and all sublevels, to conflict. Do make certain that a
1355 variable conflicts with itself. */
1356 if (old_sv_num < this_sv_num)
1358 new_sv_num = stack_vars_num;
1359 resize_stack_vars_conflict (new_sv_num);
1361 for (i = old_sv_num; i < new_sv_num; ++i)
1362 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1363 add_stack_var_conflict (i, j);
1365 return size;
1368 /* Prepare for expanding variables. */
1369 static void
1370 init_vars_expansion (void)
1372 tree t;
1373 /* Set TREE_USED on all variables in the local_decls. */
1374 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1375 TREE_USED (TREE_VALUE (t)) = 1;
1377 /* Clear TREE_USED on all variables associated with a block scope. */
1378 clear_tree_used (DECL_INITIAL (current_function_decl));
1380 /* Initialize local stack smashing state. */
1381 has_protected_decls = false;
1382 has_short_buffer = false;
1385 /* Free up stack variable graph data. */
1386 static void
1387 fini_vars_expansion (void)
1389 XDELETEVEC (stack_vars);
1390 XDELETEVEC (stack_vars_sorted);
1391 XDELETEVEC (stack_vars_conflict);
1392 stack_vars = NULL;
1393 stack_vars_alloc = stack_vars_num = 0;
1394 stack_vars_conflict = NULL;
1395 stack_vars_conflict_alloc = 0;
1398 /* Make a fair guess for the size of the stack frame of the current
1399 function. This doesn't have to be exact, the result is only used
1400 in the inline heuristics. So we don't want to run the full stack
1401 var packing algorithm (which is quadratic in the number of stack
1402 vars). Instead, we calculate the total size of all stack vars.
1403 This turns out to be a pretty fair estimate -- packing of stack
1404 vars doesn't happen very often. */
1406 HOST_WIDE_INT
1407 estimated_stack_frame_size (void)
1409 HOST_WIDE_INT size = 0;
1410 size_t i;
1411 tree t, outer_block = DECL_INITIAL (current_function_decl);
1413 init_vars_expansion ();
1415 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1417 tree var = TREE_VALUE (t);
1419 if (TREE_USED (var))
1420 size += expand_one_var (var, true, false);
1421 TREE_USED (var) = 1;
1423 size += account_used_vars_for_block (outer_block, true);
1425 if (stack_vars_num > 0)
1427 /* Fake sorting the stack vars for account_stack_vars (). */
1428 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1429 for (i = 0; i < stack_vars_num; ++i)
1430 stack_vars_sorted[i] = i;
1431 size += account_stack_vars ();
1432 fini_vars_expansion ();
1435 return size;
1438 /* Expand all variables used in the function. */
1440 static void
1441 expand_used_vars (void)
1443 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1445 /* Compute the phase of the stack frame for this function. */
1447 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1448 int off = STARTING_FRAME_OFFSET % align;
1449 frame_phase = off ? align - off : 0;
1452 init_vars_expansion ();
1454 /* At this point all variables on the local_decls with TREE_USED
1455 set are not associated with any block scope. Lay them out. */
1456 t = cfun->local_decls;
1457 cfun->local_decls = NULL_TREE;
1458 for (; t; t = next)
1460 tree var = TREE_VALUE (t);
1461 bool expand_now = false;
1463 next = TREE_CHAIN (t);
1465 /* We didn't set a block for static or extern because it's hard
1466 to tell the difference between a global variable (re)declared
1467 in a local scope, and one that's really declared there to
1468 begin with. And it doesn't really matter much, since we're
1469 not giving them stack space. Expand them now. */
1470 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1471 expand_now = true;
1473 /* Any variable that could have been hoisted into an SSA_NAME
1474 will have been propagated anywhere the optimizers chose,
1475 i.e. not confined to their original block. Allocate them
1476 as if they were defined in the outermost scope. */
1477 else if (is_gimple_reg (var))
1478 expand_now = true;
1480 /* If the variable is not associated with any block, then it
1481 was created by the optimizers, and could be live anywhere
1482 in the function. */
1483 else if (TREE_USED (var))
1484 expand_now = true;
1486 /* Finally, mark all variables on the list as used. We'll use
1487 this in a moment when we expand those associated with scopes. */
1488 TREE_USED (var) = 1;
1490 if (expand_now)
1492 expand_one_var (var, true, true);
1493 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1495 rtx rtl = DECL_RTL_IF_SET (var);
1497 /* Keep artificial non-ignored vars in cfun->local_decls
1498 chain until instantiate_decls. */
1499 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1501 TREE_CHAIN (t) = cfun->local_decls;
1502 cfun->local_decls = t;
1503 continue;
1508 ggc_free (t);
1511 /* At this point, all variables within the block tree with TREE_USED
1512 set are actually used by the optimized function. Lay them out. */
1513 expand_used_vars_for_block (outer_block, true);
1515 if (stack_vars_num > 0)
1517 /* Due to the way alias sets work, no variables with non-conflicting
1518 alias sets may be assigned the same address. Add conflicts to
1519 reflect this. */
1520 add_alias_set_conflicts ();
1522 /* If stack protection is enabled, we don't share space between
1523 vulnerable data and non-vulnerable data. */
1524 if (flag_stack_protect)
1525 add_stack_protection_conflicts ();
1527 /* Now that we have collected all stack variables, and have computed a
1528 minimal interference graph, attempt to save some stack space. */
1529 partition_stack_vars ();
1530 if (dump_file)
1531 dump_stack_var_partition ();
1534 /* There are several conditions under which we should create a
1535 stack guard: protect-all, alloca used, protected decls present. */
1536 if (flag_stack_protect == 2
1537 || (flag_stack_protect
1538 && (cfun->calls_alloca || has_protected_decls)))
1539 create_stack_guard ();
1541 /* Assign rtl to each variable based on these partitions. */
1542 if (stack_vars_num > 0)
1544 /* Reorder decls to be protected by iterating over the variables
1545 array multiple times, and allocating out of each phase in turn. */
1546 /* ??? We could probably integrate this into the qsort we did
1547 earlier, such that we naturally see these variables first,
1548 and thus naturally allocate things in the right order. */
1549 if (has_protected_decls)
1551 /* Phase 1 contains only character arrays. */
1552 expand_stack_vars (stack_protect_decl_phase_1);
1554 /* Phase 2 contains other kinds of arrays. */
1555 if (flag_stack_protect == 2)
1556 expand_stack_vars (stack_protect_decl_phase_2);
1559 expand_stack_vars (NULL);
1561 fini_vars_expansion ();
1564 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1565 if (STACK_ALIGNMENT_NEEDED)
1567 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1568 if (!FRAME_GROWS_DOWNWARD)
1569 frame_offset += align - 1;
1570 frame_offset &= -align;
1575 /* If we need to produce a detailed dump, print the tree representation
1576 for STMT to the dump file. SINCE is the last RTX after which the RTL
1577 generated for STMT should have been appended. */
1579 static void
1580 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1582 if (dump_file && (dump_flags & TDF_DETAILS))
1584 fprintf (dump_file, "\n;; ");
1585 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1586 fprintf (dump_file, "\n");
1588 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1592 /* Maps the blocks that do not contain tree labels to rtx labels. */
1594 static struct pointer_map_t *lab_rtx_for_bb;
1596 /* Returns the label_rtx expression for a label starting basic block BB. */
1598 static rtx
1599 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1601 gimple_stmt_iterator gsi;
1602 tree lab;
1603 gimple lab_stmt;
1604 void **elt;
1606 if (bb->flags & BB_RTL)
1607 return block_label (bb);
1609 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1610 if (elt)
1611 return (rtx) *elt;
1613 /* Find the tree label if it is present. */
1615 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1617 lab_stmt = gsi_stmt (gsi);
1618 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1619 break;
1621 lab = gimple_label_label (lab_stmt);
1622 if (DECL_NONLOCAL (lab))
1623 break;
1625 return label_rtx (lab);
1628 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1629 *elt = gen_label_rtx ();
1630 return (rtx) *elt;
1634 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1635 Returns a new basic block if we've terminated the current basic
1636 block and created a new one. */
1638 static basic_block
1639 expand_gimple_cond (basic_block bb, gimple stmt)
1641 basic_block new_bb, dest;
1642 edge new_edge;
1643 edge true_edge;
1644 edge false_edge;
1645 tree pred = gimple_cond_pred_to_tree (stmt);
1646 rtx last2, last;
1648 last2 = last = get_last_insn ();
1650 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1651 if (gimple_has_location (stmt))
1653 set_curr_insn_source_location (gimple_location (stmt));
1654 set_curr_insn_block (gimple_block (stmt));
1657 /* These flags have no purpose in RTL land. */
1658 true_edge->flags &= ~EDGE_TRUE_VALUE;
1659 false_edge->flags &= ~EDGE_FALSE_VALUE;
1661 /* We can either have a pure conditional jump with one fallthru edge or
1662 a two-way jump that needs to be decomposed into two basic blocks. */
1663 if (false_edge->dest == bb->next_bb)
1665 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1666 add_reg_br_prob_note (last, true_edge->probability);
1667 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1668 if (true_edge->goto_locus)
1670 set_curr_insn_source_location (true_edge->goto_locus);
1671 set_curr_insn_block (true_edge->goto_block);
1672 true_edge->goto_locus = curr_insn_locator ();
1674 true_edge->goto_block = NULL;
1675 false_edge->flags |= EDGE_FALLTHRU;
1676 ggc_free (pred);
1677 return NULL;
1679 if (true_edge->dest == bb->next_bb)
1681 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
1682 add_reg_br_prob_note (last, false_edge->probability);
1683 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1684 if (false_edge->goto_locus)
1686 set_curr_insn_source_location (false_edge->goto_locus);
1687 set_curr_insn_block (false_edge->goto_block);
1688 false_edge->goto_locus = curr_insn_locator ();
1690 false_edge->goto_block = NULL;
1691 true_edge->flags |= EDGE_FALLTHRU;
1692 ggc_free (pred);
1693 return NULL;
1696 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1697 add_reg_br_prob_note (last, true_edge->probability);
1698 last = get_last_insn ();
1699 if (false_edge->goto_locus)
1701 set_curr_insn_source_location (false_edge->goto_locus);
1702 set_curr_insn_block (false_edge->goto_block);
1703 false_edge->goto_locus = curr_insn_locator ();
1705 false_edge->goto_block = NULL;
1706 emit_jump (label_rtx_for_bb (false_edge->dest));
1708 BB_END (bb) = last;
1709 if (BARRIER_P (BB_END (bb)))
1710 BB_END (bb) = PREV_INSN (BB_END (bb));
1711 update_bb_for_insn (bb);
1713 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1714 dest = false_edge->dest;
1715 redirect_edge_succ (false_edge, new_bb);
1716 false_edge->flags |= EDGE_FALLTHRU;
1717 new_bb->count = false_edge->count;
1718 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1719 new_edge = make_edge (new_bb, dest, 0);
1720 new_edge->probability = REG_BR_PROB_BASE;
1721 new_edge->count = new_bb->count;
1722 if (BARRIER_P (BB_END (new_bb)))
1723 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1724 update_bb_for_insn (new_bb);
1726 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1728 if (true_edge->goto_locus)
1730 set_curr_insn_source_location (true_edge->goto_locus);
1731 set_curr_insn_block (true_edge->goto_block);
1732 true_edge->goto_locus = curr_insn_locator ();
1734 true_edge->goto_block = NULL;
1736 ggc_free (pred);
1737 return new_bb;
1740 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
1741 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1742 generated a tail call (something that might be denied by the ABI
1743 rules governing the call; see calls.c).
1745 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1746 can still reach the rest of BB. The case here is __builtin_sqrt,
1747 where the NaN result goes through the external function (with a
1748 tailcall) and the normal result happens via a sqrt instruction. */
1750 static basic_block
1751 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
1753 rtx last2, last;
1754 edge e;
1755 edge_iterator ei;
1756 int probability;
1757 gcov_type count;
1758 tree stmt_tree = gimple_to_tree (stmt);
1760 last2 = last = get_last_insn ();
1762 expand_expr_stmt (stmt_tree);
1764 release_stmt_tree (stmt, stmt_tree);
1766 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1767 if (CALL_P (last) && SIBLING_CALL_P (last))
1768 goto found;
1770 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1772 *can_fallthru = true;
1773 return NULL;
1775 found:
1776 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1777 Any instructions emitted here are about to be deleted. */
1778 do_pending_stack_adjust ();
1780 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1781 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1782 EH or abnormal edges, we shouldn't have created a tail call in
1783 the first place. So it seems to me we should just be removing
1784 all edges here, or redirecting the existing fallthru edge to
1785 the exit block. */
1787 probability = 0;
1788 count = 0;
1790 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1792 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1794 if (e->dest != EXIT_BLOCK_PTR)
1796 e->dest->count -= e->count;
1797 e->dest->frequency -= EDGE_FREQUENCY (e);
1798 if (e->dest->count < 0)
1799 e->dest->count = 0;
1800 if (e->dest->frequency < 0)
1801 e->dest->frequency = 0;
1803 count += e->count;
1804 probability += e->probability;
1805 remove_edge (e);
1807 else
1808 ei_next (&ei);
1811 /* This is somewhat ugly: the call_expr expander often emits instructions
1812 after the sibcall (to perform the function return). These confuse the
1813 find_many_sub_basic_blocks code, so we need to get rid of these. */
1814 last = NEXT_INSN (last);
1815 gcc_assert (BARRIER_P (last));
1817 *can_fallthru = false;
1818 while (NEXT_INSN (last))
1820 /* For instance, the sqrt builtin expander can expand a conditional with a
1821 sibcall in the `then' arm and a label for the `else' arm. */
1822 if (LABEL_P (NEXT_INSN (last)))
1824 *can_fallthru = true;
1825 break;
1827 delete_insn (NEXT_INSN (last));
1830 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1831 e->probability += probability;
1832 e->count += count;
1833 BB_END (bb) = last;
1834 update_bb_for_insn (bb);
1836 if (NEXT_INSN (last))
1838 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1840 last = BB_END (bb);
1841 if (BARRIER_P (last))
1842 BB_END (bb) = PREV_INSN (last);
1845 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1847 return bb;
1850 /* Expand basic block BB from GIMPLE trees to RTL. */
1852 static basic_block
1853 expand_gimple_basic_block (basic_block bb)
1855 gimple_stmt_iterator gsi;
1856 gimple_seq stmts;
1857 gimple stmt = NULL;
1858 rtx note, last;
1859 edge e;
1860 edge_iterator ei;
1861 void **elt;
1863 if (dump_file)
1864 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
1865 bb->index);
1867 /* Note that since we are now transitioning from GIMPLE to RTL, we
1868 cannot use the gsi_*_bb() routines because they expect the basic
1869 block to be in GIMPLE, instead of RTL. Therefore, we need to
1870 access the BB sequence directly. */
1871 stmts = bb_seq (bb);
1872 bb->il.gimple = NULL;
1873 rtl_profile_for_bb (bb);
1874 init_rtl_bb_info (bb);
1875 bb->flags |= BB_RTL;
1877 /* Remove the RETURN_EXPR if we may fall through to the exit
1878 instead. */
1879 gsi = gsi_last (stmts);
1880 if (!gsi_end_p (gsi)
1881 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
1883 gimple ret_stmt = gsi_stmt (gsi);
1885 gcc_assert (single_succ_p (bb));
1886 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
1888 if (bb->next_bb == EXIT_BLOCK_PTR
1889 && !gimple_return_retval (ret_stmt))
1891 gsi_remove (&gsi, false);
1892 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
1896 gsi = gsi_start (stmts);
1897 if (!gsi_end_p (gsi))
1899 stmt = gsi_stmt (gsi);
1900 if (gimple_code (stmt) != GIMPLE_LABEL)
1901 stmt = NULL;
1904 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1906 if (stmt || elt)
1908 last = get_last_insn ();
1910 if (stmt)
1912 tree stmt_tree = gimple_to_tree (stmt);
1913 expand_expr_stmt (stmt_tree);
1914 release_stmt_tree (stmt, stmt_tree);
1915 gsi_next (&gsi);
1918 if (elt)
1919 emit_label ((rtx) *elt);
1921 /* Java emits line number notes at the top of labels.
1922 ??? Make this go away once line number notes are obsoleted. */
1923 BB_HEAD (bb) = NEXT_INSN (last);
1924 if (NOTE_P (BB_HEAD (bb)))
1925 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1926 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1928 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1930 else
1931 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1933 NOTE_BASIC_BLOCK (note) = bb;
1935 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1937 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1938 e->flags &= ~EDGE_EXECUTABLE;
1940 /* At the moment not all abnormal edges match the RTL representation.
1941 It is safe to remove them here as find_many_sub_basic_blocks will
1942 rediscover them. In the future we should get this fixed properly. */
1943 if (e->flags & EDGE_ABNORMAL)
1944 remove_edge (e);
1945 else
1946 ei_next (&ei);
1949 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1951 gimple stmt = gsi_stmt (gsi);
1952 basic_block new_bb;
1954 /* Expand this statement, then evaluate the resulting RTL and
1955 fixup the CFG accordingly. */
1956 if (gimple_code (stmt) == GIMPLE_COND)
1958 new_bb = expand_gimple_cond (bb, stmt);
1959 if (new_bb)
1960 return new_bb;
1962 else
1964 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
1966 bool can_fallthru;
1967 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1968 if (new_bb)
1970 if (can_fallthru)
1971 bb = new_bb;
1972 else
1973 return new_bb;
1976 else if (gimple_code (stmt) != GIMPLE_CHANGE_DYNAMIC_TYPE)
1978 tree stmt_tree = gimple_to_tree (stmt);
1979 last = get_last_insn ();
1980 expand_expr_stmt (stmt_tree);
1981 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1982 release_stmt_tree (stmt, stmt_tree);
1987 /* Expand implicit goto and convert goto_locus. */
1988 FOR_EACH_EDGE (e, ei, bb->succs)
1990 if (e->goto_locus && e->goto_block)
1992 set_curr_insn_source_location (e->goto_locus);
1993 set_curr_insn_block (e->goto_block);
1994 e->goto_locus = curr_insn_locator ();
1996 e->goto_block = NULL;
1997 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
1999 emit_jump (label_rtx_for_bb (e->dest));
2000 e->flags &= ~EDGE_FALLTHRU;
2004 do_pending_stack_adjust ();
2006 /* Find the block tail. The last insn in the block is the insn
2007 before a barrier and/or table jump insn. */
2008 last = get_last_insn ();
2009 if (BARRIER_P (last))
2010 last = PREV_INSN (last);
2011 if (JUMP_TABLE_DATA_P (last))
2012 last = PREV_INSN (PREV_INSN (last));
2013 BB_END (bb) = last;
2015 update_bb_for_insn (bb);
2017 return bb;
2021 /* Create a basic block for initialization code. */
2023 static basic_block
2024 construct_init_block (void)
2026 basic_block init_block, first_block;
2027 edge e = NULL;
2028 int flags;
2030 /* Multiple entry points not supported yet. */
2031 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
2032 init_rtl_bb_info (ENTRY_BLOCK_PTR);
2033 init_rtl_bb_info (EXIT_BLOCK_PTR);
2034 ENTRY_BLOCK_PTR->flags |= BB_RTL;
2035 EXIT_BLOCK_PTR->flags |= BB_RTL;
2037 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
2039 /* When the entry edge points to the first basic block, we don't need a jump;
2040 otherwise we have to jump to the proper target. */
2041 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
2043 tree label = gimple_block_label (e->dest);
2045 emit_jump (label_rtx (label));
2046 flags = 0;
2048 else
2049 flags = EDGE_FALLTHRU;
2051 init_block = create_basic_block (NEXT_INSN (get_insns ()),
2052 get_last_insn (),
2053 ENTRY_BLOCK_PTR);
2054 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
2055 init_block->count = ENTRY_BLOCK_PTR->count;
2056 if (e)
2058 first_block = e->dest;
2059 redirect_edge_succ (e, init_block);
2060 e = make_edge (init_block, first_block, flags);
2062 else
2063 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2064 e->probability = REG_BR_PROB_BASE;
2065 e->count = ENTRY_BLOCK_PTR->count;
2067 update_bb_for_insn (init_block);
2068 return init_block;
2071 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2072 found in the block tree. */
2074 static void
2075 set_block_levels (tree block, int level)
2077 while (block)
2079 BLOCK_NUMBER (block) = level;
2080 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
2081 block = BLOCK_CHAIN (block);
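/* Illustrative sketch (hypothetical nesting, not part of this file): for

       void f (void)
       {                     <- outermost BLOCK (DECL_INITIAL), level 0
         { int a; }          <- level 1
         { int b;
           { int c; }        <- level 2
         }                   <- level 1
       }

   set_block_levels assigns BLOCK_NUMBER as annotated, recursing into
   BLOCK_SUBBLOCKS with level + 1 and following BLOCK_CHAIN for siblings.  */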
2085 /* Create a block containing landing pads and similar stuff. */
2087 static void
2088 construct_exit_block (void)
2090 rtx head = get_last_insn ();
2091 rtx end;
2092 basic_block exit_block;
2093 edge e, e2;
2094 unsigned ix;
2095 edge_iterator ei;
2096 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
2098 rtl_profile_for_bb (EXIT_BLOCK_PTR);
2100 /* Make sure the locus is set to the end of the function, so that
2101 epilogue line numbers and warnings are set properly. */
2102 if (cfun->function_end_locus != UNKNOWN_LOCATION)
2103 input_location = cfun->function_end_locus;
2105 /* The following insns belong to the top scope. */
2106 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2108 /* Generate rtl for function exit. */
2109 expand_function_end ();
2111 end = get_last_insn ();
2112 if (head == end)
2113 return;
2114 /* While emitting the function end we could move the end of the last basic block.  */
2116 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
2117 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
2118 head = NEXT_INSN (head);
2119 exit_block = create_basic_block (NEXT_INSN (head), end,
2120 EXIT_BLOCK_PTR->prev_bb);
2121 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
2122 exit_block->count = EXIT_BLOCK_PTR->count;
2124 ix = 0;
2125 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
2127 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
2128 if (!(e->flags & EDGE_ABNORMAL))
2129 redirect_edge_succ (e, exit_block);
2130 else
2131 ix++;
2134 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2135 e->probability = REG_BR_PROB_BASE;
2136 e->count = EXIT_BLOCK_PTR->count;
2137 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
2138 if (e2 != e)
2140 e->count -= e2->count;
2141 exit_block->count -= e2->count;
2142 exit_block->frequency -= EDGE_FREQUENCY (e2);
2144 if (e->count < 0)
2145 e->count = 0;
2146 if (exit_block->count < 0)
2147 exit_block->count = 0;
2148 if (exit_block->frequency < 0)
2149 exit_block->frequency = 0;
2150 update_bb_for_insn (exit_block);
2153 /* Helper function for discover_nonconstant_array_refs.
2154 Look for ARRAY_REF nodes with non-constant indexes and mark their
2155 base variables addressable. */
2157 static tree
2158 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
2159 void *data ATTRIBUTE_UNUSED)
2161 tree t = *tp;
2163 if (IS_TYPE_OR_DECL_P (t))
2164 *walk_subtrees = 0;
2165 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2167 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2168 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
2169 && (!TREE_OPERAND (t, 2)
2170 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2171 || (TREE_CODE (t) == COMPONENT_REF
2172 && (!TREE_OPERAND (t,2)
2173 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2174 || TREE_CODE (t) == BIT_FIELD_REF
2175 || TREE_CODE (t) == REALPART_EXPR
2176 || TREE_CODE (t) == IMAGPART_EXPR
2177 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2178 || CONVERT_EXPR_P (t))
2179 t = TREE_OPERAND (t, 0);
2181 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2183 t = get_base_address (t);
2184 if (t && DECL_P (t))
2185 TREE_ADDRESSABLE (t) = 1;
2188 *walk_subtrees = 0;
2191 return NULL_TREE;
2194 /* RTL expansion is not able to compile array references with variable
2195 offsets for arrays stored in a single register. Discover such
2196 expressions and mark the variables as addressable to avoid this
2197 scenario. */
2199 static void
2200 discover_nonconstant_array_refs (void)
2202 basic_block bb;
2203 gimple_stmt_iterator gsi;
2205 FOR_EACH_BB (bb)
2206 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2208 gimple stmt = gsi_stmt (gsi);
2209 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
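/* Illustrative example (hypothetical user code, guarded out and not part of
   this pass): with a non-constant index the array below cannot be kept in a
   single register, so the walk above marks 'v' TREE_ADDRESSABLE and it gets
   a stack slot instead.  */
#if 0
static int
example_pick (int i)
{
  int v[2] = { 40, 2 };
  return v[i];  /* variable index -> base decl 'v' marked addressable */
}
#endif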
2213 /* This function sets crtl->args.internal_arg_pointer to a virtual
2214 register if DRAP is needed. The local register allocator will replace
2215 virtual_incoming_args_rtx with the virtual register. */
2217 static void
2218 expand_stack_alignment (void)
2220 rtx drap_rtx;
2221 unsigned int preferred_stack_boundary;
2223 if (! SUPPORTS_STACK_ALIGNMENT)
2224 return;
2226 if (cfun->calls_alloca
2227 || cfun->has_nonlocal_label
2228 || crtl->has_nonlocal_goto)
2229 crtl->need_drap = true;
2231 gcc_assert (crtl->stack_alignment_needed
2232 <= crtl->stack_alignment_estimated);
2234 /* Update crtl->stack_alignment_estimated and use it later to align
2235 the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
2236 exceptions, since the callgraph doesn't collect incoming stack alignment
2237 in this case. */
2238 if (flag_non_call_exceptions
2239 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
2240 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2241 else
2242 preferred_stack_boundary = crtl->preferred_stack_boundary;
2243 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
2244 crtl->stack_alignment_estimated = preferred_stack_boundary;
2245 if (preferred_stack_boundary > crtl->stack_alignment_needed)
2246 crtl->stack_alignment_needed = preferred_stack_boundary;
2248 crtl->stack_realign_needed
2249 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
2250 crtl->stack_realign_tried = crtl->stack_realign_needed;
2252 crtl->stack_realign_processed = true;
2254 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
2255 alignment. */
2256 gcc_assert (targetm.calls.get_drap_rtx != NULL);
2257 drap_rtx = targetm.calls.get_drap_rtx ();
2259 /* stack_realign_drap and drap_rtx must match. */
2260 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
2262 /* Do nothing if NULL is returned, which means DRAP is not needed. */
2263 if (NULL != drap_rtx)
2265 crtl->args.internal_arg_pointer = drap_rtx;
2267 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
2268 needed. */
2269 fixup_tail_calls ();
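/* Illustrative example (hypothetical user code, guarded out): a call to
   alloca makes the frame dynamic, so crtl->need_drap is set above; whether a
   dynamic realign argument pointer (DRAP) register is actually used is then
   decided by the target's get_drap_rtx hook.  */
#if 0
static void
example_scratch (int n)
{
  char *buf = __builtin_alloca (n);  /* sets cfun->calls_alloca */
  __builtin_memset (buf, 0, n);
}
#endif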
2273 /* Translate the intermediate representation contained in the CFG
2274 from GIMPLE trees to RTL.
2276 We do the conversion per basic block and preserve/update the tree CFG.
2277 This implies we have to do some magic as the CFG can simultaneously
2278 consist of basic blocks containing RTL and GIMPLE trees. This can
2279 confuse the CFG hooks, so be careful not to manipulate the CFG during
2280 the expansion. */
2282 static unsigned int
2283 gimple_expand_cfg (void)
2285 basic_block bb, init_block;
2286 sbitmap blocks;
2287 edge_iterator ei;
2288 edge e;
2290 /* Some backends want to know that we are expanding to RTL. */
2291 currently_expanding_to_rtl = 1;
2293 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
2295 insn_locators_alloc ();
2296 if (!DECL_BUILT_IN (current_function_decl))
2298 /* Eventually, all FEs should explicitly set function_start_locus. */
2299 if (cfun->function_start_locus == UNKNOWN_LOCATION)
2300 set_curr_insn_source_location
2301 (DECL_SOURCE_LOCATION (current_function_decl));
2302 else
2303 set_curr_insn_source_location (cfun->function_start_locus);
2305 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2306 prologue_locator = curr_insn_locator ();
2308 /* Make sure the first insn is a note, even if we don't want line numbers.
2309 This ensures the first insn will never be deleted.
2310 Also, final expects a note to appear there. */
2311 emit_note (NOTE_INSN_DELETED);
2313 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
2314 discover_nonconstant_array_refs ();
2316 targetm.expand_to_rtl_hook ();
2317 crtl->stack_alignment_needed = STACK_BOUNDARY;
2318 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
2319 crtl->stack_alignment_estimated = STACK_BOUNDARY;
2320 crtl->preferred_stack_boundary = STACK_BOUNDARY;
2321 cfun->cfg->max_jumptable_ents = 0;
2324 /* Expand the variables recorded during gimple lowering. */
2325 expand_used_vars ();
2327 /* Honor stack protection warnings. */
2328 if (warn_stack_protect)
2330 if (cfun->calls_alloca)
2331 warning (OPT_Wstack_protector,
2332 "not protecting local variables: variable length buffer");
2333 if (has_short_buffer && !crtl->stack_protect_guard)
2334 warning (OPT_Wstack_protector,
2335 "not protecting function: no buffer at least %d bytes long",
2336 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
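/* Illustrative example (hypothetical user code, not part of this file):
   compiled with -fstack-protector -Wstack-protector, a function like

       void fill (int n)
       {
         char buf[n];
         __builtin_memset (buf, 0, n);
       }

   typically triggers the first warning above: the variable length array is
   lowered to an alloca-style allocation (setting cfun->calls_alloca), so the
   local variables are reported as unprotected.  */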
2339 /* Set up parameters and prepare for return, for the function. */
2340 expand_function_start (current_function_decl);
2342 /* If this function is `main', emit a call to `__main'
2343 to run global initializers, etc. */
2344 if (DECL_NAME (current_function_decl)
2345 && MAIN_NAME_P (DECL_NAME (current_function_decl))
2346 && DECL_FILE_SCOPE_P (current_function_decl))
2347 expand_main_function ();
2349 /* Initialize the stack_protect_guard field. This must happen after the
2350 call to __main (if any) so that the external decl is initialized. */
2351 if (crtl->stack_protect_guard)
2352 stack_protect_prologue ();
2354 /* Update stack boundary if needed. */
2355 if (SUPPORTS_STACK_ALIGNMENT)
2357 /* Call update_stack_boundary here to update incoming stack
2358 boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
2359 TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
2360 incoming stack alignment to check if it is OK to perform
2361 sibcall optimization since sibcall optimization will only
2362 align the outgoing stack to incoming stack boundary. */
2363 if (targetm.calls.update_stack_boundary)
2364 targetm.calls.update_stack_boundary ();
2366 /* The incoming stack frame has to be aligned at least at
2367 parm_stack_boundary. */
2368 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2371 /* Register rtl specific functions for cfg. */
2372 rtl_register_cfg_hooks ();
2374 init_block = construct_init_block ();
2376 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
2377 remaining edges in expand_gimple_basic_block. */
2378 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2379 e->flags &= ~EDGE_EXECUTABLE;
2381 lab_rtx_for_bb = pointer_map_create ();
2382 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
2383 bb = expand_gimple_basic_block (bb);
2385 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
2386 conservatively to true until they are all profile aware. */
2387 pointer_map_destroy (lab_rtx_for_bb);
2388 free_histograms ();
2390 construct_exit_block ();
2391 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2392 insn_locators_finalize ();
2394 /* We're done expanding trees to RTL. */
2395 currently_expanding_to_rtl = 0;
2397 /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */
2398 convert_from_eh_region_ranges ();
2399 set_eh_throw_stmt_table (cfun, NULL);
2401 rebuild_jump_labels (get_insns ());
2402 find_exception_handler_labels ();
2404 blocks = sbitmap_alloc (last_basic_block);
2405 sbitmap_ones (blocks);
2406 find_many_sub_basic_blocks (blocks);
2407 purge_all_dead_edges ();
2408 sbitmap_free (blocks);
2410 compact_blocks ();
2412 expand_stack_alignment ();
2414 #ifdef ENABLE_CHECKING
2415 verify_flow_info ();
2416 #endif
2418 /* There's no need to defer outputting this function any more; we
2419 know we want to output it. */
2420 DECL_DEFER_OUTPUT (current_function_decl) = 0;
2422 /* Now that we're done expanding trees to RTL, we shouldn't have any
2423 more CONCATs anywhere. */
2424 generating_concat_p = 0;
2426 if (dump_file)
2428 fprintf (dump_file,
2429 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
2430 /* And the pass manager will dump RTL for us. */
2433 /* If we're emitting a nested function, make sure its parent gets
2434 emitted as well. Doing otherwise confuses debug info. */
2436 tree parent;
2437 for (parent = DECL_CONTEXT (current_function_decl);
2438 parent != NULL_TREE;
2439 parent = get_containing_scope (parent))
2440 if (TREE_CODE (parent) == FUNCTION_DECL)
2441 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
2444 /* We are now committed to emitting code for this function. Do any
2445 preparation, such as emitting abstract debug info for the inline function
2446 before it gets mangled by optimization. */
2447 if (cgraph_function_possibly_inlined_p (current_function_decl))
2448 (*debug_hooks->outlining_inline_function) (current_function_decl);
2450 TREE_ASM_WRITTEN (current_function_decl) = 1;
2452 /* After expanding, the return labels are no longer needed. */
2453 return_label = NULL;
2454 naked_return_label = NULL;
2455 /* Tag the blocks with a depth number so that change_scope can find
2456 the common parent easily. */
2457 set_block_levels (DECL_INITIAL (cfun->decl), 0);
2458 default_rtl_profile ();
2459 return 0;
2462 struct rtl_opt_pass pass_expand =
2465 RTL_PASS,
2466 "expand", /* name */
2467 NULL, /* gate */
2468 gimple_expand_cfg, /* execute */
2469 NULL, /* sub */
2470 NULL, /* next */
2471 0, /* static_pass_number */
2472 TV_EXPAND, /* tv_id */
2473 /* ??? If TER is enabled, we actually receive GENERIC. */
2474 PROP_gimple_leh | PROP_cfg, /* properties_required */
2475 PROP_rtl, /* properties_provided */
2476 PROP_trees, /* properties_destroyed */
2477 0, /* todo_flags_start */
2478 TODO_dump_func, /* todo_flags_finish */