gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "langhooks.h"
31 #include "tree-flow.h"
32 #include "timevar.h"
33 #include "tree-dump.h"
34 #include "tree-pass.h"
35 #include "except.h"
36 #include "flags.h"
37 #include "diagnostic.h"
38 #include "toplev.h"
39 #include "debug.h"
40 #include "params.h"
41 #include "tree-inline.h"
42 #include "value-prof.h"
43 #include "target.h"
45 /* Verify that there is exactly one jump instruction since LAST and attach
46 a REG_BR_PROB note specifying the probability.
47 ??? We really ought to pass the probability down to RTL expanders and let it
48 re-distribute it when the conditional expands into multiple conditionals.
49 This is however difficult to do. */
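/* Added illustrative note (not part of the original sources): probabilities
   here are expressed on the REG_BR_PROB_BASE scale of 10000, so attaching a
   note with value 9000 marks a branch as taken roughly 90% of the time.  In
   the condjump-around-jump case handled below, the complement
   REG_BR_PROB_BASE - probability is stored, because the emitted conditional
   branch jumps around the unconditional jump to the real target.  */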
50 void
51 add_reg_br_prob_note (rtx last, int probability)
53 if (profile_status == PROFILE_ABSENT)
54 return;
55 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
56 if (JUMP_P (last))
58 /* It is common to emit a condjump-around-jump sequence when we don't know
59 how to reverse the conditional. Special case this. */
60 if (!any_condjump_p (last)
61 || !JUMP_P (NEXT_INSN (last))
62 || !simplejump_p (NEXT_INSN (last))
63 || !NEXT_INSN (NEXT_INSN (last))
64 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
65 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
66 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
67 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
68 goto failed;
69 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
70 REG_NOTES (last)
71 = gen_rtx_EXPR_LIST (REG_BR_PROB,
72 GEN_INT (REG_BR_PROB_BASE - probability),
73 REG_NOTES (last));
74 return;
76 if (!last || !JUMP_P (last) || !any_condjump_p (last))
77 goto failed;
78 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
79 REG_NOTES (last)
80 = gen_rtx_EXPR_LIST (REG_BR_PROB,
81 GEN_INT (probability), REG_NOTES (last));
82 return;
83 failed:
84 if (dump_file)
85 fprintf (dump_file, "Failed to add probability note\n");
89 #ifndef LOCAL_ALIGNMENT
90 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
91 #endif
93 #ifndef STACK_ALIGNMENT_NEEDED
94 #define STACK_ALIGNMENT_NEEDED 1
95 #endif
98 /* This structure holds data relevant to one variable that will be
99 placed in a stack slot. */
100 struct stack_var
102 /* The Variable. */
103 tree decl;
105 /* The offset of the variable. During partitioning, this is the
106 offset relative to the partition. After partitioning, this
107 is relative to the stack frame. */
108 HOST_WIDE_INT offset;
110 /* Initially, the size of the variable. Later, the size of the partition,
111 if this variable becomes its partition's representative. */
112 HOST_WIDE_INT size;
114 /* The *byte* alignment required for this variable. Or, as with the
115 size, the alignment for this partition. */
116 unsigned int alignb;
118 /* The partition representative. */
119 size_t representative;
121 /* The next stack variable in the partition, or EOC. */
122 size_t next;
125 #define EOC ((size_t)-1)
127 /* We have an array of such objects while deciding allocation. */
128 static struct stack_var *stack_vars;
129 static size_t stack_vars_alloc;
130 static size_t stack_vars_num;
132 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
133 is non-decreasing. */
134 static size_t *stack_vars_sorted;
136 /* We have an interference graph between such objects. This graph
137 is lower triangular. */
138 static bool *stack_vars_conflict;
139 static size_t stack_vars_conflict_alloc;
141 /* The phase of the stack frame. This is the known misalignment of
142 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
143 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
144 static int frame_phase;
146 /* Used during expand_used_vars to remember if we saw any decls for
147 which we'd like to enable stack smashing protection. */
148 static bool has_protected_decls;
150 /* Used during expand_used_vars. Remember if we saw a character buffer
151 smaller than our cutoff threshold. Used for -Wstack-protector. */
152 static bool has_short_buffer;
154 /* Discover the byte alignment to use for DECL. Ignore any alignment
155 we can't honor given the expected alignment of the stack boundary. */
157 static unsigned int
158 get_decl_align_unit (tree decl)
160 unsigned int align;
162 align = DECL_ALIGN (decl);
163 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
164 if (align > PREFERRED_STACK_BOUNDARY)
165 align = PREFERRED_STACK_BOUNDARY;
166 if (cfun->stack_alignment_needed < align)
167 cfun->stack_alignment_needed = align;
169 return align / BITS_PER_UNIT;
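/* Added illustrative note: on a byte-addressable target with BITS_PER_UNIT
   of 8 and a PREFERRED_STACK_BOUNDARY of 128 bits, a decl with DECL_ALIGN of
   64 keeps its alignment and this returns 64 / 8 = 8 bytes, while a 256-bit
   request is capped at 128 bits and yields 16 bytes.  */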
172 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
173 Return the frame offset. */
175 static HOST_WIDE_INT
176 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
178 HOST_WIDE_INT offset, new_frame_offset;
180 new_frame_offset = frame_offset;
181 if (FRAME_GROWS_DOWNWARD)
183 new_frame_offset -= size + frame_phase;
184 new_frame_offset &= -align;
185 new_frame_offset += frame_phase;
186 offset = new_frame_offset;
188 else
190 new_frame_offset -= frame_phase;
191 new_frame_offset += align - 1;
192 new_frame_offset &= -align;
193 new_frame_offset += frame_phase;
194 offset = new_frame_offset;
195 new_frame_offset += size;
197 frame_offset = new_frame_offset;
199 if (frame_offset_overflow (frame_offset, cfun->decl))
200 frame_offset = offset = 0;
202 return offset;
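/* Added worked example: with FRAME_GROWS_DOWNWARD, frame_phase 0,
   frame_offset -20, SIZE 12 and ALIGN 8, new_frame_offset becomes
   (-20 - 12) & -8 = -32, the variable is placed at offset -32 (occupying
   bytes [-32, -20)), and frame_offset is updated to -32.  */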
205 /* Accumulate DECL into STACK_VARS. */
207 static void
208 add_stack_var (tree decl)
210 if (stack_vars_num >= stack_vars_alloc)
212 if (stack_vars_alloc)
213 stack_vars_alloc = stack_vars_alloc * 3 / 2;
214 else
215 stack_vars_alloc = 32;
216 stack_vars
217 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
219 stack_vars[stack_vars_num].decl = decl;
220 stack_vars[stack_vars_num].offset = 0;
221 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
222 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
224 /* All variables are initially in their own partition. */
225 stack_vars[stack_vars_num].representative = stack_vars_num;
226 stack_vars[stack_vars_num].next = EOC;
228 /* Ensure that this decl doesn't get put onto the list twice. */
229 SET_DECL_RTL (decl, pc_rtx);
231 stack_vars_num++;
234 /* Compute the linear index of a lower-triangular coordinate (I, J). */
236 static size_t
237 triangular_index (size_t i, size_t j)
239 if (i < j)
241 size_t t;
242 t = i, i = j, j = t;
244 return (i * (i + 1)) / 2 + j;
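/* Added illustrative note: only the lower triangle is stored, so (I, J) and
   (J, I) map to the same slot; e.g. both (3, 1) and (1, 3) yield
   3 * 4 / 2 + 1 = 7.  */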
247 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
249 static void
250 resize_stack_vars_conflict (size_t n)
252 size_t size = triangular_index (n-1, n-1) + 1;
254 if (size <= stack_vars_conflict_alloc)
255 return;
257 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
258 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
259 (size - stack_vars_conflict_alloc) * sizeof (bool));
260 stack_vars_conflict_alloc = size;
263 /* Make the decls associated with luid's X and Y conflict. */
265 static void
266 add_stack_var_conflict (size_t x, size_t y)
268 size_t index = triangular_index (x, y);
269 gcc_assert (index < stack_vars_conflict_alloc);
270 stack_vars_conflict[index] = true;
273 /* Check whether the decls associated with luid's X and Y conflict. */
275 static bool
276 stack_var_conflict_p (size_t x, size_t y)
278 size_t index = triangular_index (x, y);
279 gcc_assert (index < stack_vars_conflict_alloc);
280 return stack_vars_conflict[index];
283 /* Returns true if TYPE is or contains a union type. */
285 static bool
286 aggregate_contains_union_type (tree type)
288 tree field;
290 if (TREE_CODE (type) == UNION_TYPE
291 || TREE_CODE (type) == QUAL_UNION_TYPE)
292 return true;
293 if (TREE_CODE (type) == ARRAY_TYPE)
294 return aggregate_contains_union_type (TREE_TYPE (type));
295 if (TREE_CODE (type) != RECORD_TYPE)
296 return false;
298 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
299 if (TREE_CODE (field) == FIELD_DECL)
300 if (aggregate_contains_union_type (TREE_TYPE (field)))
301 return true;
303 return false;
306 /* A subroutine of expand_used_vars. If two variables X and Y have alias
307 sets that do not conflict, then add a conflict between them in the
308 interference graph anyway. We also need to make sure to add conflicts
309 for union-containing structures. Otherwise RTL alias analysis comes along
310 and, due to type-based aliasing rules, decides that for two overlapping
311 union temporaries { short s; int i; } accesses to the same memory through
312 different types may not alias, and happily reorders stores across
313 lifetime boundaries of the temporaries (see PR25654).
314 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
316 static void
317 add_alias_set_conflicts (void)
319 size_t i, j, n = stack_vars_num;
321 for (i = 0; i < n; ++i)
323 tree type_i = TREE_TYPE (stack_vars[i].decl);
324 bool aggr_i = AGGREGATE_TYPE_P (type_i);
325 bool contains_union;
327 contains_union = aggregate_contains_union_type (type_i);
328 for (j = 0; j < i; ++j)
330 tree type_j = TREE_TYPE (stack_vars[j].decl);
331 bool aggr_j = AGGREGATE_TYPE_P (type_j);
332 if (aggr_i != aggr_j
333 /* Either the objects conflict by means of type based
334 aliasing rules, or we need to add a conflict. */
335 || !objects_must_conflict_p (type_i, type_j)
336 /* In case the types do not conflict ensure that access
337 to elements will conflict. In case of unions we have
338 to be careful as type based aliasing rules may say
339 access to the same memory does not conflict. So play
340 safe and add a conflict in this case. */
341 || contains_union)
342 add_stack_var_conflict (i, j);
347 /* A subroutine of partition_stack_vars. A comparison function for qsort,
348 sorting an array of indices by the size of the object. */
350 static int
351 stack_var_size_cmp (const void *a, const void *b)
353 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
354 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
355 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
356 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
358 if (sa < sb)
359 return -1;
360 if (sa > sb)
361 return 1;
362 /* For stack variables of the same size use the uid of the decl
363 to make the sort stable. */
364 if (uida < uidb)
365 return -1;
366 if (uida > uidb)
367 return 1;
368 return 0;
371 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
372 partitioning algorithm. Partitions A and B are known to be non-conflicting.
373 Merge them into a single partition A.
375 At the same time, add OFFSET to all variables in partition B. At the end
376 of the partitioning process we'll have a nice block that is easy to lay out within
377 the stack frame. */
379 static void
380 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
382 size_t i, last;
384 /* Update each element of partition B with the given offset,
385 and merge them into partition A. */
386 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
388 stack_vars[i].offset += offset;
389 stack_vars[i].representative = a;
391 stack_vars[last].next = stack_vars[a].next;
392 stack_vars[a].next = b;
394 /* Update the required alignment of partition A to account for B. */
395 if (stack_vars[a].alignb < stack_vars[b].alignb)
396 stack_vars[a].alignb = stack_vars[b].alignb;
398 /* Update the interference graph and merge the conflicts. */
399 for (last = stack_vars_num, i = 0; i < last; ++i)
400 if (stack_var_conflict_p (b, i))
401 add_stack_var_conflict (a, i);
404 /* A subroutine of expand_used_vars. Binpack the variables into
405 partitions constrained by the interference graph. The overall
406 algorithm used is as follows:
408 Sort the objects by size.
409 For each object A {
410 S = size(A)
411 O = 0
412 loop {
413 Look for the largest non-conflicting object B with size <= S.
414 UNION (A, B)
415 offset(B) = O
416 O += size(B)
417 S -= size(B)
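/* Added illustrative note: the effect is stack slot sharing.  For example,
   two 16-byte locals that never conflict (say, arrays from two disjoint
   scopes) end up in one partition, with the second placed at offset 0 of the
   first, so together they need 16 bytes of frame space instead of 32.  */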
422 static void
423 partition_stack_vars (void)
425 size_t si, sj, n = stack_vars_num;
427 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
428 for (si = 0; si < n; ++si)
429 stack_vars_sorted[si] = si;
431 if (n == 1)
432 return;
434 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
436 /* Special case: detect when all variables conflict, and thus we can't
437 do anything during the partitioning loop. It isn't uncommon (with
438 C code at least) to declare all variables at the top of the function,
439 and if we're not inlining, then all variables will be in the same scope.
440 Take advantage of very fast libc routines for this scan. */
441 gcc_assert (sizeof(bool) == sizeof(char));
442 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
443 return;
445 for (si = 0; si < n; ++si)
447 size_t i = stack_vars_sorted[si];
448 HOST_WIDE_INT isize = stack_vars[i].size;
449 HOST_WIDE_INT offset = 0;
451 for (sj = si; sj-- > 0; )
453 size_t j = stack_vars_sorted[sj];
454 HOST_WIDE_INT jsize = stack_vars[j].size;
455 unsigned int jalign = stack_vars[j].alignb;
457 /* Ignore objects that aren't partition representatives. */
458 if (stack_vars[j].representative != j)
459 continue;
461 /* Ignore objects too large for the remaining space. */
462 if (isize < jsize)
463 continue;
465 /* Ignore conflicting objects. */
466 if (stack_var_conflict_p (i, j))
467 continue;
469 /* Refine the remaining space check to include alignment. */
470 if (offset & (jalign - 1))
472 HOST_WIDE_INT toff = offset;
473 toff += jalign - 1;
474 toff &= -(HOST_WIDE_INT)jalign;
475 if (isize - (toff - offset) < jsize)
476 continue;
478 isize -= toff - offset;
479 offset = toff;
482 /* UNION the objects, placing J at OFFSET. */
483 union_stack_vars (i, j, offset);
485 isize -= jsize;
486 if (isize == 0)
487 break;
492 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
494 static void
495 dump_stack_var_partition (void)
497 size_t si, i, j, n = stack_vars_num;
499 for (si = 0; si < n; ++si)
501 i = stack_vars_sorted[si];
503 /* Skip variables that aren't partition representatives, for now. */
504 if (stack_vars[i].representative != i)
505 continue;
507 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
508 " align %u\n", (unsigned long) i, stack_vars[i].size,
509 stack_vars[i].alignb);
511 for (j = i; j != EOC; j = stack_vars[j].next)
513 fputc ('\t', dump_file);
514 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
515 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
516 stack_vars[j].offset);
521 /* Assign rtl to DECL at frame offset OFFSET. */
523 static void
524 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
526 HOST_WIDE_INT align;
527 rtx x;
529 /* If this fails, we've overflowed the stack frame. Error nicely? */
530 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
532 x = plus_constant (virtual_stack_vars_rtx, offset);
533 x = gen_rtx_MEM (DECL_MODE (decl), x);
535 /* Set alignment we actually gave this decl. */
536 offset -= frame_phase;
537 align = offset & -offset;
538 align *= BITS_PER_UNIT;
539 if (align > STACK_BOUNDARY || align == 0)
540 align = STACK_BOUNDARY;
541 DECL_ALIGN (decl) = align;
542 DECL_USER_ALIGN (decl) = 0;
544 set_mem_attributes (x, decl, true);
545 SET_DECL_RTL (decl, x);
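/* Added illustrative note: `offset & -offset` isolates the lowest set bit,
   i.e. the largest power of two dividing the offset, so a phase-adjusted
   offset of 24 proves 8 bytes (64 bits) of alignment; the result is capped
   at STACK_BOUNDARY, which is also used when the offset is 0.  */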
548 /* A subroutine of expand_used_vars. Give each partition representative
549 a unique location within the stack frame. Update each partition member
550 with that location. */
552 static void
553 expand_stack_vars (bool (*pred) (tree))
555 size_t si, i, j, n = stack_vars_num;
557 for (si = 0; si < n; ++si)
559 HOST_WIDE_INT offset;
561 i = stack_vars_sorted[si];
563 /* Skip variables that aren't partition representatives, for now. */
564 if (stack_vars[i].representative != i)
565 continue;
567 /* Skip variables that have already had rtl assigned. See also
568 add_stack_var where we perpetrate this pc_rtx hack. */
569 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
570 continue;
572 /* Check the predicate to see whether this variable should be
573 allocated in this pass. */
574 if (pred && !pred (stack_vars[i].decl))
575 continue;
577 offset = alloc_stack_frame_space (stack_vars[i].size,
578 stack_vars[i].alignb);
580 /* Create rtl for each variable based on their location within the
581 partition. */
582 for (j = i; j != EOC; j = stack_vars[j].next)
584 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
585 expand_one_stack_var_at (stack_vars[j].decl,
586 stack_vars[j].offset + offset);
591 /* Take into account all sizes of partitions and reset DECL_RTLs. */
592 static HOST_WIDE_INT
593 account_stack_vars (void)
595 size_t si, j, i, n = stack_vars_num;
596 HOST_WIDE_INT size = 0;
598 for (si = 0; si < n; ++si)
600 i = stack_vars_sorted[si];
602 /* Skip variables that aren't partition representatives, for now. */
603 if (stack_vars[i].representative != i)
604 continue;
606 size += stack_vars[i].size;
607 for (j = i; j != EOC; j = stack_vars[j].next)
608 SET_DECL_RTL (stack_vars[j].decl, NULL);
610 return size;
613 /* A subroutine of expand_one_var. Called to immediately assign rtl
614 to a variable to be allocated in the stack frame. */
616 static void
617 expand_one_stack_var (tree var)
619 HOST_WIDE_INT size, offset, align;
621 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
622 align = get_decl_align_unit (var);
623 offset = alloc_stack_frame_space (size, align);
625 expand_one_stack_var_at (var, offset);
628 /* A subroutine of expand_one_var. Called to assign rtl
629 to a TREE_STATIC VAR_DECL. */
631 static void
632 expand_one_static_var (tree var)
634 /* In unit-at-a-time all the static variables are expanded at the end
635 of the compilation process. */
636 if (flag_unit_at_a_time)
637 return;
638 /* If this is an inlined copy of a static local variable,
639 look up the original. */
640 var = DECL_ORIGIN (var);
642 /* If we've already processed this variable because of that, do nothing. */
643 if (TREE_ASM_WRITTEN (var))
644 return;
646 /* Give the front end a chance to do whatever. In practice, this is
647 resolving duplicate names for IMA in C. */
648 if (lang_hooks.expand_decl (var))
649 return;
651 /* Otherwise, just emit the variable. */
652 rest_of_decl_compilation (var, 0, 0);
655 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
656 that will reside in a hard register. */
658 static void
659 expand_one_hard_reg_var (tree var)
661 rest_of_decl_compilation (var, 0, 0);
664 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
665 that will reside in a pseudo register. */
667 static void
668 expand_one_register_var (tree var)
670 tree type = TREE_TYPE (var);
671 int unsignedp = TYPE_UNSIGNED (type);
672 enum machine_mode reg_mode
673 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
674 rtx x = gen_reg_rtx (reg_mode);
676 SET_DECL_RTL (var, x);
678 /* Note if the object is a user variable. */
679 if (!DECL_ARTIFICIAL (var))
680 mark_user_reg (x);
682 if (POINTER_TYPE_P (type))
683 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
686 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
687 has some associated error, e.g. its type is error-mark. We just need
688 to pick something that won't crash the rest of the compiler. */
690 static void
691 expand_one_error_var (tree var)
693 enum machine_mode mode = DECL_MODE (var);
694 rtx x;
696 if (mode == BLKmode)
697 x = gen_rtx_MEM (BLKmode, const0_rtx);
698 else if (mode == VOIDmode)
699 x = const0_rtx;
700 else
701 x = gen_reg_rtx (mode);
703 SET_DECL_RTL (var, x);
706 /* A subroutine of expand_one_var. VAR is a variable that will be
707 allocated to the local stack frame. Return true if we wish to
708 add VAR to STACK_VARS so that it will be coalesced with other
709 variables. Return false to allocate VAR immediately.
711 This function is used to reduce the number of variables considered
712 for coalescing, which reduces the size of the quadratic problem. */
714 static bool
715 defer_stack_allocation (tree var, bool toplevel)
717 /* If stack protection is enabled, *all* stack variables must be deferred,
718 so that we can re-order the strings to the top of the frame. */
719 if (flag_stack_protect)
720 return true;
722 /* Variables in the outermost scope automatically conflict with
723 every other variable. The only reason to want to defer them
724 at all is that, after sorting, we can more efficiently pack
725 small variables in the stack frame. Continue to defer at -O2. */
726 if (toplevel && optimize < 2)
727 return false;
729 /* Without optimization, *most* variables are allocated from the
730 stack, which makes the quadratic problem large exactly when we
731 want compilation to proceed as quickly as possible. On the
732 other hand, we don't want the function's stack frame size to
733 get completely out of hand. So we avoid adding scalars and
734 "small" aggregates to the list at all. */
735 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
736 return false;
738 return true;
741 /* A subroutine of expand_used_vars. Expand one variable according to
742 its flavor. Variables to be placed on the stack are not actually
743 expanded yet, merely recorded.
744 When REALLY_EXPAND is false, only add stack values to be allocated.
745 Return stack usage this variable is supposed to take.
748 static HOST_WIDE_INT
749 expand_one_var (tree var, bool toplevel, bool really_expand)
751 if (TREE_CODE (var) != VAR_DECL)
753 if (really_expand)
754 lang_hooks.expand_decl (var);
756 else if (DECL_EXTERNAL (var))
758 else if (DECL_HAS_VALUE_EXPR_P (var))
760 else if (TREE_STATIC (var))
762 if (really_expand)
763 expand_one_static_var (var);
765 else if (DECL_RTL_SET_P (var))
767 else if (TREE_TYPE (var) == error_mark_node)
769 if (really_expand)
770 expand_one_error_var (var);
772 else if (DECL_HARD_REGISTER (var))
774 if (really_expand)
775 expand_one_hard_reg_var (var);
777 else if (use_register_for_decl (var))
779 if (really_expand)
780 expand_one_register_var (var);
782 else if (defer_stack_allocation (var, toplevel))
783 add_stack_var (var);
784 else
786 if (really_expand)
787 expand_one_stack_var (var);
788 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
790 return 0;
793 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
794 expanding variables. Those variables that can be put into registers
795 are allocated pseudos; those that can't are put on the stack.
797 TOPLEVEL is true if this is the outermost BLOCK. */
799 static void
800 expand_used_vars_for_block (tree block, bool toplevel)
802 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
803 tree t;
805 old_sv_num = toplevel ? 0 : stack_vars_num;
807 /* Expand all variables at this level. */
808 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
809 if (TREE_USED (t)
810 /* Force local static variables to be output when marked by
811 used attribute. For unit-at-a-time, cgraph code already takes
812 care of this. */
813 || (!flag_unit_at_a_time && TREE_STATIC (t)
814 && DECL_PRESERVE_P (t)))
815 expand_one_var (t, toplevel, true);
817 this_sv_num = stack_vars_num;
819 /* Expand all variables at containing levels. */
820 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
821 expand_used_vars_for_block (t, false);
823 /* Since we do not track exact variable lifetimes (which is not even
824 possible for variables whose address escapes), we mirror the block
825 tree in the interference graph. Here we cause all variables at this
826 level, and all sublevels, to conflict. Do make certain that a
827 variable conflicts with itself. */
828 if (old_sv_num < this_sv_num)
830 new_sv_num = stack_vars_num;
831 resize_stack_vars_conflict (new_sv_num);
833 for (i = old_sv_num; i < new_sv_num; ++i)
834 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
835 add_stack_var_conflict (i, j);
839 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
840 and clear TREE_USED on all local variables. */
842 static void
843 clear_tree_used (tree block)
845 tree t;
847 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
848 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
849 TREE_USED (t) = 0;
851 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
852 clear_tree_used (t);
855 /* Examine TYPE and determine a bit mask of the following features. */
857 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
858 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
859 #define SPCT_HAS_ARRAY 4
860 #define SPCT_HAS_AGGREGATE 8
862 static unsigned int
863 stack_protect_classify_type (tree type)
865 unsigned int ret = 0;
866 tree t;
868 switch (TREE_CODE (type))
870 case ARRAY_TYPE:
871 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
872 if (t == char_type_node
873 || t == signed_char_type_node
874 || t == unsigned_char_type_node)
876 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
877 unsigned HOST_WIDE_INT len;
879 if (!TYPE_SIZE_UNIT (type)
880 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
881 len = max;
882 else
883 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
885 if (len < max)
886 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
887 else
888 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
890 else
891 ret = SPCT_HAS_ARRAY;
892 break;
894 case UNION_TYPE:
895 case QUAL_UNION_TYPE:
896 case RECORD_TYPE:
897 ret = SPCT_HAS_AGGREGATE;
898 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
899 if (TREE_CODE (t) == FIELD_DECL)
900 ret |= stack_protect_classify_type (TREE_TYPE (t));
901 break;
903 default:
904 break;
907 return ret;
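/* Added illustrative note: with the default --param ssp-buffer-size=8,
   char buf[4] classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   char buf[64] as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct
   containing the latter additionally sets SPCT_HAS_AGGREGATE.  */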
910 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
911 part of the local stack frame. Remember if we ever return nonzero for
912 any variable in this function. The return value is the phase number in
913 which the variable should be allocated. */
915 static int
916 stack_protect_decl_phase (tree decl)
918 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
919 int ret = 0;
921 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
922 has_short_buffer = true;
924 if (flag_stack_protect == 2)
926 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
927 && !(bits & SPCT_HAS_AGGREGATE))
928 ret = 1;
929 else if (bits & SPCT_HAS_ARRAY)
930 ret = 2;
932 else
933 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
935 if (ret)
936 has_protected_decls = true;
938 return ret;
941 /* Two helper routines that check for phase 1 and phase 2. These are used
942 as callbacks for expand_stack_vars. */
944 static bool
945 stack_protect_decl_phase_1 (tree decl)
947 return stack_protect_decl_phase (decl) == 1;
950 static bool
951 stack_protect_decl_phase_2 (tree decl)
953 return stack_protect_decl_phase (decl) == 2;
956 /* Ensure that variables in different stack protection phases conflict
957 so that they are not merged and share the same stack slot. */
959 static void
960 add_stack_protection_conflicts (void)
962 size_t i, j, n = stack_vars_num;
963 unsigned char *phase;
965 phase = XNEWVEC (unsigned char, n);
966 for (i = 0; i < n; ++i)
967 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
969 for (i = 0; i < n; ++i)
971 unsigned char ph_i = phase[i];
972 for (j = 0; j < i; ++j)
973 if (ph_i != phase[j])
974 add_stack_var_conflict (i, j);
977 XDELETEVEC (phase);
980 /* Create a decl for the guard at the top of the stack frame. */
982 static void
983 create_stack_guard (void)
985 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
986 TREE_THIS_VOLATILE (guard) = 1;
987 TREE_USED (guard) = 1;
988 expand_one_stack_var (guard);
989 cfun->stack_protect_guard = guard;
992 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
993 expanding variables. Those variables that can be put into registers
994 are allocated pseudos; those that can't are put on the stack.
996 TOPLEVEL is true if this is the outermost BLOCK. */
998 static HOST_WIDE_INT
999 account_used_vars_for_block (tree block, bool toplevel)
1001 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1002 tree t;
1003 HOST_WIDE_INT size = 0;
1005 old_sv_num = toplevel ? 0 : stack_vars_num;
1007 /* Expand all variables at this level. */
1008 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1009 if (TREE_USED (t))
1010 size += expand_one_var (t, toplevel, false);
1012 this_sv_num = stack_vars_num;
1014 /* Expand all variables at containing levels. */
1015 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1016 size += account_used_vars_for_block (t, false);
1018 /* Since we do not track exact variable lifetimes (which is not even
1019 possible for variables whose address escapes), we mirror the block
1020 tree in the interference graph. Here we cause all variables at this
1021 level, and all sublevels, to conflict. Do make certain that a
1022 variable conflicts with itself. */
1023 if (old_sv_num < this_sv_num)
1025 new_sv_num = stack_vars_num;
1026 resize_stack_vars_conflict (new_sv_num);
1028 for (i = old_sv_num; i < new_sv_num; ++i)
1029 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1030 add_stack_var_conflict (i, j);
1032 return size;
1035 /* Prepare for expanding variables. */
1036 static void
1037 init_vars_expansion (void)
1039 tree t;
1040 /* Set TREE_USED on all variables in the unexpanded_var_list. */
1041 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1042 TREE_USED (TREE_VALUE (t)) = 1;
1044 /* Clear TREE_USED on all variables associated with a block scope. */
1045 clear_tree_used (DECL_INITIAL (current_function_decl));
1047 /* Initialize local stack smashing state. */
1048 has_protected_decls = false;
1049 has_short_buffer = false;
1052 /* Free up stack variable graph data. */
1053 static void
1054 fini_vars_expansion (void)
1056 XDELETEVEC (stack_vars);
1057 XDELETEVEC (stack_vars_sorted);
1058 XDELETEVEC (stack_vars_conflict);
1059 stack_vars = NULL;
1060 stack_vars_alloc = stack_vars_num = 0;
1061 stack_vars_conflict = NULL;
1062 stack_vars_conflict_alloc = 0;
1065 HOST_WIDE_INT
1066 estimated_stack_frame_size (void)
1068 HOST_WIDE_INT size = 0;
1069 tree t, outer_block = DECL_INITIAL (current_function_decl);
1071 init_vars_expansion ();
1073 /* At this point all variables on the unexpanded_var_list with TREE_USED
1074 set are not associated with any block scope. Lay them out. */
1075 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1077 tree var = TREE_VALUE (t);
1079 if (TREE_USED (var))
1080 size += expand_one_var (var, true, false);
1081 TREE_USED (var) = 1;
1083 size += account_used_vars_for_block (outer_block, true);
1084 if (stack_vars_num > 0)
1086 /* Due to the way alias sets work, no variables with non-conflicting
1087 alias sets may be assigned the same address. Add conflicts to
1088 reflect this. */
1089 add_alias_set_conflicts ();
1091 /* If stack protection is enabled, we don't share space between
1092 vulnerable data and non-vulnerable data. */
1093 if (flag_stack_protect)
1094 add_stack_protection_conflicts ();
1096 /* Now that we have collected all stack variables, and have computed a
1097 minimal interference graph, attempt to save some stack space. */
1098 partition_stack_vars ();
1099 if (dump_file)
1100 dump_stack_var_partition ();
1102 size += account_stack_vars ();
1103 fini_vars_expansion ();
1105 return size;
1108 /* Expand all variables used in the function. */
1110 static void
1111 expand_used_vars (void)
1113 tree t, outer_block = DECL_INITIAL (current_function_decl);
1115 /* Compute the phase of the stack frame for this function. */
1117 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1118 int off = STARTING_FRAME_OFFSET % align;
1119 frame_phase = off ? align - off : 0;
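/* Added illustrative note: on a hypothetical target where
   STARTING_FRAME_OFFSET is 8 and PREFERRED_STACK_BOUNDARY is 128 bits
   (align = 16), off is 8 and frame_phase is 8, so frame_offset + 8 is the
   quantity kept 16-byte aligned by alloc_stack_frame_space.  */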
1122 init_vars_expansion ();
1124 /* At this point all variables on the unexpanded_var_list with TREE_USED
1125 set are not associated with any block scope. Lay them out. */
1126 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
1128 tree var = TREE_VALUE (t);
1129 bool expand_now = false;
1131 /* We didn't set a block for static or extern because it's hard
1132 to tell the difference between a global variable (re)declared
1133 in a local scope, and one that's really declared there to
1134 begin with. And it doesn't really matter much, since we're
1135 not giving them stack space. Expand them now. */
1136 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1137 expand_now = true;
1139 /* Any variable that could have been hoisted into an SSA_NAME
1140 will have been propagated anywhere the optimizers chose,
1141 i.e. not confined to their original block. Allocate them
1142 as if they were defined in the outermost scope. */
1143 else if (is_gimple_reg (var))
1144 expand_now = true;
1146 /* If the variable is not associated with any block, then it
1147 was created by the optimizers, and could be live anywhere
1148 in the function. */
1149 else if (TREE_USED (var))
1150 expand_now = true;
1152 /* Finally, mark all variables on the list as used. We'll use
1153 this in a moment when we expand those associated with scopes. */
1154 TREE_USED (var) = 1;
1156 if (expand_now)
1157 expand_one_var (var, true, true);
1159 cfun->unexpanded_var_list = NULL_TREE;
1161 /* At this point, all variables within the block tree with TREE_USED
1162 set are actually used by the optimized function. Lay them out. */
1163 expand_used_vars_for_block (outer_block, true);
1165 if (stack_vars_num > 0)
1167 /* Due to the way alias sets work, no variables with non-conflicting
1168 alias sets may be assigned the same address. Add conflicts to
1169 reflect this. */
1170 add_alias_set_conflicts ();
1172 /* If stack protection is enabled, we don't share space between
1173 vulnerable data and non-vulnerable data. */
1174 if (flag_stack_protect)
1175 add_stack_protection_conflicts ();
1177 /* Now that we have collected all stack variables, and have computed a
1178 minimal interference graph, attempt to save some stack space. */
1179 partition_stack_vars ();
1180 if (dump_file)
1181 dump_stack_var_partition ();
1184 /* There are several conditions under which we should create a
1185 stack guard: protect-all, alloca used, protected decls present. */
1186 if (flag_stack_protect == 2
1187 || (flag_stack_protect
1188 && (current_function_calls_alloca || has_protected_decls)))
1189 create_stack_guard ();
1191 /* Assign rtl to each variable based on these partitions. */
1192 if (stack_vars_num > 0)
1194 /* Reorder decls to be protected by iterating over the variables
1195 array multiple times, and allocating out of each phase in turn. */
1196 /* ??? We could probably integrate this into the qsort we did
1197 earlier, such that we naturally see these variables first,
1198 and thus naturally allocate things in the right order. */
1199 if (has_protected_decls)
1201 /* Phase 1 contains only character arrays. */
1202 expand_stack_vars (stack_protect_decl_phase_1);
1204 /* Phase 2 contains other kinds of arrays. */
1205 if (flag_stack_protect == 2)
1206 expand_stack_vars (stack_protect_decl_phase_2);
1209 expand_stack_vars (NULL);
1211 fini_vars_expansion ();
1214 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1215 if (STACK_ALIGNMENT_NEEDED)
1217 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1218 if (!FRAME_GROWS_DOWNWARD)
1219 frame_offset += align - 1;
1220 frame_offset &= -align;
1225 /* If we need to produce a detailed dump, print the tree representation
1226 for STMT to the dump file. SINCE is the last RTX after which the RTL
1227 generated for STMT should have been appended. */
1229 static void
1230 maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
1232 if (dump_file && (dump_flags & TDF_DETAILS))
1234 fprintf (dump_file, "\n;; ");
1235 print_generic_expr (dump_file, stmt, TDF_SLIM);
1236 fprintf (dump_file, "\n");
1238 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1242 /* Maps the blocks that do not contain tree labels to rtx labels. */
1244 static struct pointer_map_t *lab_rtx_for_bb;
1246 /* Returns the label_rtx expression for a label starting basic block BB. */
1248 static rtx
1249 label_rtx_for_bb (basic_block bb)
1251 tree_stmt_iterator tsi;
1252 tree lab, lab_stmt;
1253 void **elt;
1255 if (bb->flags & BB_RTL)
1256 return block_label (bb);
1258 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1259 if (elt)
1260 return (rtx) *elt;
1262 /* Find the tree label if it is present. */
1264 for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
1266 lab_stmt = tsi_stmt (tsi);
1267 if (TREE_CODE (lab_stmt) != LABEL_EXPR)
1268 break;
1270 lab = LABEL_EXPR_LABEL (lab_stmt);
1271 if (DECL_NONLOCAL (lab))
1272 break;
1274 return label_rtx (lab);
1277 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1278 *elt = gen_label_rtx ();
1279 return (rtx) *elt;
1282 /* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR.
1283 Returns a new basic block if we've terminated the current basic
1284 block and created a new one. */
1286 static basic_block
1287 expand_gimple_cond_expr (basic_block bb, tree stmt)
1289 basic_block new_bb, dest;
1290 edge new_edge;
1291 edge true_edge;
1292 edge false_edge;
1293 tree pred = COND_EXPR_COND (stmt);
1294 rtx last2, last;
1296 gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
1297 gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
1298 last2 = last = get_last_insn ();
1300 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1301 if (EXPR_LOCUS (stmt))
1303 set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
1304 set_curr_insn_block (TREE_BLOCK (stmt));
1307 /* These flags have no purpose in RTL land. */
1308 true_edge->flags &= ~EDGE_TRUE_VALUE;
1309 false_edge->flags &= ~EDGE_FALSE_VALUE;
1311 /* We can either have a pure conditional jump with one fallthru edge or
1312 a two-way jump that needs to be decomposed into two basic blocks. */
1313 if (false_edge->dest == bb->next_bb)
1315 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1316 add_reg_br_prob_note (last, true_edge->probability);
1317 maybe_dump_rtl_for_tree_stmt (stmt, last);
1318 if (true_edge->goto_locus)
1319 set_curr_insn_source_location (true_edge->goto_locus);
1320 false_edge->flags |= EDGE_FALLTHRU;
1321 return NULL;
1323 if (true_edge->dest == bb->next_bb)
1325 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
1326 add_reg_br_prob_note (last, false_edge->probability);
1327 maybe_dump_rtl_for_tree_stmt (stmt, last);
1328 if (false_edge->goto_locus)
1329 set_curr_insn_source_location (false_edge->goto_locus);
1330 true_edge->flags |= EDGE_FALLTHRU;
1331 return NULL;
1334 jumpif (pred, label_rtx_for_bb (true_edge->dest));
1335 add_reg_br_prob_note (last, true_edge->probability);
1336 last = get_last_insn ();
1337 emit_jump (label_rtx_for_bb (false_edge->dest));
1339 BB_END (bb) = last;
1340 if (BARRIER_P (BB_END (bb)))
1341 BB_END (bb) = PREV_INSN (BB_END (bb));
1342 update_bb_for_insn (bb);
1344 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1345 dest = false_edge->dest;
1346 redirect_edge_succ (false_edge, new_bb);
1347 false_edge->flags |= EDGE_FALLTHRU;
1348 new_bb->count = false_edge->count;
1349 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1350 new_edge = make_edge (new_bb, dest, 0);
1351 new_edge->probability = REG_BR_PROB_BASE;
1352 new_edge->count = new_bb->count;
1353 if (BARRIER_P (BB_END (new_bb)))
1354 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1355 update_bb_for_insn (new_bb);
1357 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1359 if (false_edge->goto_locus)
1360 set_curr_insn_source_location (false_edge->goto_locus);
1362 return new_bb;
1365 /* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR
1366 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1367 generated a tail call (something that might be denied by the ABI
1368 rules governing the call; see calls.c).
1370 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1371 can still reach the rest of BB. The case here is __builtin_sqrt,
1372 where the NaN result goes through the external function (with a
1373 tailcall) and the normal result happens via a sqrt instruction. */
1375 static basic_block
1376 expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
1378 rtx last2, last;
1379 edge e;
1380 edge_iterator ei;
1381 int probability;
1382 gcov_type count;
1384 last2 = last = get_last_insn ();
1386 expand_expr_stmt (stmt);
1388 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1389 if (CALL_P (last) && SIBLING_CALL_P (last))
1390 goto found;
1392 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1394 *can_fallthru = true;
1395 return NULL;
1397 found:
1398 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1399 Any instructions emitted here are about to be deleted. */
1400 do_pending_stack_adjust ();
1402 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1403 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1404 EH or abnormal edges, we shouldn't have created a tail call in
1405 the first place. So it seems to me we should just be removing
1406 all edges here, or redirecting the existing fallthru edge to
1407 the exit block. */
1409 probability = 0;
1410 count = 0;
1412 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1414 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1416 if (e->dest != EXIT_BLOCK_PTR)
1418 e->dest->count -= e->count;
1419 e->dest->frequency -= EDGE_FREQUENCY (e);
1420 if (e->dest->count < 0)
1421 e->dest->count = 0;
1422 if (e->dest->frequency < 0)
1423 e->dest->frequency = 0;
1425 count += e->count;
1426 probability += e->probability;
1427 remove_edge (e);
1429 else
1430 ei_next (&ei);
1433 /* This is somewhat ugly: the call_expr expander often emits instructions
1434 after the sibcall (to perform the function return). These confuse the
1435 find_many_sub_basic_blocks code, so we need to get rid of these. */
1436 last = NEXT_INSN (last);
1437 gcc_assert (BARRIER_P (last));
1439 *can_fallthru = false;
1440 while (NEXT_INSN (last))
1442 /* For instance, the sqrt builtin expander expands an if with a
1443 sibcall in the then-arm and a label for the else-arm. */
1444 if (LABEL_P (NEXT_INSN (last)))
1446 *can_fallthru = true;
1447 break;
1449 delete_insn (NEXT_INSN (last));
1452 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1453 e->probability += probability;
1454 e->count += count;
1455 BB_END (bb) = last;
1456 update_bb_for_insn (bb);
1458 if (NEXT_INSN (last))
1460 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1462 last = BB_END (bb);
1463 if (BARRIER_P (last))
1464 BB_END (bb) = PREV_INSN (last);
1467 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1469 return bb;
1472 /* Expand basic block BB from GIMPLE trees to RTL. */
1474 static basic_block
1475 expand_gimple_basic_block (basic_block bb)
1477 tree_stmt_iterator tsi;
1478 tree stmts = bb_stmt_list (bb);
1479 tree stmt = NULL;
1480 rtx note, last;
1481 edge e;
1482 edge_iterator ei;
1483 void **elt;
1485 if (dump_file)
1487 fprintf (dump_file,
1488 "\n;; Generating RTL for tree basic block %d\n",
1489 bb->index);
1492 bb->il.tree = NULL;
1493 init_rtl_bb_info (bb);
1494 bb->flags |= BB_RTL;
1496 /* Remove the RETURN_EXPR if we may fall through to the exit
1497 instead. */
1498 tsi = tsi_last (stmts);
1499 if (!tsi_end_p (tsi)
1500 && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
1502 tree ret_stmt = tsi_stmt (tsi);
1504 gcc_assert (single_succ_p (bb));
1505 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
1507 if (bb->next_bb == EXIT_BLOCK_PTR
1508 && !TREE_OPERAND (ret_stmt, 0))
1510 tsi_delink (&tsi);
1511 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
1515 tsi = tsi_start (stmts);
1516 if (!tsi_end_p (tsi))
1518 stmt = tsi_stmt (tsi);
1519 if (TREE_CODE (stmt) != LABEL_EXPR)
1520 stmt = NULL_TREE;
1523 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1525 if (stmt || elt)
1527 last = get_last_insn ();
1529 if (stmt)
1531 expand_expr_stmt (stmt);
1532 tsi_next (&tsi);
1535 if (elt)
1536 emit_label ((rtx) *elt);
1538 /* Java emits line number notes at the top of labels.
1539 ??? Make this go away once line number notes are obsoleted. */
1540 BB_HEAD (bb) = NEXT_INSN (last);
1541 if (NOTE_P (BB_HEAD (bb)))
1542 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1543 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1545 maybe_dump_rtl_for_tree_stmt (stmt, last);
1547 else
1548 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1550 NOTE_BASIC_BLOCK (note) = bb;
1552 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1554 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1555 e->flags &= ~EDGE_EXECUTABLE;
1557 /* At the moment not all abnormal edges match the RTL representation.
1558 It is safe to remove them here as find_many_sub_basic_blocks will
1559 rediscover them. In the future we should get this fixed properly. */
1560 if (e->flags & EDGE_ABNORMAL)
1561 remove_edge (e);
1562 else
1563 ei_next (&ei);
1566 for (; !tsi_end_p (tsi); tsi_next (&tsi))
1568 tree stmt = tsi_stmt (tsi);
1569 basic_block new_bb;
1571 if (!stmt)
1572 continue;
1574 /* Expand this statement, then evaluate the resulting RTL and
1575 fixup the CFG accordingly. */
1576 if (TREE_CODE (stmt) == COND_EXPR)
1578 new_bb = expand_gimple_cond_expr (bb, stmt);
1579 if (new_bb)
1580 return new_bb;
1582 else
1584 tree call = get_call_expr_in (stmt);
1585 int region;
1586 /* For the benefit of calls.c, converting all this to rtl,
1587 we need to record the call expression, not just the outer
1588 modify statement. */
1589 if (call && call != stmt)
1591 if ((region = lookup_stmt_eh_region (stmt)) > 0)
1592 add_stmt_to_eh_region (call, region);
1593 gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
1595 if (call && CALL_EXPR_TAILCALL (call))
1597 bool can_fallthru;
1598 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1599 if (new_bb)
1601 if (can_fallthru)
1602 bb = new_bb;
1603 else
1604 return new_bb;
1607 else
1609 last = get_last_insn ();
1610 expand_expr_stmt (stmt);
1611 maybe_dump_rtl_for_tree_stmt (stmt, last);
1616 /* Expand implicit goto. */
1617 FOR_EACH_EDGE (e, ei, bb->succs)
1619 if (e->flags & EDGE_FALLTHRU)
1620 break;
1623 if (e && e->dest != bb->next_bb)
1625 emit_jump (label_rtx_for_bb (e->dest));
1626 if (e->goto_locus)
1627 set_curr_insn_source_location (e->goto_locus);
1628 e->flags &= ~EDGE_FALLTHRU;
1631 do_pending_stack_adjust ();
1633 /* Find the block tail. The last insn in the block is the insn
1634 before a barrier and/or table jump insn. */
1635 last = get_last_insn ();
1636 if (BARRIER_P (last))
1637 last = PREV_INSN (last);
1638 if (JUMP_TABLE_DATA_P (last))
1639 last = PREV_INSN (PREV_INSN (last));
1640 BB_END (bb) = last;
1642 update_bb_for_insn (bb);
1644 return bb;
1648 /* Create a basic block for initialization code. */
1650 static basic_block
1651 construct_init_block (void)
1653 basic_block init_block, first_block;
1654 edge e = NULL;
1655 int flags;
1657 /* Multiple entry points not supported yet. */
1658 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
1659 init_rtl_bb_info (ENTRY_BLOCK_PTR);
1660 init_rtl_bb_info (EXIT_BLOCK_PTR);
1661 ENTRY_BLOCK_PTR->flags |= BB_RTL;
1662 EXIT_BLOCK_PTR->flags |= BB_RTL;
1664 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
1666 /* When the entry edge points to the first basic block, we don't need a jump;
1667 otherwise we have to jump to the proper target. */
1668 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
1670 tree label = tree_block_label (e->dest);
1672 emit_jump (label_rtx (label));
1673 flags = 0;
1675 else
1676 flags = EDGE_FALLTHRU;
1678 init_block = create_basic_block (NEXT_INSN (get_insns ()),
1679 get_last_insn (),
1680 ENTRY_BLOCK_PTR);
1681 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
1682 init_block->count = ENTRY_BLOCK_PTR->count;
1683 if (e)
1685 first_block = e->dest;
1686 redirect_edge_succ (e, init_block);
1687 e = make_edge (init_block, first_block, flags);
1689 else
1690 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1691 e->probability = REG_BR_PROB_BASE;
1692 e->count = ENTRY_BLOCK_PTR->count;
1694 update_bb_for_insn (init_block);
1695 return init_block;
1698 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
1699 found in the block tree. */
1701 static void
1702 set_block_levels (tree block, int level)
1704 while (block)
1706 BLOCK_NUMBER (block) = level;
1707 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
1708 block = BLOCK_CHAIN (block);
1712 /* Create a block containing landing pads and similar stuff. */
1714 static void
1715 construct_exit_block (void)
1717 rtx head = get_last_insn ();
1718 rtx end;
1719 basic_block exit_block;
1720 edge e, e2;
1721 unsigned ix;
1722 edge_iterator ei;
1723 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
1725 /* Make sure the locus is set to the end of the function, so that
1726 epilogue line numbers and warnings are set properly. */
1727 if (cfun->function_end_locus != UNKNOWN_LOCATION)
1728 input_location = cfun->function_end_locus;
1730 /* The following insns belong to the top scope. */
1731 set_curr_insn_block (DECL_INITIAL (current_function_decl));
1733 /* Generate rtl for function exit. */
1734 expand_function_end ();
1736 end = get_last_insn ();
1737 if (head == end)
1738 return;
1739 /* While emitting the function end we could move the end of the last basic block. */
1741 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
1742 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
1743 head = NEXT_INSN (head);
1744 exit_block = create_basic_block (NEXT_INSN (head), end,
1745 EXIT_BLOCK_PTR->prev_bb);
1746 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
1747 exit_block->count = EXIT_BLOCK_PTR->count;
1749 ix = 0;
1750 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
1752 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
1753 if (!(e->flags & EDGE_ABNORMAL))
1754 redirect_edge_succ (e, exit_block);
1755 else
1756 ix++;
1759 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1760 e->probability = REG_BR_PROB_BASE;
1761 e->count = EXIT_BLOCK_PTR->count;
1762 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
1763 if (e2 != e)
1765 e->count -= e2->count;
1766 exit_block->count -= e2->count;
1767 exit_block->frequency -= EDGE_FREQUENCY (e2);
1769 if (e->count < 0)
1770 e->count = 0;
1771 if (exit_block->count < 0)
1772 exit_block->count = 0;
1773 if (exit_block->frequency < 0)
1774 exit_block->frequency = 0;
1775 update_bb_for_insn (exit_block);
1778 /* Helper function for discover_nonconstant_array_refs.
1779 Look for ARRAY_REF nodes with non-constant indexes and mark them
1780 addressable. */
1782 static tree
1783 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
1784 void *data ATTRIBUTE_UNUSED)
1786 tree t = *tp;
1788 if (IS_TYPE_OR_DECL_P (t))
1789 *walk_subtrees = 0;
1790 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1792 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1793 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
1794 && (!TREE_OPERAND (t, 2)
1795 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1796 || (TREE_CODE (t) == COMPONENT_REF
1797 && (!TREE_OPERAND (t,2)
1798 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1799 || TREE_CODE (t) == BIT_FIELD_REF
1800 || TREE_CODE (t) == REALPART_EXPR
1801 || TREE_CODE (t) == IMAGPART_EXPR
1802 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1803 || TREE_CODE (t) == NOP_EXPR
1804 || TREE_CODE (t) == CONVERT_EXPR)
1805 t = TREE_OPERAND (t, 0);
1807 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1809 t = get_base_address (t);
1810 if (t && DECL_P (t))
1811 TREE_ADDRESSABLE (t) = 1;
1814 *walk_subtrees = 0;
1817 return NULL_TREE;
1820 /* RTL expansion is not able to compile array references with variable
1821 offsets for arrays stored in a single register. Discover such
1822 expressions and mark variables as addressable to avoid this
1823 scenario. */
1825 static void
1826 discover_nonconstant_array_refs (void)
1828 basic_block bb;
1829 block_stmt_iterator bsi;
1831 FOR_EACH_BB (bb)
1833 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1834 walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
1835 NULL , NULL);
1839 /* Translate the intermediate representation contained in the CFG
1840 from GIMPLE trees to RTL.
1842 We do conversion per basic block and preserve/update the tree CFG.
1843 This implies we have to do some magic as the CFG can simultaneously
1844 consist of basic blocks containing RTL and GIMPLE trees. This can
1845 confuse the CFG hooks, so be careful not to manipulate the CFG during
1846 the expansion. */
1848 static unsigned int
1849 tree_expand_cfg (void)
1851 basic_block bb, init_block;
1852 sbitmap blocks;
1853 edge_iterator ei;
1854 edge e;
1856 /* Some backends want to know that we are expanding to RTL. */
1857 currently_expanding_to_rtl = 1;
1859 insn_locators_alloc ();
1860 if (!DECL_BUILT_IN (current_function_decl))
1861 set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
1862 set_curr_insn_block (DECL_INITIAL (current_function_decl));
1863 prologue_locator = curr_insn_locator ();
1865 /* Make sure first insn is a note even if we don't want linenums.
1866 This makes sure the first insn will never be deleted.
1867 Also, final expects a note to appear there. */
1868 emit_note (NOTE_INSN_DELETED);
1870 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
1871 discover_nonconstant_array_refs ();
1873 targetm.expand_to_rtl_hook ();
1875 /* Expand the variables recorded during gimple lowering. */
1876 expand_used_vars ();
1878 /* Honor stack protection warnings. */
1879 if (warn_stack_protect)
1881 if (current_function_calls_alloca)
1882 warning (OPT_Wstack_protector,
1883 "not protecting local variables: variable length buffer");
1884 if (has_short_buffer && !cfun->stack_protect_guard)
1885 warning (OPT_Wstack_protector,
1886 "not protecting function: no buffer at least %d bytes long",
1887 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
1890 /* Set up parameters and prepare for return, for the function. */
1891 expand_function_start (current_function_decl);
1893 /* If this function is `main', emit a call to `__main'
1894 to run global initializers, etc. */
1895 if (DECL_NAME (current_function_decl)
1896 && MAIN_NAME_P (DECL_NAME (current_function_decl))
1897 && DECL_FILE_SCOPE_P (current_function_decl))
1898 expand_main_function ();
1900 /* Initialize the stack_protect_guard field. This must happen after the
1901 call to __main (if any) so that the external decl is initialized. */
1902 if (cfun->stack_protect_guard)
1903 stack_protect_prologue ();
1905 /* Register rtl specific functions for cfg. */
1906 rtl_register_cfg_hooks ();
1908 init_block = construct_init_block ();
1910 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
1911 remaining edges in expand_gimple_basic_block. */
1912 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1913 e->flags &= ~EDGE_EXECUTABLE;
1915 lab_rtx_for_bb = pointer_map_create ();
1916 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
1917 bb = expand_gimple_basic_block (bb);
1918 pointer_map_destroy (lab_rtx_for_bb);
1920 construct_exit_block ();
1921 set_curr_insn_block (DECL_INITIAL (current_function_decl));
1922 insn_locators_finalize ();
1924 /* We're done expanding trees to RTL. */
1925 currently_expanding_to_rtl = 0;
1927 /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
1928 EH regions. */
1929 convert_from_eh_region_ranges ();
1931 rebuild_jump_labels (get_insns ());
1932 find_exception_handler_labels ();
1934 blocks = sbitmap_alloc (last_basic_block);
1935 sbitmap_ones (blocks);
1936 find_many_sub_basic_blocks (blocks);
1937 purge_all_dead_edges ();
1938 sbitmap_free (blocks);
1940 compact_blocks ();
1941 #ifdef ENABLE_CHECKING
1942 verify_flow_info ();
1943 #endif
1945 /* There's no need to defer outputting this function any more; we
1946 know we want to output it. */
1947 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1949 /* Now that we're done expanding trees to RTL, we shouldn't have any
1950 more CONCATs anywhere. */
1951 generating_concat_p = 0;
1953 if (dump_file)
1955 fprintf (dump_file,
1956 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
1957 /* And the pass manager will dump RTL for us. */
1960 /* If we're emitting a nested function, make sure its parent gets
1961 emitted as well. Doing otherwise confuses debug info. */
1963 tree parent;
1964 for (parent = DECL_CONTEXT (current_function_decl);
1965 parent != NULL_TREE;
1966 parent = get_containing_scope (parent))
1967 if (TREE_CODE (parent) == FUNCTION_DECL)
1968 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1971 /* We are now committed to emitting code for this function. Do any
1972 preparation, such as emitting abstract debug info for the inline
1973 before it gets mangled by optimization. */
1974 if (cgraph_function_possibly_inlined_p (current_function_decl))
1975 (*debug_hooks->outlining_inline_function) (current_function_decl);
1977 TREE_ASM_WRITTEN (current_function_decl) = 1;
1979 /* After expanding, the return labels are no longer needed. */
1980 return_label = NULL;
1981 naked_return_label = NULL;
1982 free_histograms ();
1983 /* Tag the blocks with a depth number so that change_scope can find
1984 the common parent easily. */
1985 set_block_levels (DECL_INITIAL (cfun->decl), 0);
1986 return 0;
1989 struct gimple_opt_pass pass_expand =
1992 GIMPLE_PASS,
1993 "expand", /* name */
1994 NULL, /* gate */
1995 tree_expand_cfg, /* execute */
1996 NULL, /* sub */
1997 NULL, /* next */
1998 0, /* static_pass_number */
1999 TV_EXPAND, /* tv_id */
2000 /* ??? If TER is enabled, we actually receive GENERIC. */
2001 PROP_gimple_leh | PROP_cfg, /* properties_required */
2002 PROP_rtl, /* properties_provided */
2003 PROP_trees, /* properties_destroyed */
2004 0, /* todo_flags_start */
2005 TODO_dump_func, /* todo_flags_finish */