/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"

/* Verify that there is exactly one jump instruction emitted since LAST
   and attach a REG_BR_PROB note to it specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and let
   it re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit a condjump-around-jump sequence when we don't
	   know how to reverse the conditional.  Special case this.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !NEXT_INSN (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	REG_NOTES (last)
	  = gen_rtx_EXPR_LIST (REG_BR_PROB,
			       GEN_INT (REG_BR_PROB_BASE - probability),
			       REG_NOTES (last));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

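/* Shape of the special-cased sequence (a rough illustration added for
   this edit, not literal RTL from the source):

	jump_insn   ; conditional branch around, to L_AROUND
	jump_insn   ; unconditional jump to the real target
	barrier
	code_label  ; L_AROUND

   Because the note lands on the inverted conditional, it carries
   REG_BR_PROB_BASE - PROBABILITY rather than PROBABILITY itself.  */
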
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

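/* Illustration (values assumed for concreteness, not from the source):
   if STARTING_FRAME_OFFSET were 8 bytes against a 16-byte preferred
   boundary, expand_used_vars below would compute frame_phase == 8, and
   the offsets OFF satisfying (OFF + 8) % 16 == 0 are then exactly the
   16-byte-aligned slots.  */
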
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

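/* Worked example (numbers assumed, added for this edit): growing upward
   with frame_phase == 0 and frame_offset == 13, a request for SIZE == 8
   at ALIGN == 8 computes (13 + 7) & -8 == 16, returns offset 16, and
   leaves frame_offset == 24.  Growing downward, the size is subtracted
   first and the &= -align then rounds toward more negative offsets.  */
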
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}

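/* Layout illustration (added for this edit): after canonicalizing so
   that I >= J, pairs pack row by row:
     (0,0) -> 0
     (1,0) -> 1   (1,1) -> 2
     (2,0) -> 3   (2,1) -> 4   (2,2) -> 5
   hence N objects need triangular_index (N-1, N-1) + 1 slots, which is
   the size used by resize_stack_vars_conflict below.  */
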
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}

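/* For illustration (hypothetical types, added for this edit):
     union u  { short s; int i; };
     struct a { int x; union u y; };	-> true
     struct b { struct a v[4]; };	-> true (via the array element)
     struct c { int x; float y; };	-> false  */
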
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   memory through different types may not alias, and happily reorders
   stores across lifetime boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/

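/* Hypothetical walk-through (sizes assumed, added for this edit): three
   mutually non-conflicting locals of 4, 8 and 16 bytes sort to
   {4, 8, 16}.  Processing the 8-byte object absorbs the 4-byte one into
   its partition; processing the 16-byte object then absorbs that whole
   partition, so all three overlay one 16-byte slot, which is legal
   precisely because they never conflict, instead of occupying 28 bytes
   of frame.  */
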
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof(bool) == sizeof(char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;
	}
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}

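/* Bit-trick note (added for this edit): "offset & -offset" isolates the
   lowest set bit, i.e. the largest power of two dividing OFFSET.  For
   a phase-adjusted offset of 24, 24 & -24 == 8, proving 8-byte
   (64-bit) alignment for the slot.  */
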
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 stack_vars[j].offset + offset);
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	SET_DECL_RTL (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode, all static variables are expanded at the
     end of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
	 be pointers.  Do not trust compiler generated temporaries
	 as our type system is totally busted as it relates to
	 pointer arithmetic which translates into lots of compiler
	 generated objects with pointer types, but which are not really
	 pointers.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

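/* Example of the heuristics above (hypothetical decls, added for this
   edit): at -O0 an "int i" or "char buf[24]" (under 32 bytes) gets its
   slot immediately, keeping the conflict graph small, while a
   "char buf[64]" is still deferred; with -fstack-protector everything
   is deferred regardless, so buffers can be re-ordered.  */
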
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only record the stack variables to be
   allocated.  Return the amount of stack space this variable is
   supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  if (TREE_CODE (var) != VAR_DECL)
    {
      if (really_expand)
	lang_hooks.expand_decl (var);
    }
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    {
      if (really_expand)
	expand_one_static_var (var);
    }
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (var);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    {
      if (really_expand)
	expand_one_stack_var (var);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
	/* Force local static variables to be output when marked by
	   used attribute.  For unit-at-a-time, cgraph code already takes
	   care of this.  */
	|| (!flag_unit_at_a_time && TREE_STATIC (t)
	    && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

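/* Classification examples (assuming the default --param
   ssp-buffer-size=8; added for this edit): "char buf[16]" yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[4]" yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and
   "struct { char b[16]; }" additionally sets SPCT_HAS_AGGREGATE via
   the RECORD_TYPE case.  */
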
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree, totaling the stack space the used variables would consume;
   no rtl is actually assigned.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
	size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();

      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (current_function_calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}

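/* Rounding sanity check for the final alignment above (values assumed,
   added for this edit): with a 16-byte preferred boundary and an
   upward-growing frame, frame_offset 52 becomes 52 + 15 == 67, then
   67 & -16 == 64; a downward-growing frame skips the bump, so
   -52 & -16 == -64, growing the frame to keep the boundary.  */
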
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree lab, lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */
  for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
    {
      lab_stmt = tsi_stmt (tsi);
      if (TREE_CODE (lab_stmt) != LABEL_EXPR)
	break;

      lab = LABEL_EXPR_LABEL (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  rtx last2, last;

  gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
  gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
      set_curr_insn_block (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (true_edge->goto_locus)
	set_curr_insn_source_location (*true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (false_edge->goto_locus)
	set_curr_insn_source_location (*false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (false_edge->goto_locus)
    set_curr_insn_source_location (*false_edge->goto_locus);

  return new_bb;
}

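/* Sketch of the two-way case above (illustrative, added for this edit):
   for GIMPLE "if (p) goto L1; else goto L2" where neither label is the
   physical next block, we emit "jumpif (p, L1)" followed by an
   unconditional jump to L2, then split the unconditional jump off into
   NEW_BB and route FALSE_EDGE through it as the fallthru path.  */
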
/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if with a
	 sibcall in the then-arm and a label for the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree stmts = bb_stmt_list (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n;; Generating RTL for tree basic block %d\n",
	       bb->index);
    }

  bb->il.tree = NULL;
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  tsi = tsi_last (stmts);
  if (!tsi_end_p (tsi)
      && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
    {
      tree ret_stmt = tsi_stmt (tsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !TREE_OPERAND (ret_stmt, 0))
	{
	  tsi_delink (&tsi);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  tsi = tsi_start (stmts);
  if (!tsi_end_p (tsi))
    {
      stmt = tsi_stmt (tsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	stmt = NULL_TREE;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_expr_stmt (stmt);
	  tsi_next (&tsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !tsi_end_p (tsi); tsi_next (&tsi))
    {
      tree stmt = tsi_stmt (tsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  int region;
	  /* For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  if (call && call != stmt)
	    {
	      if ((region = lookup_stmt_eh_region (stmt)) > 0)
		add_stmt_to_eh_region (call, region);
	      gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
	    }
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  /* Expand implicit goto.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	break;
    }

  if (e && e->dest != bb->next_bb)
    {
      emit_jump (label_rtx_for_bb (e->dest));
      if (e->goto_locus)
	set_curr_insn_source_location (*e->goto_locus);
      e->flags &= ~EDGE_FALLTHRU;
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t,2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == NOP_EXPR
	     || TREE_CODE (t) == CONVERT_EXPR)
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
		   NULL, NULL);
    }
}

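/* A concrete case (hypothetical source, added for this edit):
     int f (int i) { int v[2]; v[0] = 1; v[1] = 2; return v[i]; }
   Without the marking, V could be promoted into registers, and the
   variable index in "v[i]" would then be inexpressible; making V
   addressable keeps it in memory so the access can be expanded as an
   indexed load.  */
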
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
  pointer_map_destroy (lab_rtx_for_bb);

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  free_histograms ();
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  return 0;
}

struct tree_opt_pass pass_expand =
{
  "expand",				/* name */
  NULL,					/* gate */
  tree_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  'r'					/* letter */
};