1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
42 #ifndef LOCAL_ALIGNMENT
43 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
44 #endif
46 #ifndef STACK_ALIGNMENT_NEEDED
47 #define STACK_ALIGNMENT_NEEDED 1
48 #endif
50 #ifdef FRAME_GROWS_DOWNWARD
51 # undef FRAME_GROWS_DOWNWARD
52 # define FRAME_GROWS_DOWNWARD 1
53 #else
54 # define FRAME_GROWS_DOWNWARD 0
55 #endif
58 /* This structure holds data relevant to one variable that will be
59 placed in a stack slot. */
60 struct stack_var
62 /* The Variable. */
63 tree decl;
65 /* The offset of the variable. During partitioning, this is the
66 offset relative to the partition. After partitioning, this
67 is relative to the stack frame. */
68 HOST_WIDE_INT offset;
70 /* Initially, the size of the variable. Later, the size of the partition,
71 if this variable becomes its partition's representative. */
72 HOST_WIDE_INT size;
74 /* The *byte* alignment required for this variable. Or, as with the
75 size, the alignment for this partition. */
76 unsigned int alignb;
78 /* The partition representative. */
79 size_t representative;
81 /* The next stack variable in the partition, or EOC. */
82 size_t next;
85 #define EOC ((size_t)-1)
87 /* We have an array of such objects while deciding allocation. */
88 static struct stack_var *stack_vars;
89 static size_t stack_vars_alloc;
90 static size_t stack_vars_num;
92 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
93 is non-decreasing. */
94 static size_t *stack_vars_sorted;
96 /* We have an interference graph between such objects. This graph
97 is lower triangular. */
98 static bool *stack_vars_conflict;
99 static size_t stack_vars_conflict_alloc;
101 /* The phase of the stack frame. This is the known misalignment of
102 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
103 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
104 static int frame_phase;
107 /* Discover the byte alignment to use for DECL. Ignore any alignment
108 we can't satisfy given the expected alignment of the stack boundary. */
110 static unsigned int
111 get_decl_align_unit (tree decl)
113 unsigned int align;
115 align = DECL_ALIGN (decl);
116 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
117 if (align > PREFERRED_STACK_BOUNDARY)
118 align = PREFERRED_STACK_BOUNDARY;
119 if (cfun->stack_alignment_needed < align)
120 cfun->stack_alignment_needed = align;
122 return align / BITS_PER_UNIT;
125 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
126 Return the frame offset. */
128 static HOST_WIDE_INT
129 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
131 HOST_WIDE_INT offset, new_frame_offset;
133 new_frame_offset = frame_offset;
134 if (FRAME_GROWS_DOWNWARD)
136 new_frame_offset -= size + frame_phase;
137 new_frame_offset &= -align;
138 new_frame_offset += frame_phase;
139 offset = new_frame_offset;
141 else
143 new_frame_offset -= frame_phase;
144 new_frame_offset += align - 1;
145 new_frame_offset &= -align;
146 new_frame_offset += frame_phase;
147 offset = new_frame_offset;
148 new_frame_offset += size;
150 frame_offset = new_frame_offset;
152 return offset;
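/* A worked example of the rounding above, assuming frame_phase == 0: with
   FRAME_GROWS_DOWNWARD, frame_offset == 0, size == 12 and align == 8,
   new_frame_offset becomes -12 and the mask rounds it down to -16, so the
   object is placed at offset -16.  In the upward-growing case with
   frame_offset == 5, the offset is rounded up to 8 and frame_offset
   advances to 20.  */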
155 /* Accumulate DECL into STACK_VARS. */
157 static void
158 add_stack_var (tree decl)
160 if (stack_vars_num >= stack_vars_alloc)
162 if (stack_vars_alloc)
163 stack_vars_alloc = stack_vars_alloc * 3 / 2;
164 else
165 stack_vars_alloc = 32;
166 stack_vars
167 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
169 stack_vars[stack_vars_num].decl = decl;
170 stack_vars[stack_vars_num].offset = 0;
171 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
172 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
174 /* All variables are initially in their own partition. */
175 stack_vars[stack_vars_num].representative = stack_vars_num;
176 stack_vars[stack_vars_num].next = EOC;
178 /* Ensure that this decl doesn't get put onto the list twice. */
179 SET_DECL_RTL (decl, pc_rtx);
181 stack_vars_num++;
184 /* Compute the linear index of a lower-triangular coordinate (I, J). */
186 static size_t
187 triangular_index (size_t i, size_t j)
189 if (i < j)
191 size_t t;
192 t = i, i = j, j = t;
194 return (i * (i + 1)) / 2 + j;
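/* For example, triangular_index (3, 1) is 3 * 4 / 2 + 1 == 7; since the
   arguments are swapped when I < J, triangular_index (1, 3) maps to the
   same slot, so only the lower triangle (diagonal included) is stored.  */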
197 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
199 static void
200 resize_stack_vars_conflict (size_t n)
202 size_t size = triangular_index (n-1, n-1) + 1;
204 if (size <= stack_vars_conflict_alloc)
205 return;
207 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
208 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
209 (size - stack_vars_conflict_alloc) * sizeof (bool));
210 stack_vars_conflict_alloc = size;
213 /* Make the decls associated with luid's X and Y conflict. */
215 static void
216 add_stack_var_conflict (size_t x, size_t y)
218 size_t index = triangular_index (x, y);
219 gcc_assert (index < stack_vars_conflict_alloc);
220 stack_vars_conflict[index] = true;
223 /* Check whether the decls associated with luid's X and Y conflict. */
225 static bool
226 stack_var_conflict_p (size_t x, size_t y)
228 size_t index = triangular_index (x, y);
229 gcc_assert (index < stack_vars_conflict_alloc);
230 return stack_vars_conflict[index];
233 /* A subroutine of expand_used_vars. If two variables X and Y have alias
234 sets that do not conflict, then they must not be assigned the same stack
235 slot, so add a conflict for them to the interference graph. We also have
236 to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
238 static void
239 add_alias_set_conflicts (void)
241 size_t i, j, n = stack_vars_num;
243 for (i = 0; i < n; ++i)
245 bool aggr_i = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[i].decl));
246 HOST_WIDE_INT set_i = get_alias_set (stack_vars[i].decl);
248 for (j = 0; j < i; ++j)
250 bool aggr_j = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[j].decl));
251 HOST_WIDE_INT set_j = get_alias_set (stack_vars[j].decl);
252 if (aggr_i != aggr_j || !alias_sets_conflict_p (set_i, set_j))
253 add_stack_var_conflict (i, j);
258 /* A subroutine of partition_stack_vars. A comparison function for qsort,
259 sorting an array of indices by the size of the object.
261 static int
262 stack_var_size_cmp (const void *a, const void *b)
264 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
265 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
267 if (sa < sb)
268 return -1;
269 if (sa > sb)
270 return 1;
271 return 0;
274 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
275 partitioning algorithm. Partitions A and B are known to be non-conflicting.
276 Merge them into a single partition A.
278 At the same time, add OFFSET to all variables in partition B. At the end
279 of the partitioning process we'll have a nice block that is easy to lay out within
280 the stack frame. */
282 static void
283 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
285 size_t i, last;
287 /* Update each element of partition B with the given offset,
288 and merge them into partition A. */
289 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
291 stack_vars[i].offset += offset;
292 stack_vars[i].representative = a;
294 stack_vars[last].next = stack_vars[a].next;
295 stack_vars[a].next = b;
297 /* Update the required alignment of partition A to account for B. */
298 if (stack_vars[a].alignb < stack_vars[b].alignb)
299 stack_vars[a].alignb = stack_vars[b].alignb;
301 /* Update the interference graph and merge the conflicts. */
302 for (last = stack_vars_num, i = 0; i < last; ++i)
303 if (stack_var_conflict_p (b, i))
304 add_stack_var_conflict (a, i);
307 /* A subroutine of expand_used_vars. Binpack the variables into
308 partitions constrained by the interference graph. The overall
309 algorithm used is as follows:
311 Sort the objects by size.
312 For each object A {
313 S = size(A)
314 O = 0
315 loop {
316 Look for the largest non-conflicting object B with size <= S.
317 UNION (A, B)
318 offset(B) = O
319 O += size(B)
320 S -= size(B)
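/* A small worked example of the packing above: given two non-conflicting
   variables of sizes 8 and 4, the 8-byte variable becomes the partition
   representative and the 4-byte variable is unioned into it at offset 0,
   so the pair needs 8 bytes of frame space rather than 12.  */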
325 static void
326 partition_stack_vars (void)
328 size_t si, sj, n = stack_vars_num;
330 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
331 for (si = 0; si < n; ++si)
332 stack_vars_sorted[si] = si;
334 if (n == 1)
335 return;
337 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
339 /* Special case: detect when all variables conflict, and thus we can't
340 do anything during the partitioning loop. It isn't uncommon (with
341 C code at least) to declare all variables at the top of the function,
342 and if we're not inlining, then all variables will be in the same scope.
343 Take advantage of very fast libc routines for this scan. */
344 gcc_assert (sizeof(bool) == sizeof(char));
345 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
346 return;
348 for (si = 0; si < n; ++si)
350 size_t i = stack_vars_sorted[si];
351 HOST_WIDE_INT isize = stack_vars[i].size;
352 HOST_WIDE_INT offset = 0;
354 for (sj = si; sj-- > 0; )
356 size_t j = stack_vars_sorted[sj];
357 HOST_WIDE_INT jsize = stack_vars[j].size;
358 unsigned int jalign = stack_vars[j].alignb;
360 /* Ignore objects that aren't partition representatives. */
361 if (stack_vars[j].representative != j)
362 continue;
364 /* Ignore objects too large for the remaining space. */
365 if (isize < jsize)
366 continue;
368 /* Ignore conflicting objects. */
369 if (stack_var_conflict_p (i, j))
370 continue;
372 /* Refine the remaining space check to include alignment. */
373 if (offset & (jalign - 1))
375 HOST_WIDE_INT toff = offset;
376 toff += jalign - 1;
377 toff &= -(HOST_WIDE_INT)jalign;
378 if (isize - (toff - offset) < jsize)
379 continue;
381 isize -= toff - offset;
382 offset = toff;
385 /* UNION the objects, placing J at OFFSET. */
386 union_stack_vars (i, j, offset);
388 isize -= jsize;
389 if (isize == 0)
390 break;
395 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
397 static void
398 dump_stack_var_partition (void)
400 size_t si, i, j, n = stack_vars_num;
402 for (si = 0; si < n; ++si)
404 i = stack_vars_sorted[si];
406 /* Skip variables that aren't partition representatives, for now. */
407 if (stack_vars[i].representative != i)
408 continue;
410 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
411 " align %u\n", (unsigned long) i, stack_vars[i].size,
412 stack_vars[i].alignb);
414 for (j = i; j != EOC; j = stack_vars[j].next)
416 fputc ('\t', dump_file);
417 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
418 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
419 stack_vars[j].offset);
424 /* Assign rtl to DECL at frame offset OFFSET. */
426 static void
427 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
429 HOST_WIDE_INT align;
430 rtx x;
432 /* If this fails, we've overflowed the stack frame. Error nicely? */
433 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
435 x = plus_constant (virtual_stack_vars_rtx, offset);
436 x = gen_rtx_MEM (DECL_MODE (decl), x);
438 /* Set alignment we actually gave this decl. */
439 offset -= frame_phase;
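/* offset & -offset isolates the lowest set bit of the phase-adjusted
   offset, i.e. the largest power of two dividing it: an offset of 48
   yields 16 bytes, which the scaling below turns into 128 bits.  */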
440 align = offset & -offset;
441 align *= BITS_PER_UNIT;
442 if (align > STACK_BOUNDARY || align == 0)
443 align = STACK_BOUNDARY;
444 DECL_ALIGN (decl) = align;
445 DECL_USER_ALIGN (decl) = 0;
447 set_mem_attributes (x, decl, true);
448 SET_DECL_RTL (decl, x);
451 /* A subroutine of expand_used_vars. Give each partition representative
452 a unique location within the stack frame. Update each partition member
453 with that location. */
455 static void
456 expand_stack_vars (void)
458 size_t si, i, j, n = stack_vars_num;
460 for (si = 0; si < n; ++si)
462 HOST_WIDE_INT offset;
464 i = stack_vars_sorted[si];
466 /* Skip variables that aren't partition representatives, for now. */
467 if (stack_vars[i].representative != i)
468 continue;
470 offset = alloc_stack_frame_space (stack_vars[i].size,
471 stack_vars[i].alignb);
473 /* Create rtl for each variable based on their location within the
474 partition. */
475 for (j = i; j != EOC; j = stack_vars[j].next)
476 expand_one_stack_var_at (stack_vars[j].decl,
477 stack_vars[j].offset + offset);
481 /* A subroutine of expand_one_var. Called to immediately assign rtl
482 to a variable to be allocated in the stack frame. */
484 static void
485 expand_one_stack_var (tree var)
487 HOST_WIDE_INT size, offset, align;
489 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
490 align = get_decl_align_unit (var);
491 offset = alloc_stack_frame_space (size, align);
493 expand_one_stack_var_at (var, offset);
496 /* A subroutine of expand_one_var. Called to assign rtl
497 to a TREE_STATIC VAR_DECL. */
499 static void
500 expand_one_static_var (tree var)
502 /* If this is an inlined copy of a static local variable,
503 look up the original. */
504 var = DECL_ORIGIN (var);
506 /* If we've already processed this variable because of that, do nothing. */
507 if (TREE_ASM_WRITTEN (var))
508 return;
510 /* Give the front end a chance to do whatever. In practice, this is
511 resolving duplicate names for IMA in C. */
512 if (lang_hooks.expand_decl (var))
513 return;
515 /* Otherwise, just emit the variable. */
516 rest_of_decl_compilation (var, 0, 0);
519 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
520 that will reside in a hard register. */
522 static void
523 expand_one_hard_reg_var (tree var)
525 rest_of_decl_compilation (var, 0, 0);
528 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
529 that will reside in a pseudo register. */
531 static void
532 expand_one_register_var (tree var)
534 tree type = TREE_TYPE (var);
535 int unsignedp = TYPE_UNSIGNED (type);
536 enum machine_mode reg_mode
537 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
538 rtx x = gen_reg_rtx (reg_mode);
540 SET_DECL_RTL (var, x);
542 /* Note if the object is a user variable. */
543 if (!DECL_ARTIFICIAL (var))
545 mark_user_reg (x);
547 /* Trust user variables which have a pointer type to really
548 be pointers. Do not trust compiler generated temporaries
549 as our type system is totally busted as it relates to
550 pointer arithmetic which translates into lots of compiler
551 generated objects with pointer types, but which are not really
552 pointers. */
553 if (POINTER_TYPE_P (type))
554 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
558 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
559 has some associated error, e.g. its type is error-mark. We just need
560 to pick something that won't crash the rest of the compiler. */
562 static void
563 expand_one_error_var (tree var)
565 enum machine_mode mode = DECL_MODE (var);
566 rtx x;
568 if (mode == BLKmode)
569 x = gen_rtx_MEM (BLKmode, const0_rtx);
570 else if (mode == VOIDmode)
571 x = const0_rtx;
572 else
573 x = gen_reg_rtx (mode);
575 SET_DECL_RTL (var, x);
578 /* A subroutine of expand_one_var. VAR is a variable that will be
579 allocated to the local stack frame. Return true if we wish to
580 add VAR to STACK_VARS so that it will be coalesced with other
581 variables. Return false to allocate VAR immediately.
583 This function is used to reduce the number of variables considered
584 for coalescing, which reduces the size of the quadratic problem. */
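/* For a sense of scale: the lower-triangular conflict map needs
   N * (N + 1) / 2 bools for N deferred variables, so 1000 variables
   already cost roughly half a million entries; keeping small scalars
   out of STACK_VARS at -O0 is what keeps this affordable.  */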
586 static bool
587 defer_stack_allocation (tree var, bool toplevel)
589 /* Variables in the outermost scope automatically conflict with
590 every other variable. The only reason to want to defer them
591 at all is that, after sorting, we can more efficiently pack
592 small variables in the stack frame. Continue to defer at -O2. */
593 if (toplevel && optimize < 2)
594 return false;
596 /* Without optimization, *most* variables are allocated from the
597 stack, which makes the quadratic problem large exactly when we
598 want compilation to proceed as quickly as possible. On the
599 other hand, we don't want the function's stack frame size to
600 get completely out of hand. So we avoid adding scalars and
601 "small" aggregates to the list at all. */
602 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
603 return false;
605 return true;
608 /* A subroutine of expand_used_vars. Expand one variable according to
609 its flavor. Variables to be placed on the stack are not actually
610 expanded yet, merely recorded. */
612 static void
613 expand_one_var (tree var, bool toplevel)
615 if (TREE_CODE (var) != VAR_DECL)
616 lang_hooks.expand_decl (var);
617 else if (DECL_EXTERNAL (var))
619 else if (DECL_VALUE_EXPR (var))
621 else if (TREE_STATIC (var))
622 expand_one_static_var (var);
623 else if (DECL_RTL_SET_P (var))
625 else if (TREE_TYPE (var) == error_mark_node)
626 expand_one_error_var (var);
627 else if (DECL_HARD_REGISTER (var))
628 expand_one_hard_reg_var (var);
629 else if (use_register_for_decl (var))
630 expand_one_register_var (var);
631 else if (defer_stack_allocation (var, toplevel))
632 add_stack_var (var);
633 else
634 expand_one_stack_var (var);
637 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
638 expanding variables. Those variables that can be put into registers
639 are allocated pseudos; those that can't are put on the stack.
641 TOPLEVEL is true if this is the outermost BLOCK. */
643 static void
644 expand_used_vars_for_block (tree block, bool toplevel)
646 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
647 tree t;
649 old_sv_num = toplevel ? 0 : stack_vars_num;
651 /* Expand all variables at this level. */
652 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
653 if (TREE_USED (t))
654 expand_one_var (t, toplevel);
656 this_sv_num = stack_vars_num;
658 /* Expand all variables at containing levels. */
659 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
660 expand_used_vars_for_block (t, false);
662 /* Since we do not track exact variable lifetimes (which is not even
663 possible for variables whose address escapes), we mirror the block
664 tree in the interference graph. Here we cause all variables at this
665 level, and all sublevels, to conflict. Do make certain that a
666 variable conflicts with itself. */
667 if (old_sv_num < this_sv_num)
669 new_sv_num = stack_vars_num;
670 resize_stack_vars_conflict (new_sv_num);
672 for (i = old_sv_num; i < new_sv_num; ++i)
673 for (j = i < this_sv_num ? i : this_sv_num; ; --j)
675 add_stack_var_conflict (i, j);
676 if (j == old_sv_num)
677 break;
682 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
683 and clear TREE_USED on all local variables. */
685 static void
686 clear_tree_used (tree block)
688 tree t;
690 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
691 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
692 TREE_USED (t) = 0;
694 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
695 clear_tree_used (t);
698 /* Expand all variables used in the function. */
700 static void
701 expand_used_vars (void)
703 tree t, outer_block = DECL_INITIAL (current_function_decl);
705 /* Compute the phase of the stack frame for this function. */
707 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
708 int off = STARTING_FRAME_OFFSET % align;
709 frame_phase = off ? align - off : 0;
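/* For example, with STARTING_FRAME_OFFSET == 8 and a 128-bit
   PREFERRED_STACK_BOUNDARY, align is 16 bytes, off is 8, and frame_phase
   becomes 8; alloc_stack_frame_space then biases its rounding by those
   8 bytes.  */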
712 /* Set TREE_USED on all variables in the unexpanded_var_list. */
713 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
714 TREE_USED (TREE_VALUE (t)) = 1;
716 /* Clear TREE_USED on all variables associated with a block scope. */
717 clear_tree_used (outer_block);
719 /* At this point all variables on the unexpanded_var_list with TREE_USED
720 set are not associated with any block scope. Lay them out. */
721 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
723 tree var = TREE_VALUE (t);
724 bool expand_now = false;
726 /* We didn't set a block for static or extern because it's hard
727 to tell the difference between a global variable (re)declared
728 in a local scope, and one that's really declared there to
729 begin with. And it doesn't really matter much, since we're
730 not giving them stack space. Expand them now. */
731 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
732 expand_now = true;
734 /* Any variable that could have been hoisted into an SSA_NAME
735 will have been propagated anywhere the optimizers chose,
736 i.e. not confined to their original block. Allocate them
737 as if they were defined in the outermost scope. */
738 else if (is_gimple_reg (var))
739 expand_now = true;
741 /* If the variable is not associated with any block, then it
742 was created by the optimizers, and could be live anywhere
743 in the function. */
744 else if (TREE_USED (var))
745 expand_now = true;
747 /* Finally, mark all variables on the list as used. We'll use
748 this in a moment when we expand those associated with scopes. */
749 TREE_USED (var) = 1;
751 if (expand_now)
752 expand_one_var (var, true);
754 cfun->unexpanded_var_list = NULL_TREE;
756 /* At this point, all variables within the block tree with TREE_USED
757 set are actually used by the optimized function. Lay them out. */
758 expand_used_vars_for_block (outer_block, true);
760 if (stack_vars_num > 0)
762 /* Due to the way alias sets work, no variables with non-conflicting
763 alias sets may be assigned the same address. Add conflicts to
764 reflect this. */
765 add_alias_set_conflicts ();
767 /* Now that we have collected all stack variables, and have computed a
768 minimal interference graph, attempt to save some stack space. */
769 partition_stack_vars ();
770 if (dump_file)
771 dump_stack_var_partition ();
773 /* Assign rtl to each variable based on these partitions. */
774 expand_stack_vars ();
776 /* Free up stack variable graph data. */
777 XDELETEVEC (stack_vars);
778 XDELETEVEC (stack_vars_sorted);
779 XDELETEVEC (stack_vars_conflict);
780 stack_vars = NULL;
781 stack_vars_alloc = stack_vars_num = 0;
782 stack_vars_conflict = NULL;
783 stack_vars_conflict_alloc = 0;
786 /* If the target requires that FRAME_OFFSET be aligned, do it. */
787 if (STACK_ALIGNMENT_NEEDED)
789 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
790 if (!FRAME_GROWS_DOWNWARD)
791 frame_offset += align - 1;
792 frame_offset &= -align;
797 /* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR.
798 Returns a new basic block if we've terminated the current basic
799 block and created a new one. */
801 static basic_block
802 expand_gimple_cond_expr (basic_block bb, tree stmt)
804 basic_block new_bb, dest;
805 edge new_edge;
806 edge true_edge;
807 edge false_edge;
808 tree pred = COND_EXPR_COND (stmt);
809 tree then_exp = COND_EXPR_THEN (stmt);
810 tree else_exp = COND_EXPR_ELSE (stmt);
811 rtx last;
813 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
814 if (EXPR_LOCUS (stmt))
816 emit_line_note (*(EXPR_LOCUS (stmt)));
817 record_block_change (TREE_BLOCK (stmt));
820 /* These flags have no purpose in RTL land. */
821 true_edge->flags &= ~EDGE_TRUE_VALUE;
822 false_edge->flags &= ~EDGE_FALSE_VALUE;
824 /* We can either have a pure conditional jump with one fallthru edge or
825 two-way jump that needs to be decomposed into two basic blocks. */
826 if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
828 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
829 return NULL;
831 if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
833 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
834 return NULL;
836 gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
837 && TREE_CODE (else_exp) == GOTO_EXPR);
839 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
840 last = get_last_insn ();
841 expand_expr (else_exp, const0_rtx, VOIDmode, 0);
843 BB_END (bb) = last;
844 if (BARRIER_P (BB_END (bb)))
845 BB_END (bb) = PREV_INSN (BB_END (bb));
846 update_bb_for_insn (bb);
848 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
849 dest = false_edge->dest;
850 redirect_edge_succ (false_edge, new_bb);
851 false_edge->flags |= EDGE_FALLTHRU;
852 new_bb->count = false_edge->count;
853 new_bb->frequency = EDGE_FREQUENCY (false_edge);
854 new_edge = make_edge (new_bb, dest, 0);
855 new_edge->probability = REG_BR_PROB_BASE;
856 new_edge->count = new_bb->count;
857 if (BARRIER_P (BB_END (new_bb)))
858 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
859 update_bb_for_insn (new_bb);
861 if (dump_file)
863 dump_bb (bb, dump_file, 0);
864 dump_bb (new_bb, dump_file, 0);
867 return new_bb;
870 /* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR
871 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
872 generated a tail call (something that might be denied by the ABI
873 rules governing the call; see calls.c).
875 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
876 can still reach the rest of BB. The case here is __builtin_sqrt,
877 where the NaN result goes through the external function (with a
878 tailcall) and the normal result happens via a sqrt instruction. */
880 static basic_block
881 expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
883 rtx last = get_last_insn ();
884 edge e;
885 int probability;
886 gcov_type count;
888 expand_expr_stmt (stmt);
890 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
891 if (CALL_P (last) && SIBLING_CALL_P (last))
892 goto found;
894 *can_fallthru = true;
895 return NULL;
897 found:
898 /* ??? Wouldn't it be better to just reset any pending stack adjust?
899 Any instructions emitted here are about to be deleted. */
900 do_pending_stack_adjust ();
902 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
903 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
904 EH or abnormal edges, we shouldn't have created a tail call in
905 the first place. So it seems to me we should just be removing
906 all edges here, or redirecting the existing fallthru edge to
907 the exit block. */
909 e = bb->succ;
910 probability = 0;
911 count = 0;
912 while (e)
914 edge next = e->succ_next;
916 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
918 if (e->dest != EXIT_BLOCK_PTR)
920 e->dest->count -= e->count;
921 e->dest->frequency -= EDGE_FREQUENCY (e);
922 if (e->dest->count < 0)
923 e->dest->count = 0;
924 if (e->dest->frequency < 0)
925 e->dest->frequency = 0;
927 count += e->count;
928 probability += e->probability;
929 remove_edge (e);
932 e = next;
935 /* This is somewhat ugly: the call_expr expander often emits instructions
936 after the sibcall (to perform the function return). These confuse the
937 find_sub_basic_blocks code, so we need to get rid of them.
938 last = NEXT_INSN (last);
939 gcc_assert (BARRIER_P (last));
941 *can_fallthru = false;
942 while (NEXT_INSN (last))
944 /* For instance, the sqrt builtin expander expands an if with a
945 sibcall in the then arm and a label for the else arm. */
946 if (LABEL_P (NEXT_INSN (last)))
948 *can_fallthru = true;
949 break;
951 delete_insn (NEXT_INSN (last));
954 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
955 e->probability += probability;
956 e->count += count;
957 BB_END (bb) = last;
958 update_bb_for_insn (bb);
960 if (NEXT_INSN (last))
962 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
964 last = BB_END (bb);
965 if (BARRIER_P (last))
966 BB_END (bb) = PREV_INSN (last);
969 return bb;
972 /* Expand basic block BB from GIMPLE trees to RTL. */
974 static basic_block
975 expand_gimple_basic_block (basic_block bb, FILE * dump_file)
977 block_stmt_iterator bsi = bsi_start (bb);
978 tree stmt = NULL;
979 rtx note, last;
980 edge e;
982 if (dump_file)
984 tree_register_cfg_hooks ();
985 dump_bb (bb, dump_file, 0);
986 rtl_register_cfg_hooks ();
989 if (!bsi_end_p (bsi))
990 stmt = bsi_stmt (bsi);
992 if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
994 last = get_last_insn ();
996 expand_expr_stmt (stmt);
998 /* Java emits line number notes at the top of labels.
999 ??? Make this go away once line number notes are obsoleted. */
1000 BB_HEAD (bb) = NEXT_INSN (last);
1001 if (NOTE_P (BB_HEAD (bb)))
1002 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1003 bsi_next (&bsi);
1004 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1006 else
1007 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1009 NOTE_BASIC_BLOCK (note) = bb;
1011 e = bb->succ;
1012 while (e)
1014 edge next = e->succ_next;
1016 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1017 e->flags &= ~EDGE_EXECUTABLE;
1019 /* At the moment not all abnormal edges match the RTL representation.
1020 It is safe to remove them here as find_sub_basic_blocks will
1021 rediscover them. In the future we should get this fixed properly. */
1022 if (e->flags & EDGE_ABNORMAL)
1023 remove_edge (e);
1025 e = next;
1028 for (; !bsi_end_p (bsi); bsi_next (&bsi))
1030 tree stmt = bsi_stmt (bsi);
1031 basic_block new_bb;
1033 if (!stmt)
1034 continue;
1036 /* Expand this statement, then evaluate the resulting RTL and
1037 fixup the CFG accordingly. */
1038 if (TREE_CODE (stmt) == COND_EXPR)
1040 new_bb = expand_gimple_cond_expr (bb, stmt);
1041 if (new_bb)
1042 return new_bb;
1044 else
1046 tree call = get_call_expr_in (stmt);
1047 if (call && CALL_EXPR_TAILCALL (call))
1049 bool can_fallthru;
1050 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1051 if (new_bb)
1053 if (can_fallthru)
1054 bb = new_bb;
1055 else
1056 return new_bb;
1059 else
1060 expand_expr_stmt (stmt);
1064 do_pending_stack_adjust ();
1066 /* Find the block tail. The last insn in the block is the insn
1067 before a barrier and/or table jump insn. */
1068 last = get_last_insn ();
1069 if (BARRIER_P (last))
1070 last = PREV_INSN (last);
1071 if (JUMP_TABLE_DATA_P (last))
1072 last = PREV_INSN (PREV_INSN (last));
1073 BB_END (bb) = last;
1075 if (dump_file)
1076 dump_bb (bb, dump_file, 0);
1077 update_bb_for_insn (bb);
1079 return bb;
1083 /* Create a basic block for initialization code. */
1085 static basic_block
1086 construct_init_block (void)
1088 basic_block init_block, first_block;
1089 edge e;
1091 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1092 if (e->dest == ENTRY_BLOCK_PTR->next_bb)
1093 break;
1095 init_block = create_basic_block (NEXT_INSN (get_insns ()),
1096 get_last_insn (),
1097 ENTRY_BLOCK_PTR);
1098 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
1099 init_block->count = ENTRY_BLOCK_PTR->count;
1100 if (e)
1102 first_block = e->dest;
1103 redirect_edge_succ (e, init_block);
1104 e = make_edge (init_block, first_block, EDGE_FALLTHRU);
1106 else
1107 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1108 e->probability = REG_BR_PROB_BASE;
1109 e->count = ENTRY_BLOCK_PTR->count;
1111 update_bb_for_insn (init_block);
1112 return init_block;
1116 /* Create a block containing landing pads and similar stuff. */
1118 static void
1119 construct_exit_block (void)
1121 rtx head = get_last_insn ();
1122 rtx end;
1123 basic_block exit_block;
1124 edge e, e2, next;
1126 /* Make sure the locus is set to the end of the function, so that
1127 epilogue line numbers and warnings are set properly. */
1128 #ifdef USE_MAPPED_LOCATION
1129 if (cfun->function_end_locus != UNKNOWN_LOCATION)
1130 #else
1131 if (cfun->function_end_locus.file)
1132 #endif
1133 input_location = cfun->function_end_locus;
1135 /* The following insns belong to the top scope. */
1136 record_block_change (DECL_INITIAL (current_function_decl));
1138 /* Generate rtl for function exit. */
1139 expand_function_end ();
1141 end = get_last_insn ();
1142 if (head == end)
1143 return;
1144 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
1145 head = NEXT_INSN (head);
1146 exit_block = create_basic_block (NEXT_INSN (head), end,
1147 EXIT_BLOCK_PTR->prev_bb);
1148 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
1149 exit_block->count = EXIT_BLOCK_PTR->count;
1150 for (e = EXIT_BLOCK_PTR->pred; e; e = next)
1152 next = e->pred_next;
1153 if (!(e->flags & EDGE_ABNORMAL))
1154 redirect_edge_succ (e, exit_block);
1156 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1157 e->probability = REG_BR_PROB_BASE;
1158 e->count = EXIT_BLOCK_PTR->count;
1159 for (e2 = EXIT_BLOCK_PTR->pred; e2; e2 = e2->pred_next)
1160 if (e2 != e)
1162 e->count -= e2->count;
1163 exit_block->count -= e2->count;
1164 exit_block->frequency -= EDGE_FREQUENCY (e2);
1166 if (e->count < 0)
1167 e->count = 0;
1168 if (exit_block->count < 0)
1169 exit_block->count = 0;
1170 if (exit_block->frequency < 0)
1171 exit_block->frequency = 0;
1172 update_bb_for_insn (exit_block);
1175 /* Translate the intermediate representation contained in the CFG
1176 from GIMPLE trees to RTL.
1178 We do conversion per basic block and preserve/update the tree CFG.
1179 This implies we have to do some magic as the CFG can simultaneously
1180 consist of basic blocks containing RTL and GIMPLE trees. This can
1181 confuse the CFG hooks, so be careful to not manipulate CFG during
1182 the expansion. */
1184 static void
1185 tree_expand_cfg (void)
1187 basic_block bb, init_block;
1188 sbitmap blocks;
1190 profile_status = PROFILE_ABSENT;
1192 /* Some backends want to know that we are expanding to RTL. */
1193 currently_expanding_to_rtl = 1;
1195 /* Prepare the rtl middle end to start recording block changes. */
1196 reset_block_changes ();
1198 /* Expand the variables recorded during gimple lowering. */
1199 expand_used_vars ();
1201 /* Set up parameters and prepare for return, for the function. */
1202 expand_function_start (current_function_decl);
1204 /* If this function is `main', emit a call to `__main'
1205 to run global initializers, etc. */
1206 if (DECL_NAME (current_function_decl)
1207 && MAIN_NAME_P (DECL_NAME (current_function_decl))
1208 && DECL_FILE_SCOPE_P (current_function_decl))
1209 expand_main_function ();
1211 /* Register rtl specific functions for cfg. */
1212 rtl_register_cfg_hooks ();
1214 init_block = construct_init_block ();
1216 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
1217 bb = expand_gimple_basic_block (bb, dump_file);
1219 construct_exit_block ();
1221 /* We're done expanding trees to RTL. */
1222 currently_expanding_to_rtl = 0;
1224 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1225 sorts of eh initialization. */
1226 convert_from_eh_region_ranges ();
1228 rebuild_jump_labels (get_insns ());
1229 find_exception_handler_labels ();
1231 blocks = sbitmap_alloc (last_basic_block);
1232 sbitmap_ones (blocks);
1233 find_many_sub_basic_blocks (blocks);
1234 purge_all_dead_edges (0);
1235 sbitmap_free (blocks);
1237 compact_blocks ();
1238 #ifdef ENABLE_CHECKING
1239 verify_flow_info ();
1240 #endif
1242 /* There's no need to defer outputting this function any more; we
1243 know we want to output it. */
1244 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1246 /* Now that we're done expanding trees to RTL, we shouldn't have any
1247 more CONCATs anywhere. */
1248 generating_concat_p = 0;
1250 finalize_block_changes ();
1253 struct tree_opt_pass pass_expand =
1255 "expand", /* name */
1256 NULL, /* gate */
1257 tree_expand_cfg, /* execute */
1258 NULL, /* sub */
1259 NULL, /* next */
1260 0, /* static_pass_number */
1261 TV_EXPAND, /* tv_id */
1262 /* ??? If TER is enabled, we actually receive GENERIC. */
1263 PROP_gimple_leh | PROP_cfg, /* properties_required */
1264 PROP_rtl, /* properties_provided */
1265 PROP_gimple_leh, /* properties_destroyed */
1266 0, /* todo_flags_start */
1267 0, /* todo_flags_finish */
1268 'r' /* letter */