gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
53 into RTL. */
54 struct ssaexpand SA;
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 /* Return an expression tree corresponding to the RHS of GIMPLE
61 statement STMT. */
63 tree
64 gimple_assign_rhs_to_tree (gimple stmt)
66 tree t;
67 enum gimple_rhs_class grhs_class;
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 if (grhs_class == GIMPLE_TERNARY_RHS)
72 t = build3 (gimple_assign_rhs_code (stmt),
73 TREE_TYPE (gimple_assign_lhs (stmt)),
74 gimple_assign_rhs1 (stmt),
75 gimple_assign_rhs2 (stmt),
76 gimple_assign_rhs3 (stmt));
77 else if (grhs_class == GIMPLE_BINARY_RHS)
78 t = build2 (gimple_assign_rhs_code (stmt),
79 TREE_TYPE (gimple_assign_lhs (stmt)),
80 gimple_assign_rhs1 (stmt),
81 gimple_assign_rhs2 (stmt));
82 else if (grhs_class == GIMPLE_UNARY_RHS)
83 t = build1 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt));
86 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 t = gimple_assign_rhs1 (stmt);
89 /* Avoid modifying this tree in place below. */
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
91 && gimple_location (stmt) != EXPR_LOCATION (t))
92 || (gimple_block (stmt)
93 && currently_expanding_to_rtl
94 && EXPR_P (t)
95 && gimple_block (stmt) != TREE_BLOCK (t)))
96 t = copy_node (t);
98 else
99 gcc_unreachable ();
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
104 TREE_BLOCK (t) = gimple_block (stmt);
106 return t;
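/* Editorial illustration, not part of the original source: how the RHS
   classes above map to GENERIC trees, using hypothetical statements.

     a = b + c;   GIMPLE_BINARY_RHS   -> build2 (PLUS_EXPR, type, b, c)
     a = -b;      GIMPLE_UNARY_RHS    -> build1 (NEGATE_EXPR, type, b)
     a ternary rhs code (e.g. FMA_EXPR) -> build3 (code, type, rhs1, rhs2, rhs3)
     a = b;       GIMPLE_SINGLE_RHS   -> rhs1 itself, copied first if the
                                         location or BLOCK would be changed

   The statement's location and BLOCK are then stamped onto the resulting
   tree so debug information survives expansion.  */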
110 #ifndef STACK_ALIGNMENT_NEEDED
111 #define STACK_ALIGNMENT_NEEDED 1
112 #endif
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116 /* Associate declaration T with storage space X. If T is not an
117 SSA name, this is exactly SET_DECL_RTL; otherwise make the
118 partition of T associated with X. */
119 static inline void
120 set_rtl (tree t, rtx x)
122 if (TREE_CODE (t) == SSA_NAME)
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
125 if (x && !MEM_P (x))
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
127 /* For the benefit of debug information at -O0 (where vartracking
128 doesn't run) record the place also in the base DECL if it's
129 a normal variable (not a parameter). */
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 tree var = SSA_NAME_VAR (t);
133 /* If we don't yet have something recorded, just record it now. */
134 if (!DECL_RTL_SET_P (var))
135 SET_DECL_RTL (var, x);
136 /* If we have it set already to "multiple places" don't
137 change this. */
138 else if (DECL_RTL (var) == pc_rtx)
140 /* If we have something recorded and it's not the same place
141 as we want to record now, we have multiple partitions for the
142 same base variable, with different places. We can't just
143 randomly choose one, hence we have to say that we don't know.
144 This only happens with optimization, and there var-tracking
145 will figure out the right thing. */
146 else if (DECL_RTL (var) != x)
147 SET_DECL_RTL (var, pc_rtx);
150 else
151 SET_DECL_RTL (t, x);
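/* Editorial illustration of the pc_rtx convention above, with invented
   SSA names and pseudo numbers: if partitions holding x_1 and x_3 (both
   versions of variable x) are given pseudos r100 and r101, the first
   set_rtl call records DECL_RTL (x) = r100 for -O0 debug info; the second
   sees a different location and demotes DECL_RTL (x) to pc_rtx, i.e.
   "multiple places", leaving var-tracking to sort it out when optimizing.  */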
154 /* This structure holds data relevant to one variable that will be
155 placed in a stack slot. */
156 struct stack_var
158 /* The Variable. */
159 tree decl;
161 /* The offset of the variable. During partitioning, this is the
162 offset relative to the partition. After partitioning, this
163 is relative to the stack frame. */
164 HOST_WIDE_INT offset;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
168 HOST_WIDE_INT size;
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment for this partition. */
172 unsigned int alignb;
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
178 size_t next;
180 /* The numbers of conflicting stack variables. */
181 bitmap conflicts;
184 #define EOC ((size_t)-1)
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
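/* Editorial example with made-up numbers: if PREFERRED_STACK_BOUNDARY is
   128 bits (16 bytes) and STARTING_FRAME_OFFSET is 8, then off = 8 % 16 = 8
   and frame_phase = 16 - 8 = 8, so offsets handed out below keep
   (frame_offset + frame_phase) % 16 == 0.  */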
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
204 /* Used during expand_used_vars. Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Discover the byte alignment to use for DECL. Ignore alignment
209 we can't satisfy with the expected alignment of the stack boundary. */
211 static unsigned int
212 get_decl_align_unit (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 return align / BITS_PER_UNIT;
218 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
219 Return the frame offset. */
221 static HOST_WIDE_INT
222 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
224 HOST_WIDE_INT offset, new_frame_offset;
226 new_frame_offset = frame_offset;
227 if (FRAME_GROWS_DOWNWARD)
229 new_frame_offset -= size + frame_phase;
230 new_frame_offset &= -align;
231 new_frame_offset += frame_phase;
232 offset = new_frame_offset;
234 else
236 new_frame_offset -= frame_phase;
237 new_frame_offset += align - 1;
238 new_frame_offset &= -align;
239 new_frame_offset += frame_phase;
240 offset = new_frame_offset;
241 new_frame_offset += size;
243 frame_offset = new_frame_offset;
245 if (frame_offset_overflow (frame_offset, cfun->decl))
246 frame_offset = offset = 0;
248 return offset;
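/* Editorial walk-through of the FRAME_GROWS_DOWNWARD branch above, with
   invented values: frame_phase == 0, frame_offset == -20, size == 9,
   align == 8:
     new_frame_offset = -20 - 9 = -29
     new_frame_offset &= -8  ->  -32    (round down to a multiple of 8)
   so the caller gets offset -32 and frame_offset becomes -32; 12 bytes of
   frame are consumed, 9 for the object plus 3 of alignment padding.  */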
251 /* Accumulate DECL into STACK_VARS. */
253 static void
254 add_stack_var (tree decl)
256 struct stack_var *v;
258 if (stack_vars_num >= stack_vars_alloc)
260 if (stack_vars_alloc)
261 stack_vars_alloc = stack_vars_alloc * 3 / 2;
262 else
263 stack_vars_alloc = 32;
264 stack_vars
265 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
267 v = &stack_vars[stack_vars_num];
269 v->decl = decl;
270 v->offset = 0;
271 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
272 /* Ensure that all variables have size, so that &a != &b for any two
273 variables that are simultaneously live. */
274 if (v->size == 0)
275 v->size = 1;
276 v->alignb = get_decl_align_unit (SSAVAR (decl));
278 /* All variables are initially in their own partition. */
279 v->representative = stack_vars_num;
280 v->next = EOC;
282 /* All variables initially conflict with no other. */
283 v->conflicts = NULL;
285 /* Ensure that this decl doesn't get put onto the list twice. */
286 set_rtl (decl, pc_rtx);
288 stack_vars_num++;
291 /* Make the decls associated with LUIDs X and Y conflict. */
293 static void
294 add_stack_var_conflict (size_t x, size_t y)
296 struct stack_var *a = &stack_vars[x];
297 struct stack_var *b = &stack_vars[y];
298 if (!a->conflicts)
299 a->conflicts = BITMAP_ALLOC (NULL);
300 if (!b->conflicts)
301 b->conflicts = BITMAP_ALLOC (NULL);
302 bitmap_set_bit (a->conflicts, y);
303 bitmap_set_bit (b->conflicts, x);
306 /* Check whether the decls associated with LUIDs X and Y conflict. */
308 static bool
309 stack_var_conflict_p (size_t x, size_t y)
311 struct stack_var *a = &stack_vars[x];
312 struct stack_var *b = &stack_vars[y];
313 if (!a->conflicts || !b->conflicts)
314 return false;
315 return bitmap_bit_p (a->conflicts, y);
318 /* Returns true if TYPE is or contains a union type. */
320 static bool
321 aggregate_contains_union_type (tree type)
323 tree field;
325 if (TREE_CODE (type) == UNION_TYPE
326 || TREE_CODE (type) == QUAL_UNION_TYPE)
327 return true;
328 if (TREE_CODE (type) == ARRAY_TYPE)
329 return aggregate_contains_union_type (TREE_TYPE (type));
330 if (TREE_CODE (type) != RECORD_TYPE)
331 return false;
333 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
334 if (TREE_CODE (field) == FIELD_DECL)
335 if (aggregate_contains_union_type (TREE_TYPE (field)))
336 return true;
338 return false;
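/* Editorial example with hypothetical types: the predicate above is true
   for
     union u { short s; int i; };
     struct wrap { union u inner[4]; };
   (a union reached through an array inside a record) and false for a plain
     struct point { int x, y; };  */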
341 /* A subroutine of expand_used_vars. If two variables X and Y have alias
342 sets that do not conflict, we still need to add a conflict for these variables
343 in the interference graph. We also need to make sure to add conflicts
344 for union-containing structures. Else RTL alias analysis comes along
345 and due to type based aliasing rules decides that for two overlapping
346 union temporaries { short s; int i; } accesses to the same mem through
347 different types may not alias and happily reorders stores across
348 life-time boundaries of the temporaries (See PR25654).
349 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
351 static void
352 add_alias_set_conflicts (void)
354 size_t i, j, n = stack_vars_num;
356 for (i = 0; i < n; ++i)
358 tree type_i = TREE_TYPE (stack_vars[i].decl);
359 bool aggr_i = AGGREGATE_TYPE_P (type_i);
360 bool contains_union;
362 contains_union = aggregate_contains_union_type (type_i);
363 for (j = 0; j < i; ++j)
365 tree type_j = TREE_TYPE (stack_vars[j].decl);
366 bool aggr_j = AGGREGATE_TYPE_P (type_j);
367 if (aggr_i != aggr_j
368 /* Either the objects conflict by means of type based
369 aliasing rules, or we need to add a conflict. */
370 || !objects_must_conflict_p (type_i, type_j)
371 /* In case the types do not conflict ensure that access
372 to elements will conflict. In case of unions we have
373 to be careful as type based aliasing rules may say
374 access to the same memory does not conflict. So play
375 safe and add a conflict in this case. */
376 || contains_union)
377 add_stack_var_conflict (i, j);
382 /* A subroutine of partition_stack_vars. A comparison function for qsort,
383 sorting an array of indices by the properties of the object. */
385 static int
386 stack_var_cmp (const void *a, const void *b)
388 size_t ia = *(const size_t *)a;
389 size_t ib = *(const size_t *)b;
390 unsigned int aligna = stack_vars[ia].alignb;
391 unsigned int alignb = stack_vars[ib].alignb;
392 HOST_WIDE_INT sizea = stack_vars[ia].size;
393 HOST_WIDE_INT sizeb = stack_vars[ib].size;
394 tree decla = stack_vars[ia].decl;
395 tree declb = stack_vars[ib].decl;
396 bool largea, largeb;
397 unsigned int uida, uidb;
399 /* Primary compare on "large" alignment. Large comes first. */
400 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
401 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
402 if (largea != largeb)
403 return (int)largeb - (int)largea;
405 /* Secondary compare on size, decreasing */
406 if (sizea < sizeb)
407 return -1;
408 if (sizea > sizeb)
409 return 1;
411 /* Tertiary compare on true alignment, decreasing. */
412 if (aligna < alignb)
413 return -1;
414 if (aligna > alignb)
415 return 1;
417 /* Final compare on ID for sort stability, increasing.
418 Two SSA names are compared by their version, SSA names come before
419 non-SSA names, and two normal decls are compared by their DECL_UID. */
420 if (TREE_CODE (decla) == SSA_NAME)
422 if (TREE_CODE (declb) == SSA_NAME)
423 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
424 else
425 return -1;
427 else if (TREE_CODE (declb) == SSA_NAME)
428 return 1;
429 else
430 uida = DECL_UID (decla), uidb = DECL_UID (declb);
431 if (uida < uidb)
432 return 1;
433 if (uida > uidb)
434 return -1;
435 return 0;
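/* Editorial example of the resulting order, with invented decls and a
   target whose MAX_SUPPORTED_STACK_ALIGNMENT is assumed to be 256 bits:
   a 512-byte buffer declared with __attribute__((aligned (64))) sorts
   first ("large" alignment), then a 64-byte array, then a 16-byte struct
   (size decreasing); ties are broken by alignment, then by SSA version or
   DECL_UID purely for qsort stability.  */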
439 /* If the points-to solution *PI points to variables that are in a partition
440 together with other variables add all partition members to the pointed-to
441 variables bitmap. */
443 static void
444 add_partitioned_vars_to_ptset (struct pt_solution *pt,
445 struct pointer_map_t *decls_to_partitions,
446 struct pointer_set_t *visited, bitmap temp)
448 bitmap_iterator bi;
449 unsigned i;
450 bitmap *part;
452 if (pt->anything
453 || pt->vars == NULL
454 /* The pointed-to vars bitmap is shared, it is enough to
455 visit it once. */
456 || pointer_set_insert(visited, pt->vars))
457 return;
459 bitmap_clear (temp);
461 /* By using a temporary bitmap to store all members of the partitions
462 we have to add, we make sure to visit each of the partitions only
463 once. */
464 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
465 if ((!temp
466 || !bitmap_bit_p (temp, i))
467 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
468 (void *)(size_t) i)))
469 bitmap_ior_into (temp, *part);
470 if (!bitmap_empty_p (temp))
471 bitmap_ior_into (pt->vars, temp);
474 /* Update points-to sets based on partition info, so we can use them on RTL.
475 The bitmaps representing stack partitions will be saved until expand,
476 where partitioned decls used as bases in memory expressions will be
477 rewritten. */
479 static void
480 update_alias_info_with_stack_vars (void)
482 struct pointer_map_t *decls_to_partitions = NULL;
483 size_t i, j;
484 tree var = NULL_TREE;
486 for (i = 0; i < stack_vars_num; i++)
488 bitmap part = NULL;
489 tree name;
490 struct ptr_info_def *pi;
492 /* Not interested in partitions with a single variable. */
493 if (stack_vars[i].representative != i
494 || stack_vars[i].next == EOC)
495 continue;
497 if (!decls_to_partitions)
499 decls_to_partitions = pointer_map_create ();
500 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
503 /* Create an SSA_NAME that points to the partition for use
504 as base during alias-oracle queries on RTL for bases that
505 have been partitioned. */
506 if (var == NULL_TREE)
507 var = create_tmp_var (ptr_type_node, NULL);
508 name = make_ssa_name (var, NULL);
510 /* Create bitmaps representing partitions. They will be used for
511 points-to sets later, so use GGC alloc. */
512 part = BITMAP_GGC_ALLOC ();
513 for (j = i; j != EOC; j = stack_vars[j].next)
515 tree decl = stack_vars[j].decl;
516 unsigned int uid = DECL_PT_UID (decl);
517 /* We should never end up partitioning SSA names (though they
518 may end up on the stack). Neither should we allocate stack
519 space to something that is unused and thus unreferenced. */
520 gcc_assert (DECL_P (decl)
521 && referenced_var_lookup (DECL_UID (decl)));
522 bitmap_set_bit (part, uid);
523 *((bitmap *) pointer_map_insert (decls_to_partitions,
524 (void *)(size_t) uid)) = part;
525 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
526 decl)) = name;
529 /* Make the SSA name point to all partition members. */
530 pi = get_ptr_info (name);
531 pt_solution_set (&pi->pt, part, false, false);
534 /* Make all points-to sets that contain one member of a partition
535 contain all members of the partition. */
536 if (decls_to_partitions)
538 unsigned i;
539 struct pointer_set_t *visited = pointer_set_create ();
540 bitmap temp = BITMAP_ALLOC (NULL);
542 for (i = 1; i < num_ssa_names; i++)
544 tree name = ssa_name (i);
545 struct ptr_info_def *pi;
547 if (name
548 && POINTER_TYPE_P (TREE_TYPE (name))
549 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
550 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
551 visited, temp);
554 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
555 decls_to_partitions, visited, temp);
557 pointer_set_destroy (visited);
558 pointer_map_destroy (decls_to_partitions);
559 BITMAP_FREE (temp);
563 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
564 partitioning algorithm. Partitions A and B are known to be non-conflicting.
565 Merge them into a single partition A.
567 At the same time, add OFFSET to all variables in partition B. At the end
568 of the partitioning process we'll have a nice block easy to lay out within
569 the stack frame. */
571 static void
572 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
574 size_t i, last;
575 struct stack_var *vb = &stack_vars[b];
576 bitmap_iterator bi;
577 unsigned u;
579 /* Update each element of partition B with the given offset,
580 and merge them into partition A. */
581 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
583 stack_vars[i].offset += offset;
584 stack_vars[i].representative = a;
586 stack_vars[last].next = stack_vars[a].next;
587 stack_vars[a].next = b;
589 /* Update the required alignment of partition A to account for B. */
590 if (stack_vars[a].alignb < stack_vars[b].alignb)
591 stack_vars[a].alignb = stack_vars[b].alignb;
593 /* Update the interference graph and merge the conflicts. */
594 if (vb->conflicts)
596 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
597 add_stack_var_conflict (a, stack_vars[u].representative);
598 BITMAP_FREE (vb->conflicts);
602 /* A subroutine of expand_used_vars. Binpack the variables into
603 partitions constrained by the interference graph. The overall
604 algorithm used is as follows:
606 Sort the objects by size.
607 For each object A {
608 S = size(A)
609 O = 0
610 loop {
611 Look for the largest non-conflicting object B with size <= S.
612 UNION (A, B)
613 offset(B) = O
614 O += size(B)
615 S -= size(B)
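/* Editorial walk-through of the algorithm sketched above, with made-up
   sizes 32, 16, 8 and 4 (already sorted, decreasing) and no conflicts:
   starting from A (size 32), S = 32 and O = 0; the 16-byte object is
   unioned in at offset 0 (O = 16, S = 16), the 8-byte object at offset 16
   (O = 24, S = 8) and the 4-byte object at offset 24.  All of them fit
   inside the 32 bytes reserved for A, so the frame needs 32 bytes rather
   than 60.  */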
620 static void
621 partition_stack_vars (void)
623 size_t si, sj, n = stack_vars_num;
625 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
626 for (si = 0; si < n; ++si)
627 stack_vars_sorted[si] = si;
629 if (n == 1)
630 return;
632 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
634 for (si = 0; si < n; ++si)
636 size_t i = stack_vars_sorted[si];
637 HOST_WIDE_INT isize = stack_vars[i].size;
638 unsigned int ialign = stack_vars[i].alignb;
639 HOST_WIDE_INT offset = 0;
641 for (sj = si; sj-- > 0; )
643 size_t j = stack_vars_sorted[sj];
644 HOST_WIDE_INT jsize = stack_vars[j].size;
645 unsigned int jalign = stack_vars[j].alignb;
647 /* Ignore objects that aren't partition representatives. */
648 if (stack_vars[j].representative != j)
649 continue;
651 /* Ignore objects too large for the remaining space. */
652 if (isize < jsize)
653 continue;
655 /* Ignore conflicting objects. */
656 if (stack_var_conflict_p (i, j))
657 continue;
659 /* Do not mix objects of "small" (supported) alignment
660 and "large" (unsupported) alignment. */
661 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
662 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
663 continue;
665 /* Refine the remaining space check to include alignment. */
666 if (offset & (jalign - 1))
668 HOST_WIDE_INT toff = offset;
669 toff += jalign - 1;
670 toff &= -(HOST_WIDE_INT)jalign;
671 if (isize - (toff - offset) < jsize)
672 continue;
674 isize -= toff - offset;
675 offset = toff;
678 /* UNION the objects, placing J at OFFSET. */
679 union_stack_vars (i, j, offset);
681 isize -= jsize;
682 if (isize == 0)
683 break;
687 if (optimize)
688 update_alias_info_with_stack_vars ();
691 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
693 static void
694 dump_stack_var_partition (void)
696 size_t si, i, j, n = stack_vars_num;
698 for (si = 0; si < n; ++si)
700 i = stack_vars_sorted[si];
702 /* Skip variables that aren't partition representatives, for now. */
703 if (stack_vars[i].representative != i)
704 continue;
706 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
707 " align %u\n", (unsigned long) i, stack_vars[i].size,
708 stack_vars[i].alignb);
710 for (j = i; j != EOC; j = stack_vars[j].next)
712 fputc ('\t', dump_file);
713 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
714 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
715 stack_vars[j].offset);
720 /* Assign rtl to DECL at BASE + OFFSET. */
722 static void
723 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
724 HOST_WIDE_INT offset)
726 unsigned align;
727 rtx x;
729 /* If this fails, we've overflowed the stack frame. Error nicely? */
730 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
732 x = plus_constant (base, offset);
733 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
735 if (TREE_CODE (decl) != SSA_NAME)
737 /* Set the alignment we actually gave this decl if it isn't an SSA name.
738 If it is, we generate stack slots only accidentally, so it isn't as
739 important; we'll simply use the alignment that is already set. */
740 if (base == virtual_stack_vars_rtx)
741 offset -= frame_phase;
742 align = offset & -offset;
743 align *= BITS_PER_UNIT;
744 if (align == 0 || align > base_align)
745 align = base_align;
747 /* One would think that we could assert that we're not decreasing
748 alignment here, but (at least) the i386 port does exactly this
749 via the MINIMUM_ALIGNMENT hook. */
751 DECL_ALIGN (decl) = align;
752 DECL_USER_ALIGN (decl) = 0;
755 set_mem_attributes (x, SSAVAR (decl), true);
756 set_rtl (decl, x);
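/* Editorial note with an invented offset: "offset & -offset" above isolates
   the largest power of two dividing the offset, e.g. 24 & -24 == 8, so a
   slot at frame offset 24 is known to be 8-byte (64-bit) aligned relative
   to the frame base, capped by BASE_ALIGN.  */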
759 /* A subroutine of expand_used_vars. Give each partition representative
760 a unique location within the stack frame. Update each partition member
761 with that location. */
763 static void
764 expand_stack_vars (bool (*pred) (tree))
766 size_t si, i, j, n = stack_vars_num;
767 HOST_WIDE_INT large_size = 0, large_alloc = 0;
768 rtx large_base = NULL;
769 unsigned large_align = 0;
770 tree decl;
772 /* Determine if there are any variables requiring "large" alignment.
773 Since these are dynamically allocated, we only process these if
774 no predicate is involved. */
775 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
776 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
778 /* Find the total size of these variables. */
779 for (si = 0; si < n; ++si)
781 unsigned alignb;
783 i = stack_vars_sorted[si];
784 alignb = stack_vars[i].alignb;
786 /* Stop when we get to the first decl with "small" alignment. */
787 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
788 break;
790 /* Skip variables that aren't partition representatives. */
791 if (stack_vars[i].representative != i)
792 continue;
794 /* Skip variables that have already had rtl assigned. See also
795 add_stack_var where we perpetrate this pc_rtx hack. */
796 decl = stack_vars[i].decl;
797 if ((TREE_CODE (decl) == SSA_NAME
798 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
799 : DECL_RTL (decl)) != pc_rtx)
800 continue;
802 large_size += alignb - 1;
803 large_size &= -(HOST_WIDE_INT)alignb;
804 large_size += stack_vars[i].size;
807 /* If there were any, allocate space. */
808 if (large_size > 0)
809 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
810 large_align, true);
813 for (si = 0; si < n; ++si)
815 rtx base;
816 unsigned base_align, alignb;
817 HOST_WIDE_INT offset;
819 i = stack_vars_sorted[si];
821 /* Skip variables that aren't partition representatives, for now. */
822 if (stack_vars[i].representative != i)
823 continue;
825 /* Skip variables that have already had rtl assigned. See also
826 add_stack_var where we perpetrate this pc_rtx hack. */
827 decl = stack_vars[i].decl;
828 if ((TREE_CODE (decl) == SSA_NAME
829 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
830 : DECL_RTL (decl)) != pc_rtx)
831 continue;
833 /* Check the predicate to see whether this variable should be
834 allocated in this pass. */
835 if (pred && !pred (decl))
836 continue;
838 alignb = stack_vars[i].alignb;
839 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
841 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
842 base = virtual_stack_vars_rtx;
843 base_align = crtl->max_used_stack_slot_alignment;
845 else
847 /* Large alignment is only processed in the last pass. */
848 if (pred)
849 continue;
850 gcc_assert (large_base != NULL);
852 large_alloc += alignb - 1;
853 large_alloc &= -(HOST_WIDE_INT)alignb;
854 offset = large_alloc;
855 large_alloc += stack_vars[i].size;
857 base = large_base;
858 base_align = large_align;
861 /* Create rtl for each variable based on their location within the
862 partition. */
863 for (j = i; j != EOC; j = stack_vars[j].next)
865 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
866 expand_one_stack_var_at (stack_vars[j].decl,
867 base, base_align,
868 stack_vars[j].offset + offset);
872 gcc_assert (large_alloc == large_size);
875 /* Take into account all sizes of partitions and reset DECL_RTLs. */
876 static HOST_WIDE_INT
877 account_stack_vars (void)
879 size_t si, j, i, n = stack_vars_num;
880 HOST_WIDE_INT size = 0;
882 for (si = 0; si < n; ++si)
884 i = stack_vars_sorted[si];
886 /* Skip variables that aren't partition representatives, for now. */
887 if (stack_vars[i].representative != i)
888 continue;
890 size += stack_vars[i].size;
891 for (j = i; j != EOC; j = stack_vars[j].next)
892 set_rtl (stack_vars[j].decl, NULL);
894 return size;
897 /* A subroutine of expand_one_var. Called to immediately assign rtl
898 to a variable to be allocated in the stack frame. */
900 static void
901 expand_one_stack_var (tree var)
903 HOST_WIDE_INT size, offset;
904 unsigned byte_align;
906 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
907 byte_align = get_decl_align_unit (SSAVAR (var));
909 /* We handle highly aligned variables in expand_stack_vars. */
910 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
912 offset = alloc_stack_frame_space (size, byte_align);
914 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
915 crtl->max_used_stack_slot_alignment, offset);
918 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
919 that will reside in a hard register. */
921 static void
922 expand_one_hard_reg_var (tree var)
924 rest_of_decl_compilation (var, 0, 0);
927 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
928 that will reside in a pseudo register. */
930 static void
931 expand_one_register_var (tree var)
933 tree decl = SSAVAR (var);
934 tree type = TREE_TYPE (decl);
935 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
936 rtx x = gen_reg_rtx (reg_mode);
938 set_rtl (var, x);
940 /* Note if the object is a user variable. */
941 if (!DECL_ARTIFICIAL (decl))
942 mark_user_reg (x);
944 if (POINTER_TYPE_P (type))
945 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
948 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
949 has some associated error, e.g. its type is error-mark. We just need
950 to pick something that won't crash the rest of the compiler. */
952 static void
953 expand_one_error_var (tree var)
955 enum machine_mode mode = DECL_MODE (var);
956 rtx x;
958 if (mode == BLKmode)
959 x = gen_rtx_MEM (BLKmode, const0_rtx);
960 else if (mode == VOIDmode)
961 x = const0_rtx;
962 else
963 x = gen_reg_rtx (mode);
965 SET_DECL_RTL (var, x);
968 /* A subroutine of expand_one_var. VAR is a variable that will be
969 allocated to the local stack frame. Return true if we wish to
970 add VAR to STACK_VARS so that it will be coalesced with other
971 variables. Return false to allocate VAR immediately.
973 This function is used to reduce the number of variables considered
974 for coalescing, which reduces the size of the quadratic problem. */
976 static bool
977 defer_stack_allocation (tree var, bool toplevel)
979 /* If stack protection is enabled, *all* stack variables must be deferred,
980 so that we can re-order the strings to the top of the frame. */
981 if (flag_stack_protect)
982 return true;
984 /* We handle "large" alignment via dynamic allocation. We want to handle
985 this extra complication in only one place, so defer them. */
986 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
987 return true;
989 /* Variables in the outermost scope automatically conflict with
990 every other variable. The only reason to want to defer them
991 at all is that, after sorting, we can more efficiently pack
992 small variables in the stack frame. Continue to defer at -O2. */
993 if (toplevel && optimize < 2)
994 return false;
996 /* Without optimization, *most* variables are allocated from the
997 stack, which makes the quadratic problem large exactly when we
998 want compilation to proceed as quickly as possible. On the
999 other hand, we don't want the function's stack frame size to
1000 get completely out of hand. So we avoid adding scalars and
1001 "small" aggregates to the list at all. */
1002 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1003 return false;
1005 return true;
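/* Editorial example: at -O0 a block-scope "int i" (4 bytes, below the
   32-byte cutoff above) gets its slot immediately, while a block-scope
   64-byte buffer is deferred so it can be packed with others; with
   -fstack-protector, or a DECL_ALIGN above MAX_SUPPORTED_STACK_ALIGNMENT,
   everything is deferred.  */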
1008 /* A subroutine of expand_used_vars. Expand one variable according to
1009 its flavor. Variables to be placed on the stack are not actually
1010 expanded yet, merely recorded.
1011 When REALLY_EXPAND is false, only add stack values to be allocated.
1012 Return the stack usage this variable is supposed to take.
1015 static HOST_WIDE_INT
1016 expand_one_var (tree var, bool toplevel, bool really_expand)
1018 unsigned int align = BITS_PER_UNIT;
1019 tree origvar = var;
1021 var = SSAVAR (var);
1023 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1025 /* Because we don't know if VAR will be in a register or on the stack,
1026 we conservatively assume it will be on the stack even if VAR is
1027 eventually put into a register after the RA pass. For non-automatic
1028 variables, which won't be on stack, we collect alignment of
1029 type and ignore user specified alignment. */
1030 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1031 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1032 TYPE_MODE (TREE_TYPE (var)),
1033 TYPE_ALIGN (TREE_TYPE (var)));
1034 else if (DECL_HAS_VALUE_EXPR_P (var)
1035 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1036 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1037 or variables which were assigned a stack slot already by
1038 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1039 changed from the offset chosen to it. */
1040 align = crtl->stack_alignment_estimated;
1041 else
1042 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1044 /* If the variable alignment is very large we'll dynamically allocate
1045 it, which means that the in-frame portion is just a pointer. */
1046 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1047 align = POINTER_SIZE;
1050 if (SUPPORTS_STACK_ALIGNMENT
1051 && crtl->stack_alignment_estimated < align)
1053 /* stack_alignment_estimated shouldn't change after stack
1054 realign decision is made. */
1055 gcc_assert(!crtl->stack_realign_processed);
1056 crtl->stack_alignment_estimated = align;
1059 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1060 So here we only make sure stack_alignment_needed >= align. */
1061 if (crtl->stack_alignment_needed < align)
1062 crtl->stack_alignment_needed = align;
1063 if (crtl->max_used_stack_slot_alignment < align)
1064 crtl->max_used_stack_slot_alignment = align;
1066 if (TREE_CODE (origvar) == SSA_NAME)
1068 gcc_assert (TREE_CODE (var) != VAR_DECL
1069 || (!DECL_EXTERNAL (var)
1070 && !DECL_HAS_VALUE_EXPR_P (var)
1071 && !TREE_STATIC (var)
1072 && TREE_TYPE (var) != error_mark_node
1073 && !DECL_HARD_REGISTER (var)
1074 && really_expand));
1076 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1078 else if (DECL_EXTERNAL (var))
1080 else if (DECL_HAS_VALUE_EXPR_P (var))
1082 else if (TREE_STATIC (var))
1084 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1086 else if (TREE_TYPE (var) == error_mark_node)
1088 if (really_expand)
1089 expand_one_error_var (var);
1091 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1093 if (really_expand)
1094 expand_one_hard_reg_var (var);
1096 else if (use_register_for_decl (var))
1098 if (really_expand)
1099 expand_one_register_var (origvar);
1101 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1103 if (really_expand)
1105 error ("size of variable %q+D is too large", var);
1106 expand_one_error_var (var);
1109 else if (defer_stack_allocation (var, toplevel))
1110 add_stack_var (origvar);
1111 else
1113 if (really_expand)
1114 expand_one_stack_var (origvar);
1115 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1117 return 0;
1120 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1121 expanding variables. Those variables that can be put into registers
1122 are allocated pseudos; those that can't are put on the stack.
1124 TOPLEVEL is true if this is the outermost BLOCK. */
1126 static void
1127 expand_used_vars_for_block (tree block, bool toplevel)
1129 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1130 tree t;
1132 old_sv_num = toplevel ? 0 : stack_vars_num;
1134 /* Expand all variables at this level. */
1135 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1136 if (TREE_USED (t))
1137 expand_one_var (t, toplevel, true);
1139 this_sv_num = stack_vars_num;
1141 /* Expand all variables at containing levels. */
1142 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1143 expand_used_vars_for_block (t, false);
1145 /* Since we do not track exact variable lifetimes (which is not even
1146 possible for variables whose address escapes), we mirror the block
1147 tree in the interference graph. Here we cause all variables at this
1148 level, and all sublevels, to conflict. */
1149 if (old_sv_num < this_sv_num)
1151 new_sv_num = stack_vars_num;
1153 for (i = old_sv_num; i < new_sv_num; ++i)
1154 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1155 add_stack_var_conflict (i, j);
1159 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1160 and clear TREE_USED on all local variables. */
1162 static void
1163 clear_tree_used (tree block)
1165 tree t;
1167 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1168 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1169 TREE_USED (t) = 0;
1171 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1172 clear_tree_used (t);
1175 /* Examine TYPE and determine a bit mask of the following features. */
1177 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1178 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1179 #define SPCT_HAS_ARRAY 4
1180 #define SPCT_HAS_AGGREGATE 8
1182 static unsigned int
1183 stack_protect_classify_type (tree type)
1185 unsigned int ret = 0;
1186 tree t;
1188 switch (TREE_CODE (type))
1190 case ARRAY_TYPE:
1191 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1192 if (t == char_type_node
1193 || t == signed_char_type_node
1194 || t == unsigned_char_type_node)
1196 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1197 unsigned HOST_WIDE_INT len;
1199 if (!TYPE_SIZE_UNIT (type)
1200 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1201 len = max;
1202 else
1203 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1205 if (len < max)
1206 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1207 else
1208 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1210 else
1211 ret = SPCT_HAS_ARRAY;
1212 break;
1214 case UNION_TYPE:
1215 case QUAL_UNION_TYPE:
1216 case RECORD_TYPE:
1217 ret = SPCT_HAS_AGGREGATE;
1218 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1219 if (TREE_CODE (t) == FIELD_DECL)
1220 ret |= stack_protect_classify_type (TREE_TYPE (t));
1221 break;
1223 default:
1224 break;
1227 return ret;
1230 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1231 part of the local stack frame. Remember if we ever return nonzero for
1232 any variable in this function. The return value is the phase number in
1233 which the variable should be allocated. */
1235 static int
1236 stack_protect_decl_phase (tree decl)
1238 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1239 int ret = 0;
1241 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1242 has_short_buffer = true;
1244 if (flag_stack_protect == 2)
1246 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1247 && !(bits & SPCT_HAS_AGGREGATE))
1248 ret = 1;
1249 else if (bits & SPCT_HAS_ARRAY)
1250 ret = 2;
1252 else
1253 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1255 if (ret)
1256 has_protected_decls = true;
1258 return ret;
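/* Editorial example with made-up declarations and the default
   --param ssp-buffer-size of 8: a local "char buf[64]" classifies as a
   large character array and gets phase 1 (closest to the guard);
   "char small[4]" is a small character array and is only protected
   (phase 1) under -fstack-protector-all, i.e. flag_stack_protect == 2;
   a non-character "int v[16]" then falls into phase 2.  */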
1261 /* Two helper routines that check for phase 1 and phase 2. These are used
1262 as callbacks for expand_stack_vars. */
1264 static bool
1265 stack_protect_decl_phase_1 (tree decl)
1267 return stack_protect_decl_phase (decl) == 1;
1270 static bool
1271 stack_protect_decl_phase_2 (tree decl)
1273 return stack_protect_decl_phase (decl) == 2;
1276 /* Ensure that variables in different stack protection phases conflict
1277 so that they are not merged and share the same stack slot. */
1279 static void
1280 add_stack_protection_conflicts (void)
1282 size_t i, j, n = stack_vars_num;
1283 unsigned char *phase;
1285 phase = XNEWVEC (unsigned char, n);
1286 for (i = 0; i < n; ++i)
1287 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1289 for (i = 0; i < n; ++i)
1291 unsigned char ph_i = phase[i];
1292 for (j = 0; j < i; ++j)
1293 if (ph_i != phase[j])
1294 add_stack_var_conflict (i, j);
1297 XDELETEVEC (phase);
1300 /* Create a decl for the guard at the top of the stack frame. */
1302 static void
1303 create_stack_guard (void)
1305 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1306 VAR_DECL, NULL, ptr_type_node);
1307 TREE_THIS_VOLATILE (guard) = 1;
1308 TREE_USED (guard) = 1;
1309 expand_one_stack_var (guard);
1310 crtl->stack_protect_guard = guard;
1313 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1314 expanding variables. Those variables that can be put into registers
1315 are allocated pseudos; those that can't are put on the stack.
1317 TOPLEVEL is true if this is the outermost BLOCK. */
1319 static HOST_WIDE_INT
1320 account_used_vars_for_block (tree block, bool toplevel)
1322 tree t;
1323 HOST_WIDE_INT size = 0;
1325 /* Expand all variables at this level. */
1326 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1327 if (TREE_USED (t))
1328 size += expand_one_var (t, toplevel, false);
1330 /* Expand all variables at containing levels. */
1331 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1332 size += account_used_vars_for_block (t, false);
1334 return size;
1337 /* Prepare for expanding variables. */
1338 static void
1339 init_vars_expansion (void)
1341 tree t;
1342 unsigned ix;
1343 /* Set TREE_USED on all variables in the local_decls. */
1344 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1345 TREE_USED (t) = 1;
1347 /* Clear TREE_USED on all variables associated with a block scope. */
1348 clear_tree_used (DECL_INITIAL (current_function_decl));
1350 /* Initialize local stack smashing state. */
1351 has_protected_decls = false;
1352 has_short_buffer = false;
1355 /* Free up stack variable graph data. */
1356 static void
1357 fini_vars_expansion (void)
1359 size_t i, n = stack_vars_num;
1360 for (i = 0; i < n; i++)
1361 BITMAP_FREE (stack_vars[i].conflicts);
1362 XDELETEVEC (stack_vars);
1363 XDELETEVEC (stack_vars_sorted);
1364 stack_vars = NULL;
1365 stack_vars_alloc = stack_vars_num = 0;
1368 /* Make a fair guess for the size of the stack frame of the decl
1369 passed. This doesn't have to be exact, the result is only used
1370 in the inline heuristics. So we don't want to run the full stack
1371 var packing algorithm (which is quadratic in the number of stack
1372 vars). Instead, we calculate the total size of all stack vars.
1373 This turns out to be a pretty fair estimate -- packing of stack
1374 vars doesn't happen very often. */
1376 HOST_WIDE_INT
1377 estimated_stack_frame_size (tree decl)
1379 HOST_WIDE_INT size = 0;
1380 size_t i;
1381 tree var, outer_block = DECL_INITIAL (current_function_decl);
1382 unsigned ix;
1383 tree old_cur_fun_decl = current_function_decl;
1384 current_function_decl = decl;
1385 push_cfun (DECL_STRUCT_FUNCTION (decl));
1387 init_vars_expansion ();
1389 FOR_EACH_LOCAL_DECL (cfun, ix, var)
1391 if (TREE_USED (var))
1392 size += expand_one_var (var, true, false);
1393 TREE_USED (var) = 1;
1395 size += account_used_vars_for_block (outer_block, true);
1397 if (stack_vars_num > 0)
1399 /* Fake sorting the stack vars for account_stack_vars (). */
1400 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1401 for (i = 0; i < stack_vars_num; ++i)
1402 stack_vars_sorted[i] = i;
1403 size += account_stack_vars ();
1404 fini_vars_expansion ();
1406 pop_cfun ();
1407 current_function_decl = old_cur_fun_decl;
1408 return size;
1411 /* Expand all variables used in the function. */
1413 static void
1414 expand_used_vars (void)
1416 tree var, outer_block = DECL_INITIAL (current_function_decl);
1417 VEC(tree,heap) *maybe_local_decls = NULL;
1418 unsigned i;
1419 unsigned len;
1421 /* Compute the phase of the stack frame for this function. */
1423 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1424 int off = STARTING_FRAME_OFFSET % align;
1425 frame_phase = off ? align - off : 0;
1428 init_vars_expansion ();
1430 for (i = 0; i < SA.map->num_partitions; i++)
1432 tree var = partition_to_var (SA.map, i);
1434 gcc_assert (is_gimple_reg (var));
1435 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1436 expand_one_var (var, true, true);
1437 else
1439 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1440 contain the default def (representing the parm or result itself)
1441 we don't do anything here. But for those which don't contain the
1442 default def (representing a temporary based on the parm/result)
1443 we need to allocate space just like for normal VAR_DECLs. */
1444 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1446 expand_one_var (var, true, true);
1447 gcc_assert (SA.partition_to_pseudo[i]);
1452 /* At this point all variables on the local_decls with TREE_USED
1453 set are not associated with any block scope. Lay them out. */
1455 len = VEC_length (tree, cfun->local_decls);
1456 FOR_EACH_LOCAL_DECL (cfun, i, var)
1458 bool expand_now = false;
1460 /* Expanded above already. */
1461 if (is_gimple_reg (var))
1463 TREE_USED (var) = 0;
1464 goto next;
1466 /* We didn't set a block for static or extern because it's hard
1467 to tell the difference between a global variable (re)declared
1468 in a local scope, and one that's really declared there to
1469 begin with. And it doesn't really matter much, since we're
1470 not giving them stack space. Expand them now. */
1471 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1472 expand_now = true;
1474 /* If the variable is not associated with any block, then it
1475 was created by the optimizers, and could be live anywhere
1476 in the function. */
1477 else if (TREE_USED (var))
1478 expand_now = true;
1480 /* Finally, mark all variables on the list as used. We'll use
1481 this in a moment when we expand those associated with scopes. */
1482 TREE_USED (var) = 1;
1484 if (expand_now)
1485 expand_one_var (var, true, true);
1487 next:
1488 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1490 rtx rtl = DECL_RTL_IF_SET (var);
1492 /* Keep artificial non-ignored vars in cfun->local_decls
1493 chain until instantiate_decls. */
1494 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1495 add_local_decl (cfun, var);
1496 else if (rtl == NULL_RTX)
1497 /* If rtl isn't set yet, which can happen e.g. with
1498 -fstack-protector, retry before returning from this
1499 function. */
1500 VEC_safe_push (tree, heap, maybe_local_decls, var);
1504 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1506 +-----------------+-----------------+
1507 | ...processed... | ...duplicates...|
1508 +-----------------+-----------------+
1509                   ^
1510                   +-- LEN points here.
1512 We just want the duplicates, as those are the artificial
1513 non-ignored vars that we want to keep until instantiate_decls.
1514 Move them down and truncate the array. */
1515 if (!VEC_empty (tree, cfun->local_decls))
1516 VEC_block_remove (tree, cfun->local_decls, 0, len);
1518 /* At this point, all variables within the block tree with TREE_USED
1519 set are actually used by the optimized function. Lay them out. */
1520 expand_used_vars_for_block (outer_block, true);
1522 if (stack_vars_num > 0)
1524 /* Due to the way alias sets work, no variables with non-conflicting
1525 alias sets may be assigned the same address. Add conflicts to
1526 reflect this. */
1527 add_alias_set_conflicts ();
1529 /* If stack protection is enabled, we don't share space between
1530 vulnerable data and non-vulnerable data. */
1531 if (flag_stack_protect)
1532 add_stack_protection_conflicts ();
1534 /* Now that we have collected all stack variables, and have computed a
1535 minimal interference graph, attempt to save some stack space. */
1536 partition_stack_vars ();
1537 if (dump_file)
1538 dump_stack_var_partition ();
1541 /* There are several conditions under which we should create a
1542 stack guard: protect-all, alloca used, protected decls present. */
1543 if (flag_stack_protect == 2
1544 || (flag_stack_protect
1545 && (cfun->calls_alloca || has_protected_decls)))
1546 create_stack_guard ();
1548 /* Assign rtl to each variable based on these partitions. */
1549 if (stack_vars_num > 0)
1551 /* Reorder decls to be protected by iterating over the variables
1552 array multiple times, and allocating out of each phase in turn. */
1553 /* ??? We could probably integrate this into the qsort we did
1554 earlier, such that we naturally see these variables first,
1555 and thus naturally allocate things in the right order. */
1556 if (has_protected_decls)
1558 /* Phase 1 contains only character arrays. */
1559 expand_stack_vars (stack_protect_decl_phase_1);
1561 /* Phase 2 contains other kinds of arrays. */
1562 if (flag_stack_protect == 2)
1563 expand_stack_vars (stack_protect_decl_phase_2);
1566 expand_stack_vars (NULL);
1568 fini_vars_expansion ();
1571 /* If there were any artificial non-ignored vars without rtl
1572 found earlier, see if deferred stack allocation hasn't assigned
1573 rtl to them. */
1574 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1576 rtx rtl = DECL_RTL_IF_SET (var);
1578 /* Keep artificial non-ignored vars in cfun->local_decls
1579 chain until instantiate_decls. */
1580 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1581 add_local_decl (cfun, var);
1583 VEC_free (tree, heap, maybe_local_decls);
1585 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1586 if (STACK_ALIGNMENT_NEEDED)
1588 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1589 if (!FRAME_GROWS_DOWNWARD)
1590 frame_offset += align - 1;
1591 frame_offset &= -align;
1596 /* If we need to produce a detailed dump, print the tree representation
1597 for STMT to the dump file. SINCE is the last RTX after which the RTL
1598 generated for STMT should have been appended. */
1600 static void
1601 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1603 if (dump_file && (dump_flags & TDF_DETAILS))
1605 fprintf (dump_file, "\n;; ");
1606 print_gimple_stmt (dump_file, stmt, 0,
1607 TDF_SLIM | (dump_flags & TDF_LINENO));
1608 fprintf (dump_file, "\n");
1610 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1614 /* Maps the blocks that do not contain tree labels to rtx labels. */
1616 static struct pointer_map_t *lab_rtx_for_bb;
1618 /* Returns the label_rtx expression for a label starting basic block BB. */
1620 static rtx
1621 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1623 gimple_stmt_iterator gsi;
1624 tree lab;
1625 gimple lab_stmt;
1626 void **elt;
1628 if (bb->flags & BB_RTL)
1629 return block_label (bb);
1631 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1632 if (elt)
1633 return (rtx) *elt;
1635 /* Find the tree label if it is present. */
1637 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1639 lab_stmt = gsi_stmt (gsi);
1640 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1641 break;
1643 lab = gimple_label_label (lab_stmt);
1644 if (DECL_NONLOCAL (lab))
1645 break;
1647 return label_rtx (lab);
1650 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1651 *elt = gen_label_rtx ();
1652 return (rtx) *elt;
1656 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1657 of a basic block where we just expanded the conditional at the end,
1658 possibly clean up the CFG and instruction sequence. LAST is the
1659 last instruction before the just emitted jump sequence. */
1661 static void
1662 maybe_cleanup_end_of_block (edge e, rtx last)
1664 /* Special case: when jumpif decides that the condition is
1665 trivial it emits an unconditional jump (and the necessary
1666 barrier). But we still have two edges, the fallthru one is
1667 wrong. purge_dead_edges would clean this up later. Unfortunately
1668 we have to insert insns (and split edges) before
1669 find_many_sub_basic_blocks and hence before purge_dead_edges.
1670 But splitting edges might create new blocks which depend on the
1671 fact that if there are two edges there's no barrier. So the
1672 barrier would get lost and verify_flow_info would ICE. Instead
1673 of auditing all edge splitters to care for the barrier (which
1674 normally isn't there in a cleaned CFG), fix it here. */
1675 if (BARRIER_P (get_last_insn ()))
1677 rtx insn;
1678 remove_edge (e);
1679 /* Now, we have a single successor block; if we have insns to
1680 insert on the remaining edge we potentially will insert
1681 it at the end of this block (if the dest block isn't feasible)
1682 in order to avoid splitting the edge. This insertion will take
1683 place in front of the last jump. But we might have emitted
1684 multiple jumps (conditional and one unconditional) to the
1685 same destination. Inserting in front of the last one then
1686 is a problem. See PR 40021. We fix this by deleting all
1687 jumps except the last unconditional one. */
1688 insn = PREV_INSN (get_last_insn ());
1689 /* Make sure we have an unconditional jump. Otherwise we're
1690 confused. */
1691 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1692 for (insn = PREV_INSN (insn); insn != last;)
1694 insn = PREV_INSN (insn);
1695 if (JUMP_P (NEXT_INSN (insn)))
1696 delete_insn (NEXT_INSN (insn));
1701 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1702 Returns a new basic block if we've terminated the current basic
1703 block and created a new one. */
1705 static basic_block
1706 expand_gimple_cond (basic_block bb, gimple stmt)
1708 basic_block new_bb, dest;
1709 edge new_edge;
1710 edge true_edge;
1711 edge false_edge;
1712 rtx last2, last;
1713 enum tree_code code;
1714 tree op0, op1;
1716 code = gimple_cond_code (stmt);
1717 op0 = gimple_cond_lhs (stmt);
1718 op1 = gimple_cond_rhs (stmt);
1719 /* We're sometimes presented with such code:
1720 D.123_1 = x < y;
1721 if (D.123_1 != 0)
1723 This would expand to two comparisons which then later might
1724 be cleaned up by combine. But some pattern matchers like if-conversion
1725 work better when there's only one compare, so make up for this
1726 here as special exception if TER would have made the same change. */
1727 if (gimple_cond_single_var_p (stmt)
1728 && SA.values
1729 && TREE_CODE (op0) == SSA_NAME
1730 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1732 gimple second = SSA_NAME_DEF_STMT (op0);
1733 if (gimple_code (second) == GIMPLE_ASSIGN)
1735 enum tree_code code2 = gimple_assign_rhs_code (second);
1736 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1738 code = code2;
1739 op0 = gimple_assign_rhs1 (second);
1740 op1 = gimple_assign_rhs2 (second);
1742 /* If jumps are cheap turn some more codes into
1743 jumpy sequences. */
1744 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1746 if ((code2 == BIT_AND_EXPR
1747 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1748 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1749 || code2 == TRUTH_AND_EXPR)
1751 code = TRUTH_ANDIF_EXPR;
1752 op0 = gimple_assign_rhs1 (second);
1753 op1 = gimple_assign_rhs2 (second);
1755 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1757 code = TRUTH_ORIF_EXPR;
1758 op0 = gimple_assign_rhs1 (second);
1759 op1 = gimple_assign_rhs2 (second);
1765 last2 = last = get_last_insn ();
1767 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1768 if (gimple_has_location (stmt))
1770 set_curr_insn_source_location (gimple_location (stmt));
1771 set_curr_insn_block (gimple_block (stmt));
1774 /* These flags have no purpose in RTL land. */
1775 true_edge->flags &= ~EDGE_TRUE_VALUE;
1776 false_edge->flags &= ~EDGE_FALSE_VALUE;
1778 /* We can either have a pure conditional jump with one fallthru edge or
1779 two-way jump that needs to be decomposed into two basic blocks. */
1780 if (false_edge->dest == bb->next_bb)
1782 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1783 true_edge->probability);
1784 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1785 if (true_edge->goto_locus)
1787 set_curr_insn_source_location (true_edge->goto_locus);
1788 set_curr_insn_block (true_edge->goto_block);
1789 true_edge->goto_locus = curr_insn_locator ();
1791 true_edge->goto_block = NULL;
1792 false_edge->flags |= EDGE_FALLTHRU;
1793 maybe_cleanup_end_of_block (false_edge, last);
1794 return NULL;
1796 if (true_edge->dest == bb->next_bb)
1798 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1799 false_edge->probability);
1800 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1801 if (false_edge->goto_locus)
1803 set_curr_insn_source_location (false_edge->goto_locus);
1804 set_curr_insn_block (false_edge->goto_block);
1805 false_edge->goto_locus = curr_insn_locator ();
1807 false_edge->goto_block = NULL;
1808 true_edge->flags |= EDGE_FALLTHRU;
1809 maybe_cleanup_end_of_block (true_edge, last);
1810 return NULL;
1813 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1814 true_edge->probability);
1815 last = get_last_insn ();
1816 if (false_edge->goto_locus)
1818 set_curr_insn_source_location (false_edge->goto_locus);
1819 set_curr_insn_block (false_edge->goto_block);
1820 false_edge->goto_locus = curr_insn_locator ();
1822 false_edge->goto_block = NULL;
1823 emit_jump (label_rtx_for_bb (false_edge->dest));
1825 BB_END (bb) = last;
1826 if (BARRIER_P (BB_END (bb)))
1827 BB_END (bb) = PREV_INSN (BB_END (bb));
1828 update_bb_for_insn (bb);
1830 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1831 dest = false_edge->dest;
1832 redirect_edge_succ (false_edge, new_bb);
1833 false_edge->flags |= EDGE_FALLTHRU;
1834 new_bb->count = false_edge->count;
1835 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1836 new_edge = make_edge (new_bb, dest, 0);
1837 new_edge->probability = REG_BR_PROB_BASE;
1838 new_edge->count = new_bb->count;
1839 if (BARRIER_P (BB_END (new_bb)))
1840 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1841 update_bb_for_insn (new_bb);
1843 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1845 if (true_edge->goto_locus)
1847 set_curr_insn_source_location (true_edge->goto_locus);
1848 set_curr_insn_block (true_edge->goto_block);
1849 true_edge->goto_locus = curr_insn_locator ();
1851 true_edge->goto_block = NULL;
1853 return new_bb;
1856 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1857 statement STMT. */
1859 static void
1860 expand_call_stmt (gimple stmt)
1862 tree exp;
1863 tree lhs = gimple_call_lhs (stmt);
1864 size_t i;
1865 bool builtin_p;
1866 tree decl;
1868 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1870 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1871 decl = gimple_call_fndecl (stmt);
1872 builtin_p = decl && DECL_BUILT_IN (decl);
1874 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1875 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1877 for (i = 0; i < gimple_call_num_args (stmt); i++)
1879 tree arg = gimple_call_arg (stmt, i);
1880 gimple def;
1881 /* TER addresses into arguments of builtin functions so we have a
1882 chance to infer more correct alignment information. See PR39954. */
1883 if (builtin_p
1884 && TREE_CODE (arg) == SSA_NAME
1885 && (def = get_gimple_for_ssa_name (arg))
1886 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1887 arg = gimple_assign_rhs1 (def);
1888 CALL_EXPR_ARG (exp, i) = arg;
1891 if (gimple_has_side_effects (stmt))
1892 TREE_SIDE_EFFECTS (exp) = 1;
1894 if (gimple_call_nothrow_p (stmt))
1895 TREE_NOTHROW (exp) = 1;
1897 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1898 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1899 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1900 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1901 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1902 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1903 TREE_BLOCK (exp) = gimple_block (stmt);
1905 if (lhs)
1906 expand_assignment (lhs, exp, false);
1907 else
1908 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1911 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1912 STMT that doesn't require special handling for outgoing edges. That is,
1913 no tailcalls and no GIMPLE_COND. */
1915 static void
1916 expand_gimple_stmt_1 (gimple stmt)
1918 tree op0;
1919 switch (gimple_code (stmt))
1921 case GIMPLE_GOTO:
1922 op0 = gimple_goto_dest (stmt);
1923 if (TREE_CODE (op0) == LABEL_DECL)
1924 expand_goto (op0);
1925 else
1926 expand_computed_goto (op0);
1927 break;
1928 case GIMPLE_LABEL:
1929 expand_label (gimple_label_label (stmt));
1930 break;
1931 case GIMPLE_NOP:
1932 case GIMPLE_PREDICT:
1933 break;
1934 case GIMPLE_SWITCH:
1935 expand_case (stmt);
1936 break;
1937 case GIMPLE_ASM:
1938 expand_asm_stmt (stmt);
1939 break;
1940 case GIMPLE_CALL:
1941 expand_call_stmt (stmt);
1942 break;
1944 case GIMPLE_RETURN:
1945 op0 = gimple_return_retval (stmt);
1947 if (op0 && op0 != error_mark_node)
1949 tree result = DECL_RESULT (current_function_decl);
1951 /* If we are not returning the current function's RESULT_DECL,
1952 build an assignment to it. */
1953 if (op0 != result)
1955 /* I believe that a function's RESULT_DECL is unique. */
1956 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1958 /* ??? We'd like to simply use expand_assignment here,
1959 but this fails if the value is of BLKmode but the return
1960 decl is a register. expand_return has special handling
1961 for this combination, which eventually should move
1962 to common code. See comments there. Until then, let's
1963 build a modify expression :-/ */
1964 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1965 result, op0);
1968 if (!op0)
1969 expand_null_return ();
1970 else
1971 expand_return (op0);
1972 break;
1974 case GIMPLE_ASSIGN:
1976 tree lhs = gimple_assign_lhs (stmt);
1978 /* Tree expand used to fiddle with |= and &= of two bitfield
1979 COMPONENT_REFs here. This can't happen with gimple; the LHS
1980 of binary assigns must be a gimple reg. */
1982 if (TREE_CODE (lhs) != SSA_NAME
1983 || get_gimple_rhs_class (gimple_expr_code (stmt))
1984 == GIMPLE_SINGLE_RHS)
1986 tree rhs = gimple_assign_rhs1 (stmt);
1987 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1988 == GIMPLE_SINGLE_RHS);
1989 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1990 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1991 expand_assignment (lhs, rhs,
1992 gimple_assign_nontemporal_move_p (stmt));
1994 else
1996 rtx target, temp;
1997 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1998 struct separate_ops ops;
1999 bool promoted = false;
2001 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2002 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2003 promoted = true;
2005 ops.code = gimple_assign_rhs_code (stmt);
2006 ops.type = TREE_TYPE (lhs);
2007 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2009 case GIMPLE_TERNARY_RHS:
2010 ops.op2 = gimple_assign_rhs3 (stmt);
2011 /* Fallthru */
2012 case GIMPLE_BINARY_RHS:
2013 ops.op1 = gimple_assign_rhs2 (stmt);
2014 /* Fallthru */
2015 case GIMPLE_UNARY_RHS:
2016 ops.op0 = gimple_assign_rhs1 (stmt);
2017 break;
2018 default:
2019 gcc_unreachable ();
2021 ops.location = gimple_location (stmt);
2023 /* If we want to use a nontemporal store, force the value to
2024 a register first. If we store into a promoted register,
2025 don't directly expand to target. */
2026 temp = nontemporal || promoted ? NULL_RTX : target;
2027 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2028 EXPAND_NORMAL);
2030 if (temp == target)
2032 else if (promoted)
2034 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2035 /* If TEMP is a VOIDmode constant, use convert_modes to make
2036 sure that we properly convert it. */
2037 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2039 temp = convert_modes (GET_MODE (target),
2040 TYPE_MODE (ops.type),
2041 temp, unsignedp);
2042 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2043 GET_MODE (target), temp, unsignedp);
2046 convert_move (SUBREG_REG (target), temp, unsignedp);
2048 else if (nontemporal && emit_storent_insn (target, temp))
2050 else
2052 temp = force_operand (temp, target);
2053 if (temp != target)
2054 emit_move_insn (target, temp);
2058 break;
2060 default:
2061 gcc_unreachable ();
2065 /* Expand one gimple statement STMT and return the last RTL instruction
2066 before any of the newly generated ones.
2068 In addition to generating the necessary RTL instructions this also
2069 sets REG_EH_REGION notes if necessary and sets the current source
2070 location for diagnostics. */
2072 static rtx
2073 expand_gimple_stmt (gimple stmt)
2075 int lp_nr = 0;
2076 rtx last = NULL;
2077 location_t saved_location = input_location;
2079 last = get_last_insn ();
2081 /* If this is an expression of some kind and it has an associated line
2082 number, then emit the line number before expanding the expression.
2084 We need to save and restore the file and line information so that
2085 errors discovered during expansion are emitted with the right
2086 information. It would be better if the diagnostic routines
2087 used the file/line information embedded in the tree nodes rather
2088 than globals. */
2089 gcc_assert (cfun);
2091 if (gimple_has_location (stmt))
2093 input_location = gimple_location (stmt);
2094 set_curr_insn_source_location (input_location);
2096 /* Record where the insns produced belong. */
2097 set_curr_insn_block (gimple_block (stmt));
2100 expand_gimple_stmt_1 (stmt);
2101 /* Free any temporaries used to evaluate this statement. */
2102 free_temp_slots ();
2104 input_location = saved_location;
2106 /* Mark all insns that may trap. */
2107 lp_nr = lookup_stmt_eh_lp (stmt);
2108 if (lp_nr)
2110 rtx insn;
2111 for (insn = next_real_insn (last); insn;
2112 insn = next_real_insn (insn))
2114 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2115 /* If we want exceptions for non-call insns, any
2116 may_trap_p instruction may throw. */
2117 && GET_CODE (PATTERN (insn)) != CLOBBER
2118 && GET_CODE (PATTERN (insn)) != USE
2119 && insn_could_throw_p (insn))
2120 make_reg_eh_region_note (insn, 0, lp_nr);
2124 return last;
2127 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2128 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2129 generated a tail call (something that might be denied by the ABI
2130 rules governing the call; see calls.c).
2132 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2133 execution can still reach the rest of BB. The case here is __builtin_sqrt,
2134 where the NaN result goes through the external function (with a
2135 tailcall) and the normal result happens via a sqrt instruction. */
2137 static basic_block
2138 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2140 rtx last2, last;
2141 edge e;
2142 edge_iterator ei;
2143 int probability;
2144 gcov_type count;
2146 last2 = last = expand_gimple_stmt (stmt);
2148 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2149 if (CALL_P (last) && SIBLING_CALL_P (last))
2150 goto found;
2152 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2154 *can_fallthru = true;
2155 return NULL;
2157 found:
2158 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2159 Any instructions emitted here are about to be deleted. */
2160 do_pending_stack_adjust ();
2162 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2163 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2164 EH or abnormal edges, we shouldn't have created a tail call in
2165 the first place. So it seems to me we should just be removing
2166 all edges here, or redirecting the existing fallthru edge to
2167 the exit block. */
2169 probability = 0;
2170 count = 0;
2172 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2174 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2176 if (e->dest != EXIT_BLOCK_PTR)
2178 e->dest->count -= e->count;
2179 e->dest->frequency -= EDGE_FREQUENCY (e);
2180 if (e->dest->count < 0)
2181 e->dest->count = 0;
2182 if (e->dest->frequency < 0)
2183 e->dest->frequency = 0;
2185 count += e->count;
2186 probability += e->probability;
2187 remove_edge (e);
2189 else
2190 ei_next (&ei);
2193 /* This is somewhat ugly: the call_expr expander often emits instructions
2194 after the sibcall (to perform the function return). These confuse the
2195 find_many_sub_basic_blocks code, so we need to get rid of them. */
2196 last = NEXT_INSN (last);
2197 gcc_assert (BARRIER_P (last));
2199 *can_fallthru = false;
2200 while (NEXT_INSN (last))
2202 /* For instance, the sqrt builtin expander expands an if with a
2203 sibcall in the then-arm and a label for the else-arm. */
2204 if (LABEL_P (NEXT_INSN (last)))
2206 *can_fallthru = true;
2207 break;
2209 delete_insn (NEXT_INSN (last));
2212 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2213 e->probability += probability;
2214 e->count += count;
2215 BB_END (bb) = last;
2216 update_bb_for_insn (bb);
2218 if (NEXT_INSN (last))
2220 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2222 last = BB_END (bb);
2223 if (BARRIER_P (last))
2224 BB_END (bb) = PREV_INSN (last);
2227 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2229 return bb;
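/* Editor's illustration, not part of the original cfgexpand.c: the
   __builtin_sqrt case described above can be pictured at the source
   level.  On a target with a hardware sqrt instruction and with
   -fmath-errno in effect, a tail call such as the one below may be
   expanded into the sqrt insn for the normal result plus a
   conditional sibcall to the C library's sqrt for the NaN/errno
   path, which is exactly the *conditional* tail call for which
   CAN_FALLTHRU is set.  The function name is made up.  */

double
wrapped_sqrt (double x)
{
  return __builtin_sqrt (x);
}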
2232 /* Return the difference between the floor and the truncated result of
2233 a signed division by OP1 with remainder MOD. */
2234 static rtx
2235 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2237 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2238 return gen_rtx_IF_THEN_ELSE
2239 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2240 gen_rtx_IF_THEN_ELSE
2241 (mode, gen_rtx_LT (BImode,
2242 gen_rtx_DIV (mode, op1, mod),
2243 const0_rtx),
2244 constm1_rtx, const0_rtx),
2245 const0_rtx);
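/* Editor's illustration, not part of the original cfgexpand.c: the
   RTL built above encodes floor (op0 / op1) as the truncated quotient
   plus this adjustment, where the adjustment is -1 exactly when the
   remainder is nonzero and the operands have opposite signs (the
   op1 / mod < 0 test above is one way of detecting that).  A minimal
   C sketch of the same identity, e.g. -7 / 2: trunc = -3, mod = -1,
   adj = -1, floor = -4.  */

static int
floor_div_sketch (int op0, int op1)
{
  int div = op0 / op1;   /* truncated quotient */
  int mod = op0 % op1;   /* remainder, takes the sign of op0 */
  int adj = (mod != 0 && (op0 < 0) != (op1 < 0)) ? -1 : 0;
  return div + adj;
}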
2248 /* Return the difference between the ceil and the truncated result of
2249 a signed division by OP1 with remainder MOD. */
2250 static rtx
2251 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2253 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2254 return gen_rtx_IF_THEN_ELSE
2255 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2256 gen_rtx_IF_THEN_ELSE
2257 (mode, gen_rtx_GT (BImode,
2258 gen_rtx_DIV (mode, op1, mod),
2259 const0_rtx),
2260 const1_rtx, const0_rtx),
2261 const0_rtx);
2264 /* Return the difference between the ceil and the truncated result of
2265 an unsigned division by OP1 with remainder MOD. */
2266 static rtx
2267 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2269 /* (mod != 0 ? 1 : 0) */
2270 return gen_rtx_IF_THEN_ELSE
2271 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2272 const1_rtx, const0_rtx);
2275 /* Return the difference between the rounded and the truncated result
2276 of a signed division by OP1 with remainder MOD. Halfway cases are
2277 rounded away from zero, rather than to the nearest even number. */
2278 static rtx
2279 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2281 /* (abs (mod) >= abs (op1) - abs (mod)
2282 ? (op1 / mod > 0 ? 1 : -1)
2283 : 0) */
2284 return gen_rtx_IF_THEN_ELSE
2285 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2286 gen_rtx_MINUS (mode,
2287 gen_rtx_ABS (mode, op1),
2288 gen_rtx_ABS (mode, mod))),
2289 gen_rtx_IF_THEN_ELSE
2290 (mode, gen_rtx_GT (BImode,
2291 gen_rtx_DIV (mode, op1, mod),
2292 const0_rtx),
2293 const1_rtx, constm1_rtx),
2294 const0_rtx);
2297 /* Return the difference between the rounded and the truncated result
2298 of an unsigned division by OP1 with remainder MOD. Halfway cases
2299 are rounded away from zero, rather than to the nearest even
2300 number. */
2301 static rtx
2302 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2304 /* (mod >= op1 - mod ? 1 : 0) */
2305 return gen_rtx_IF_THEN_ELSE
2306 (mode, gen_rtx_GE (BImode, mod,
2307 gen_rtx_MINUS (mode, op1, mod)),
2308 const1_rtx, const0_rtx);
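/* Editor's illustration, not part of the original cfgexpand.c: the
   unsigned round-to-nearest division that round_udiv_adjust supports
   is the truncated quotient plus one whenever the remainder is at
   least half the divisor, e.g. 7 / 2: trunc = 3, mod = 1,
   mod >= op1 - mod, so the result is 4.  */

static unsigned int
round_udiv_sketch (unsigned int op0, unsigned int op1)
{
  unsigned int div = op0 / op1;
  unsigned int mod = op0 % op1;
  return div + (mod >= op1 - mod ? 1u : 0u);
}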
2311 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2312 any rtl. */
2314 static rtx
2315 convert_debug_memory_address (enum machine_mode mode, rtx x)
2317 enum machine_mode xmode = GET_MODE (x);
2319 #ifndef POINTERS_EXTEND_UNSIGNED
2320 gcc_assert (mode == Pmode);
2321 gcc_assert (xmode == mode || xmode == VOIDmode);
2322 #else
2323 gcc_assert (mode == Pmode || mode == ptr_mode);
2325 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2326 return x;
2328 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2329 x = simplify_gen_subreg (mode, x, xmode,
2330 subreg_lowpart_offset
2331 (mode, xmode));
2332 else if (POINTERS_EXTEND_UNSIGNED > 0)
2333 x = gen_rtx_ZERO_EXTEND (mode, x);
2334 else if (!POINTERS_EXTEND_UNSIGNED)
2335 x = gen_rtx_SIGN_EXTEND (mode, x);
2336 else
2337 gcc_unreachable ();
2338 #endif /* POINTERS_EXTEND_UNSIGNED */
2340 return x;
2343 /* Return an RTX equivalent to the value of the tree expression
2344 EXP. */
2346 static rtx
2347 expand_debug_expr (tree exp)
2349 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2350 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2351 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2352 addr_space_t as;
2354 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2356 case tcc_expression:
2357 switch (TREE_CODE (exp))
2359 case COND_EXPR:
2360 case DOT_PROD_EXPR:
2361 case WIDEN_MULT_PLUS_EXPR:
2362 case WIDEN_MULT_MINUS_EXPR:
2363 goto ternary;
2365 case TRUTH_ANDIF_EXPR:
2366 case TRUTH_ORIF_EXPR:
2367 case TRUTH_AND_EXPR:
2368 case TRUTH_OR_EXPR:
2369 case TRUTH_XOR_EXPR:
2370 goto binary;
2372 case TRUTH_NOT_EXPR:
2373 goto unary;
2375 default:
2376 break;
2378 break;
2380 ternary:
2381 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2382 if (!op2)
2383 return NULL_RTX;
2384 /* Fall through. */
2386 binary:
2387 case tcc_binary:
2388 case tcc_comparison:
2389 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2390 if (!op1)
2391 return NULL_RTX;
2392 /* Fall through. */
2394 unary:
2395 case tcc_unary:
2396 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2397 if (!op0)
2398 return NULL_RTX;
2399 break;
2401 case tcc_type:
2402 case tcc_statement:
2403 gcc_unreachable ();
2405 case tcc_constant:
2406 case tcc_exceptional:
2407 case tcc_declaration:
2408 case tcc_reference:
2409 case tcc_vl_exp:
2410 break;
2413 switch (TREE_CODE (exp))
2415 case STRING_CST:
2416 if (!lookup_constant_def (exp))
2418 if (strlen (TREE_STRING_POINTER (exp)) + 1
2419 != (size_t) TREE_STRING_LENGTH (exp))
2420 return NULL_RTX;
2421 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2422 op0 = gen_rtx_MEM (BLKmode, op0);
2423 set_mem_attributes (op0, exp, 0);
2424 return op0;
2426 /* Fall through... */
2428 case INTEGER_CST:
2429 case REAL_CST:
2430 case FIXED_CST:
2431 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2432 return op0;
2434 case COMPLEX_CST:
2435 gcc_assert (COMPLEX_MODE_P (mode));
2436 op0 = expand_debug_expr (TREE_REALPART (exp));
2437 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2438 return gen_rtx_CONCAT (mode, op0, op1);
2440 case DEBUG_EXPR_DECL:
2441 op0 = DECL_RTL_IF_SET (exp);
2443 if (op0)
2444 return op0;
2446 op0 = gen_rtx_DEBUG_EXPR (mode);
2447 DEBUG_EXPR_TREE_DECL (op0) = exp;
2448 SET_DECL_RTL (exp, op0);
2450 return op0;
2452 case VAR_DECL:
2453 case PARM_DECL:
2454 case FUNCTION_DECL:
2455 case LABEL_DECL:
2456 case CONST_DECL:
2457 case RESULT_DECL:
2458 op0 = DECL_RTL_IF_SET (exp);
2460 /* This decl was probably optimized away. */
2461 if (!op0)
2463 if (TREE_CODE (exp) != VAR_DECL
2464 || DECL_EXTERNAL (exp)
2465 || !TREE_STATIC (exp)
2466 || !DECL_NAME (exp)
2467 || DECL_HARD_REGISTER (exp)
2468 || mode == VOIDmode)
2469 return NULL;
2471 op0 = make_decl_rtl_for_debug (exp);
2472 if (!MEM_P (op0)
2473 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2474 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2475 return NULL;
2477 else
2478 op0 = copy_rtx (op0);
2480 if (GET_MODE (op0) == BLKmode
2481 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2482 below would ICE. While it is likely a FE bug,
2483 try to be robust here. See PR43166. */
2484 || mode == BLKmode
2485 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2487 gcc_assert (MEM_P (op0));
2488 op0 = adjust_address_nv (op0, mode, 0);
2489 return op0;
2492 /* Fall through. */
2494 adjust_mode:
2495 case PAREN_EXPR:
2496 case NOP_EXPR:
2497 case CONVERT_EXPR:
2499 enum machine_mode inner_mode = GET_MODE (op0);
2501 if (mode == inner_mode)
2502 return op0;
2504 if (inner_mode == VOIDmode)
2506 if (TREE_CODE (exp) == SSA_NAME)
2507 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2508 else
2509 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2510 if (mode == inner_mode)
2511 return op0;
2514 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2516 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2517 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2518 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2519 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2520 else
2521 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2523 else if (FLOAT_MODE_P (mode))
2525 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2526 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2527 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2528 else
2529 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2531 else if (FLOAT_MODE_P (inner_mode))
2533 if (unsignedp)
2534 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2535 else
2536 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2538 else if (CONSTANT_P (op0)
2539 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2540 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2541 subreg_lowpart_offset (mode,
2542 inner_mode));
2543 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2544 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2545 : unsignedp)
2546 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2547 else
2548 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2550 return op0;
2553 case MEM_REF:
2554 /* ??? FIXME. */
2555 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2556 return NULL;
2557 /* Fallthru. */
2558 case INDIRECT_REF:
2559 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2560 if (!op0)
2561 return NULL;
2563 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2564 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2565 else
2566 as = ADDR_SPACE_GENERIC;
2568 op0 = gen_rtx_MEM (mode, op0);
2570 set_mem_attributes (op0, exp, 0);
2571 set_mem_addr_space (op0, as);
2573 return op0;
2575 case TARGET_MEM_REF:
2576 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2577 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2578 return NULL;
2580 op0 = expand_debug_expr
2581 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2582 if (!op0)
2583 return NULL;
2585 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2587 op0 = gen_rtx_MEM (mode, op0);
2589 set_mem_attributes (op0, exp, 0);
2590 set_mem_addr_space (op0, as);
2592 return op0;
2594 case ARRAY_REF:
2595 case ARRAY_RANGE_REF:
2596 case COMPONENT_REF:
2597 case BIT_FIELD_REF:
2598 case REALPART_EXPR:
2599 case IMAGPART_EXPR:
2600 case VIEW_CONVERT_EXPR:
2602 enum machine_mode mode1;
2603 HOST_WIDE_INT bitsize, bitpos;
2604 tree offset;
2605 int volatilep = 0;
2606 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2607 &mode1, &unsignedp, &volatilep, false);
2608 rtx orig_op0;
2610 if (bitsize == 0)
2611 return NULL;
2613 orig_op0 = op0 = expand_debug_expr (tem);
2615 if (!op0)
2616 return NULL;
2618 if (offset)
2620 enum machine_mode addrmode, offmode;
2622 if (!MEM_P (op0))
2623 return NULL;
2625 op0 = XEXP (op0, 0);
2626 addrmode = GET_MODE (op0);
2627 if (addrmode == VOIDmode)
2628 addrmode = Pmode;
2630 op1 = expand_debug_expr (offset);
2631 if (!op1)
2632 return NULL;
2634 offmode = GET_MODE (op1);
2635 if (offmode == VOIDmode)
2636 offmode = TYPE_MODE (TREE_TYPE (offset));
2638 if (addrmode != offmode)
2639 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2640 subreg_lowpart_offset (addrmode,
2641 offmode));
2643 /* Don't use offset_address here, we don't need a
2644 recognizable address, and we don't want to generate
2645 code. */
2646 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2649 if (MEM_P (op0))
2651 if (mode1 == VOIDmode)
2652 /* Bitfield. */
2653 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2654 if (bitpos >= BITS_PER_UNIT)
2656 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2657 bitpos %= BITS_PER_UNIT;
2659 else if (bitpos < 0)
2661 HOST_WIDE_INT units
2662 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2663 op0 = adjust_address_nv (op0, mode1, units);
2664 bitpos += units * BITS_PER_UNIT;
2666 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2667 op0 = adjust_address_nv (op0, mode, 0);
2668 else if (GET_MODE (op0) != mode1)
2669 op0 = adjust_address_nv (op0, mode1, 0);
2670 else
2671 op0 = copy_rtx (op0);
2672 if (op0 == orig_op0)
2673 op0 = shallow_copy_rtx (op0);
2674 set_mem_attributes (op0, exp, 0);
2677 if (bitpos == 0 && mode == GET_MODE (op0))
2678 return op0;
2680 if (bitpos < 0)
2681 return NULL;
2683 if (GET_MODE (op0) == BLKmode)
2684 return NULL;
2686 if ((bitpos % BITS_PER_UNIT) == 0
2687 && bitsize == GET_MODE_BITSIZE (mode1))
2689 enum machine_mode opmode = GET_MODE (op0);
2691 if (opmode == VOIDmode)
2692 opmode = mode1;
2694 /* This condition may hold if we're expanding the address
2695 right past the end of an array that turned out not to
2696 be addressable (i.e., the address was only computed in
2697 debug stmts). The gen_subreg below would rightfully
2698 crash, and the address doesn't really exist, so just
2699 drop it. */
2700 if (bitpos >= GET_MODE_BITSIZE (opmode))
2701 return NULL;
2703 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2704 return simplify_gen_subreg (mode, op0, opmode,
2705 bitpos / BITS_PER_UNIT);
2708 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2709 && TYPE_UNSIGNED (TREE_TYPE (exp))
2710 ? SIGN_EXTRACT
2711 : ZERO_EXTRACT, mode,
2712 GET_MODE (op0) != VOIDmode
2713 ? GET_MODE (op0) : mode1,
2714 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2717 case ABS_EXPR:
2718 return gen_rtx_ABS (mode, op0);
2720 case NEGATE_EXPR:
2721 return gen_rtx_NEG (mode, op0);
2723 case BIT_NOT_EXPR:
2724 return gen_rtx_NOT (mode, op0);
2726 case FLOAT_EXPR:
2727 if (unsignedp)
2728 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2729 else
2730 return gen_rtx_FLOAT (mode, op0);
2732 case FIX_TRUNC_EXPR:
2733 if (unsignedp)
2734 return gen_rtx_UNSIGNED_FIX (mode, op0);
2735 else
2736 return gen_rtx_FIX (mode, op0);
2738 case POINTER_PLUS_EXPR:
2739 /* For the rare target where pointers are not the same size as
2740 size_t, we need to check for mismatched modes and correct
2741 the addend. */
2742 if (op0 && op1
2743 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2744 && GET_MODE (op0) != GET_MODE (op1))
2746 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2747 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2748 else
2749 /* We always sign-extend, regardless of the signedness of
2750 the operand, because the operand is always unsigned
2751 here even if the original C expression is signed. */
2752 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2754 /* Fall through. */
2755 case PLUS_EXPR:
2756 return gen_rtx_PLUS (mode, op0, op1);
2758 case MINUS_EXPR:
2759 return gen_rtx_MINUS (mode, op0, op1);
2761 case MULT_EXPR:
2762 return gen_rtx_MULT (mode, op0, op1);
2764 case RDIV_EXPR:
2765 case TRUNC_DIV_EXPR:
2766 case EXACT_DIV_EXPR:
2767 if (unsignedp)
2768 return gen_rtx_UDIV (mode, op0, op1);
2769 else
2770 return gen_rtx_DIV (mode, op0, op1);
2772 case TRUNC_MOD_EXPR:
2773 if (unsignedp)
2774 return gen_rtx_UMOD (mode, op0, op1);
2775 else
2776 return gen_rtx_MOD (mode, op0, op1);
2778 case FLOOR_DIV_EXPR:
2779 if (unsignedp)
2780 return gen_rtx_UDIV (mode, op0, op1);
2781 else
2783 rtx div = gen_rtx_DIV (mode, op0, op1);
2784 rtx mod = gen_rtx_MOD (mode, op0, op1);
2785 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2786 return gen_rtx_PLUS (mode, div, adj);
2789 case FLOOR_MOD_EXPR:
2790 if (unsignedp)
2791 return gen_rtx_UMOD (mode, op0, op1);
2792 else
2794 rtx mod = gen_rtx_MOD (mode, op0, op1);
2795 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2796 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2797 return gen_rtx_PLUS (mode, mod, adj);
2800 case CEIL_DIV_EXPR:
2801 if (unsignedp)
2803 rtx div = gen_rtx_UDIV (mode, op0, op1);
2804 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2805 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2806 return gen_rtx_PLUS (mode, div, adj);
2808 else
2810 rtx div = gen_rtx_DIV (mode, op0, op1);
2811 rtx mod = gen_rtx_MOD (mode, op0, op1);
2812 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2813 return gen_rtx_PLUS (mode, div, adj);
2816 case CEIL_MOD_EXPR:
2817 if (unsignedp)
2819 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2820 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2821 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2822 return gen_rtx_PLUS (mode, mod, adj);
2824 else
2826 rtx mod = gen_rtx_MOD (mode, op0, op1);
2827 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2828 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2829 return gen_rtx_PLUS (mode, mod, adj);
2832 case ROUND_DIV_EXPR:
2833 if (unsignedp)
2835 rtx div = gen_rtx_UDIV (mode, op0, op1);
2836 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2837 rtx adj = round_udiv_adjust (mode, mod, op1);
2838 return gen_rtx_PLUS (mode, div, adj);
2840 else
2842 rtx div = gen_rtx_DIV (mode, op0, op1);
2843 rtx mod = gen_rtx_MOD (mode, op0, op1);
2844 rtx adj = round_sdiv_adjust (mode, mod, op1);
2845 return gen_rtx_PLUS (mode, div, adj);
2848 case ROUND_MOD_EXPR:
2849 if (unsignedp)
2851 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2852 rtx adj = round_udiv_adjust (mode, mod, op1);
2853 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2854 return gen_rtx_PLUS (mode, mod, adj);
2856 else
2858 rtx mod = gen_rtx_MOD (mode, op0, op1);
2859 rtx adj = round_sdiv_adjust (mode, mod, op1);
2860 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2861 return gen_rtx_PLUS (mode, mod, adj);
2864 case LSHIFT_EXPR:
2865 return gen_rtx_ASHIFT (mode, op0, op1);
2867 case RSHIFT_EXPR:
2868 if (unsignedp)
2869 return gen_rtx_LSHIFTRT (mode, op0, op1);
2870 else
2871 return gen_rtx_ASHIFTRT (mode, op0, op1);
2873 case LROTATE_EXPR:
2874 return gen_rtx_ROTATE (mode, op0, op1);
2876 case RROTATE_EXPR:
2877 return gen_rtx_ROTATERT (mode, op0, op1);
2879 case MIN_EXPR:
2880 if (unsignedp)
2881 return gen_rtx_UMIN (mode, op0, op1);
2882 else
2883 return gen_rtx_SMIN (mode, op0, op1);
2885 case MAX_EXPR:
2886 if (unsignedp)
2887 return gen_rtx_UMAX (mode, op0, op1);
2888 else
2889 return gen_rtx_SMAX (mode, op0, op1);
2891 case BIT_AND_EXPR:
2892 case TRUTH_AND_EXPR:
2893 return gen_rtx_AND (mode, op0, op1);
2895 case BIT_IOR_EXPR:
2896 case TRUTH_OR_EXPR:
2897 return gen_rtx_IOR (mode, op0, op1);
2899 case BIT_XOR_EXPR:
2900 case TRUTH_XOR_EXPR:
2901 return gen_rtx_XOR (mode, op0, op1);
2903 case TRUTH_ANDIF_EXPR:
2904 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2906 case TRUTH_ORIF_EXPR:
2907 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2909 case TRUTH_NOT_EXPR:
2910 return gen_rtx_EQ (mode, op0, const0_rtx);
2912 case LT_EXPR:
2913 if (unsignedp)
2914 return gen_rtx_LTU (mode, op0, op1);
2915 else
2916 return gen_rtx_LT (mode, op0, op1);
2918 case LE_EXPR:
2919 if (unsignedp)
2920 return gen_rtx_LEU (mode, op0, op1);
2921 else
2922 return gen_rtx_LE (mode, op0, op1);
2924 case GT_EXPR:
2925 if (unsignedp)
2926 return gen_rtx_GTU (mode, op0, op1);
2927 else
2928 return gen_rtx_GT (mode, op0, op1);
2930 case GE_EXPR:
2931 if (unsignedp)
2932 return gen_rtx_GEU (mode, op0, op1);
2933 else
2934 return gen_rtx_GE (mode, op0, op1);
2936 case EQ_EXPR:
2937 return gen_rtx_EQ (mode, op0, op1);
2939 case NE_EXPR:
2940 return gen_rtx_NE (mode, op0, op1);
2942 case UNORDERED_EXPR:
2943 return gen_rtx_UNORDERED (mode, op0, op1);
2945 case ORDERED_EXPR:
2946 return gen_rtx_ORDERED (mode, op0, op1);
2948 case UNLT_EXPR:
2949 return gen_rtx_UNLT (mode, op0, op1);
2951 case UNLE_EXPR:
2952 return gen_rtx_UNLE (mode, op0, op1);
2954 case UNGT_EXPR:
2955 return gen_rtx_UNGT (mode, op0, op1);
2957 case UNGE_EXPR:
2958 return gen_rtx_UNGE (mode, op0, op1);
2960 case UNEQ_EXPR:
2961 return gen_rtx_UNEQ (mode, op0, op1);
2963 case LTGT_EXPR:
2964 return gen_rtx_LTGT (mode, op0, op1);
2966 case COND_EXPR:
2967 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2969 case COMPLEX_EXPR:
2970 gcc_assert (COMPLEX_MODE_P (mode));
2971 if (GET_MODE (op0) == VOIDmode)
2972 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2973 if (GET_MODE (op1) == VOIDmode)
2974 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2975 return gen_rtx_CONCAT (mode, op0, op1);
2977 case CONJ_EXPR:
2978 if (GET_CODE (op0) == CONCAT)
2979 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2980 gen_rtx_NEG (GET_MODE_INNER (mode),
2981 XEXP (op0, 1)));
2982 else
2984 enum machine_mode imode = GET_MODE_INNER (mode);
2985 rtx re, im;
2987 if (MEM_P (op0))
2989 re = adjust_address_nv (op0, imode, 0);
2990 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2992 else
2994 enum machine_mode ifmode = int_mode_for_mode (mode);
2995 enum machine_mode ihmode = int_mode_for_mode (imode);
2996 rtx halfsize;
2997 if (ifmode == BLKmode || ihmode == BLKmode)
2998 return NULL;
2999 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3000 re = op0;
3001 if (mode != ifmode)
3002 re = gen_rtx_SUBREG (ifmode, re, 0);
3003 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3004 if (imode != ihmode)
3005 re = gen_rtx_SUBREG (imode, re, 0);
3006 im = copy_rtx (op0);
3007 if (mode != ifmode)
3008 im = gen_rtx_SUBREG (ifmode, im, 0);
3009 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3010 if (imode != ihmode)
3011 im = gen_rtx_SUBREG (imode, im, 0);
3013 im = gen_rtx_NEG (imode, im);
3014 return gen_rtx_CONCAT (mode, re, im);
3017 case ADDR_EXPR:
3018 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3019 if (!op0 || !MEM_P (op0))
3021 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3022 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3023 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3024 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3025 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3027 if (handled_component_p (TREE_OPERAND (exp, 0)))
3029 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3030 tree decl
3031 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3032 &bitoffset, &bitsize, &maxsize);
3033 if ((TREE_CODE (decl) == VAR_DECL
3034 || TREE_CODE (decl) == PARM_DECL
3035 || TREE_CODE (decl) == RESULT_DECL)
3036 && !TREE_ADDRESSABLE (decl)
3037 && (bitoffset % BITS_PER_UNIT) == 0
3038 && bitsize > 0
3039 && bitsize == maxsize)
3040 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3041 bitoffset / BITS_PER_UNIT);
3044 return NULL;
3047 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3049 return op0;
3051 case VECTOR_CST:
3052 exp = build_constructor_from_list (TREE_TYPE (exp),
3053 TREE_VECTOR_CST_ELTS (exp));
3054 /* Fall through. */
3056 case CONSTRUCTOR:
3057 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3059 unsigned i;
3060 tree val;
3062 op0 = gen_rtx_CONCATN
3063 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3065 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3067 op1 = expand_debug_expr (val);
3068 if (!op1)
3069 return NULL;
3070 XVECEXP (op0, 0, i) = op1;
3073 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3075 op1 = expand_debug_expr
3076 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
3078 if (!op1)
3079 return NULL;
3081 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3082 XVECEXP (op0, 0, i) = op1;
3085 return op0;
3087 else
3088 goto flag_unsupported;
3090 case CALL_EXPR:
3091 /* ??? Maybe handle some builtins? */
3092 return NULL;
3094 case SSA_NAME:
3096 gimple g = get_gimple_for_ssa_name (exp);
3097 if (g)
3099 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3100 if (!op0)
3101 return NULL;
3103 else
3105 int part = var_to_partition (SA.map, exp);
3107 if (part == NO_PARTITION)
3108 return NULL;
3110 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3112 op0 = SA.partition_to_pseudo[part];
3114 goto adjust_mode;
3117 case ERROR_MARK:
3118 return NULL;
3120 /* Vector stuff. For most of the codes we don't have rtl codes. */
3121 case REALIGN_LOAD_EXPR:
3122 case REDUC_MAX_EXPR:
3123 case REDUC_MIN_EXPR:
3124 case REDUC_PLUS_EXPR:
3125 case VEC_COND_EXPR:
3126 case VEC_EXTRACT_EVEN_EXPR:
3127 case VEC_EXTRACT_ODD_EXPR:
3128 case VEC_INTERLEAVE_HIGH_EXPR:
3129 case VEC_INTERLEAVE_LOW_EXPR:
3130 case VEC_LSHIFT_EXPR:
3131 case VEC_PACK_FIX_TRUNC_EXPR:
3132 case VEC_PACK_SAT_EXPR:
3133 case VEC_PACK_TRUNC_EXPR:
3134 case VEC_RSHIFT_EXPR:
3135 case VEC_UNPACK_FLOAT_HI_EXPR:
3136 case VEC_UNPACK_FLOAT_LO_EXPR:
3137 case VEC_UNPACK_HI_EXPR:
3138 case VEC_UNPACK_LO_EXPR:
3139 case VEC_WIDEN_MULT_HI_EXPR:
3140 case VEC_WIDEN_MULT_LO_EXPR:
3141 return NULL;
3143 /* Misc codes. */
3144 case ADDR_SPACE_CONVERT_EXPR:
3145 case FIXED_CONVERT_EXPR:
3146 case OBJ_TYPE_REF:
3147 case WITH_SIZE_EXPR:
3148 return NULL;
3150 case DOT_PROD_EXPR:
3151 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3152 && SCALAR_INT_MODE_P (mode))
3154 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3155 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3156 else
3157 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3158 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3159 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3160 else
3161 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3162 op0 = gen_rtx_MULT (mode, op0, op1);
3163 return gen_rtx_PLUS (mode, op0, op2);
3165 return NULL;
3167 case WIDEN_MULT_EXPR:
3168 case WIDEN_MULT_PLUS_EXPR:
3169 case WIDEN_MULT_MINUS_EXPR:
3170 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3171 && SCALAR_INT_MODE_P (mode))
3173 enum machine_mode inner_mode = GET_MODE (op0);
3174 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3175 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3176 else
3177 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3178 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3179 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3180 else
3181 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3182 op0 = gen_rtx_MULT (mode, op0, op1);
3183 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3184 return op0;
3185 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3186 return gen_rtx_PLUS (mode, op0, op2);
3187 else
3188 return gen_rtx_MINUS (mode, op2, op0);
3190 return NULL;
3192 case WIDEN_SUM_EXPR:
3193 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3194 && SCALAR_INT_MODE_P (mode))
3196 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3197 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3198 else
3199 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3200 return gen_rtx_PLUS (mode, op0, op1);
3202 return NULL;
3204 default:
3205 flag_unsupported:
3206 #ifdef ENABLE_CHECKING
3207 debug_tree (exp);
3208 gcc_unreachable ();
3209 #else
3210 return NULL;
3211 #endif
3215 /* Expand the _LOCs in debug insns. We run this after expanding all
3216 regular insns, so that any variables referenced in the function
3217 will have their DECL_RTLs set. */
3219 static void
3220 expand_debug_locations (void)
3222 rtx insn;
3223 rtx last = get_last_insn ();
3224 int save_strict_alias = flag_strict_aliasing;
3226 /* New alias sets while setting up memory attributes cause
3227 -fcompare-debug failures, even though they don't bring about any
3228 codegen changes. */
3229 flag_strict_aliasing = 0;
3231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3232 if (DEBUG_INSN_P (insn))
3234 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3235 rtx val;
3236 enum machine_mode mode;
3238 if (value == NULL_TREE)
3239 val = NULL_RTX;
3240 else
3242 val = expand_debug_expr (value);
3243 gcc_assert (last == get_last_insn ());
3246 if (!val)
3247 val = gen_rtx_UNKNOWN_VAR_LOC ();
3248 else
3250 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3252 gcc_assert (mode == GET_MODE (val)
3253 || (GET_MODE (val) == VOIDmode
3254 && (CONST_INT_P (val)
3255 || GET_CODE (val) == CONST_FIXED
3256 || GET_CODE (val) == CONST_DOUBLE
3257 || GET_CODE (val) == LABEL_REF)));
3260 INSN_VAR_LOCATION_LOC (insn) = val;
3263 flag_strict_aliasing = save_strict_alias;
3266 /* Expand basic block BB from GIMPLE trees to RTL. */
3268 static basic_block
3269 expand_gimple_basic_block (basic_block bb)
3271 gimple_stmt_iterator gsi;
3272 gimple_seq stmts;
3273 gimple stmt = NULL;
3274 rtx note, last;
3275 edge e;
3276 edge_iterator ei;
3277 void **elt;
3279 if (dump_file)
3280 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3281 bb->index);
3283 /* Note that since we are now transitioning from GIMPLE to RTL, we
3284 cannot use the gsi_*_bb() routines because they expect the basic
3285 block to be in GIMPLE, instead of RTL. Therefore, we need to
3286 access the BB sequence directly. */
3287 stmts = bb_seq (bb);
3288 bb->il.gimple = NULL;
3289 rtl_profile_for_bb (bb);
3290 init_rtl_bb_info (bb);
3291 bb->flags |= BB_RTL;
3293 /* Remove the RETURN_EXPR if we may fall through to the exit
3294 instead. */
3295 gsi = gsi_last (stmts);
3296 if (!gsi_end_p (gsi)
3297 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3299 gimple ret_stmt = gsi_stmt (gsi);
3301 gcc_assert (single_succ_p (bb));
3302 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3304 if (bb->next_bb == EXIT_BLOCK_PTR
3305 && !gimple_return_retval (ret_stmt))
3307 gsi_remove (&gsi, false);
3308 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3312 gsi = gsi_start (stmts);
3313 if (!gsi_end_p (gsi))
3315 stmt = gsi_stmt (gsi);
3316 if (gimple_code (stmt) != GIMPLE_LABEL)
3317 stmt = NULL;
3320 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3322 if (stmt || elt)
3324 last = get_last_insn ();
3326 if (stmt)
3328 expand_gimple_stmt (stmt);
3329 gsi_next (&gsi);
3332 if (elt)
3333 emit_label ((rtx) *elt);
3335 /* Java emits line number notes at the top of labels.
3336 ??? Make this go away once line number notes are obsoleted. */
3337 BB_HEAD (bb) = NEXT_INSN (last);
3338 if (NOTE_P (BB_HEAD (bb)))
3339 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3340 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3342 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3344 else
3345 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3347 NOTE_BASIC_BLOCK (note) = bb;
3349 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3351 basic_block new_bb;
3353 stmt = gsi_stmt (gsi);
3355 /* If this statement is a non-debug one, and we generate debug
3356 insns, then this one might be the last real use of a TERed
3357 SSA_NAME, but where there are still some debug uses further
3358 down. Expanding the current SSA name in such further debug
3359 uses by its RHS might lead to wrong debug info, as coalescing
3360 might make the operands of such RHS be placed into the same
3361 pseudo as something else. Like so:
3362 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3363 use(a_1);
3364 a_2 = ...
3365 #DEBUG ... => a_1
3366 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3367 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3368 the write to a_2 would actually have clobbered the place which
3369 formerly held a_0.
3371 So, instead of that, we recognize the situation, and generate
3372 debug temporaries at the last real use of TERed SSA names:
3373 a_1 = a_0 + 1;
3374 #DEBUG #D1 => a_1
3375 use(a_1);
3376 a_2 = ...
3377 #DEBUG ... => #D1
3379 if (MAY_HAVE_DEBUG_INSNS
3380 && SA.values
3381 && !is_gimple_debug (stmt))
3383 ssa_op_iter iter;
3384 tree op;
3385 gimple def;
3387 location_t sloc = get_curr_insn_source_location ();
3388 tree sblock = get_curr_insn_block ();
3390 /* Look for SSA names that have their last use here (TERed
3391 names always have only one real use). */
3392 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3393 if ((def = get_gimple_for_ssa_name (op)))
3395 imm_use_iterator imm_iter;
3396 use_operand_p use_p;
3397 bool have_debug_uses = false;
3399 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3401 if (gimple_debug_bind_p (USE_STMT (use_p)))
3403 have_debug_uses = true;
3404 break;
3408 if (have_debug_uses)
3410 /* OP is a TERed SSA name, with DEF its defining
3411 statement, and where OP is used in further debug
3412 instructions. Generate a debug temporary, and
3413 replace all uses of OP in debug insns with that
3414 temporary. */
3415 gimple debugstmt;
3416 tree value = gimple_assign_rhs_to_tree (def);
3417 tree vexpr = make_node (DEBUG_EXPR_DECL);
3418 rtx val;
3419 enum machine_mode mode;
3421 set_curr_insn_source_location (gimple_location (def));
3422 set_curr_insn_block (gimple_block (def));
3424 DECL_ARTIFICIAL (vexpr) = 1;
3425 TREE_TYPE (vexpr) = TREE_TYPE (value);
3426 if (DECL_P (value))
3427 mode = DECL_MODE (value);
3428 else
3429 mode = TYPE_MODE (TREE_TYPE (value));
3430 DECL_MODE (vexpr) = mode;
3432 val = gen_rtx_VAR_LOCATION
3433 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3435 val = emit_debug_insn (val);
3437 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3439 if (!gimple_debug_bind_p (debugstmt))
3440 continue;
3442 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3443 SET_USE (use_p, vexpr);
3445 update_stmt (debugstmt);
3449 set_curr_insn_source_location (sloc);
3450 set_curr_insn_block (sblock);
3453 currently_expanding_gimple_stmt = stmt;
3455 /* Expand this statement, then evaluate the resulting RTL and
3456 fixup the CFG accordingly. */
3457 if (gimple_code (stmt) == GIMPLE_COND)
3459 new_bb = expand_gimple_cond (bb, stmt);
3460 if (new_bb)
3461 return new_bb;
3463 else if (gimple_debug_bind_p (stmt))
3465 location_t sloc = get_curr_insn_source_location ();
3466 tree sblock = get_curr_insn_block ();
3467 gimple_stmt_iterator nsi = gsi;
3469 for (;;)
3471 tree var = gimple_debug_bind_get_var (stmt);
3472 tree value;
3473 rtx val;
3474 enum machine_mode mode;
3476 if (gimple_debug_bind_has_value_p (stmt))
3477 value = gimple_debug_bind_get_value (stmt);
3478 else
3479 value = NULL_TREE;
3481 last = get_last_insn ();
3483 set_curr_insn_source_location (gimple_location (stmt));
3484 set_curr_insn_block (gimple_block (stmt));
3486 if (DECL_P (var))
3487 mode = DECL_MODE (var);
3488 else
3489 mode = TYPE_MODE (TREE_TYPE (var));
3491 val = gen_rtx_VAR_LOCATION
3492 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3494 val = emit_debug_insn (val);
3496 if (dump_file && (dump_flags & TDF_DETAILS))
3498 /* We can't dump the insn with a TREE where an RTX
3499 is expected. */
3500 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3501 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3502 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3505 /* In order not to generate too many debug temporaries,
3506 we delink all uses of debug statements we already expanded.
3507 Therefore debug statements between definition and real
3508 use of TERed SSA names will continue to use the SSA name,
3509 and not be replaced with debug temps. */
3510 delink_stmt_imm_use (stmt);
3512 gsi = nsi;
3513 gsi_next (&nsi);
3514 if (gsi_end_p (nsi))
3515 break;
3516 stmt = gsi_stmt (nsi);
3517 if (!gimple_debug_bind_p (stmt))
3518 break;
3521 set_curr_insn_source_location (sloc);
3522 set_curr_insn_block (sblock);
3524 else
3526 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3528 bool can_fallthru;
3529 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3530 if (new_bb)
3532 if (can_fallthru)
3533 bb = new_bb;
3534 else
3535 return new_bb;
3538 else
3540 def_operand_p def_p;
3541 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3543 if (def_p != NULL)
3545 /* Ignore this stmt if it is in the list of
3546 replaceable expressions. */
3547 if (SA.values
3548 && bitmap_bit_p (SA.values,
3549 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3550 continue;
3552 last = expand_gimple_stmt (stmt);
3553 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3558 currently_expanding_gimple_stmt = NULL;
3560 /* Expand implicit goto and convert goto_locus. */
3561 FOR_EACH_EDGE (e, ei, bb->succs)
3563 if (e->goto_locus && e->goto_block)
3565 set_curr_insn_source_location (e->goto_locus);
3566 set_curr_insn_block (e->goto_block);
3567 e->goto_locus = curr_insn_locator ();
3569 e->goto_block = NULL;
3570 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3572 emit_jump (label_rtx_for_bb (e->dest));
3573 e->flags &= ~EDGE_FALLTHRU;
3577 /* Expanded RTL can create a jump as the last instruction of a block.
3578 This might later be assumed to be a jump to the successor and break edge insertion.
3579 We need to insert a dummy move to prevent this. PR41440. */
3580 if (single_succ_p (bb)
3581 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3582 && (last = get_last_insn ())
3583 && JUMP_P (last))
3585 rtx dummy = gen_reg_rtx (SImode);
3586 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3589 do_pending_stack_adjust ();
3591 /* Find the block tail. The last insn in the block is the insn
3592 before a barrier and/or table jump insn. */
3593 last = get_last_insn ();
3594 if (BARRIER_P (last))
3595 last = PREV_INSN (last);
3596 if (JUMP_TABLE_DATA_P (last))
3597 last = PREV_INSN (PREV_INSN (last));
3598 BB_END (bb) = last;
3600 update_bb_for_insn (bb);
3602 return bb;
3606 /* Create a basic block for initialization code. */
3608 static basic_block
3609 construct_init_block (void)
3611 basic_block init_block, first_block;
3612 edge e = NULL;
3613 int flags;
3615 /* Multiple entry points not supported yet. */
3616 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3617 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3618 init_rtl_bb_info (EXIT_BLOCK_PTR);
3619 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3620 EXIT_BLOCK_PTR->flags |= BB_RTL;
3622 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3624 /* When the entry edge points to the first basic block, we don't need a
3625 jump; otherwise we have to jump to the proper target. */
3626 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3628 tree label = gimple_block_label (e->dest);
3630 emit_jump (label_rtx (label));
3631 flags = 0;
3633 else
3634 flags = EDGE_FALLTHRU;
3636 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3637 get_last_insn (),
3638 ENTRY_BLOCK_PTR);
3639 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3640 init_block->count = ENTRY_BLOCK_PTR->count;
3641 if (e)
3643 first_block = e->dest;
3644 redirect_edge_succ (e, init_block);
3645 e = make_edge (init_block, first_block, flags);
3647 else
3648 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3649 e->probability = REG_BR_PROB_BASE;
3650 e->count = ENTRY_BLOCK_PTR->count;
3652 update_bb_for_insn (init_block);
3653 return init_block;
3656 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3657 found in the block tree. */
3659 static void
3660 set_block_levels (tree block, int level)
3662 while (block)
3664 BLOCK_NUMBER (block) = level;
3665 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3666 block = BLOCK_CHAIN (block);
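/* Editor's illustration, not part of the original cfgexpand.c:
   assuming set_block_levels is invoked on the function's outermost
   lexical block with LEVEL 0, the nested scopes of a function like
   the one below end up with BLOCK_NUMBER 0, 1 and 2 respectively
   (the compiler may create additional blocks; the point is only the
   depth-based numbering).  */

void
nested_scopes (void)
{
  int a = 1;          /* outermost block, level 0 */
  {
    int b = a + 1;    /* nested block, level 1 */
    {
      int c = b + 1;  /* innermost block, level 2 */
      (void) c;
    }
    (void) b;
  }
  (void) a;
}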
3670 /* Create a block containing landing pads and similar stuff. */
3672 static void
3673 construct_exit_block (void)
3675 rtx head = get_last_insn ();
3676 rtx end;
3677 basic_block exit_block;
3678 edge e, e2;
3679 unsigned ix;
3680 edge_iterator ei;
3681 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3683 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3685 /* Make sure the locus is set to the end of the function, so that
3686 epilogue line numbers and warnings are set properly. */
3687 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3688 input_location = cfun->function_end_locus;
3690 /* The following insns belong to the top scope. */
3691 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3693 /* Generate rtl for function exit. */
3694 expand_function_end ();
3696 end = get_last_insn ();
3697 if (head == end)
3698 return;
3699 /* While emitting the function end we could move the end of the last basic block. */
3701 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3702 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3703 head = NEXT_INSN (head);
3704 exit_block = create_basic_block (NEXT_INSN (head), end,
3705 EXIT_BLOCK_PTR->prev_bb);
3706 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3707 exit_block->count = EXIT_BLOCK_PTR->count;
3709 ix = 0;
3710 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3712 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3713 if (!(e->flags & EDGE_ABNORMAL))
3714 redirect_edge_succ (e, exit_block);
3715 else
3716 ix++;
3719 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3720 e->probability = REG_BR_PROB_BASE;
3721 e->count = EXIT_BLOCK_PTR->count;
3722 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3723 if (e2 != e)
3725 e->count -= e2->count;
3726 exit_block->count -= e2->count;
3727 exit_block->frequency -= EDGE_FREQUENCY (e2);
3729 if (e->count < 0)
3730 e->count = 0;
3731 if (exit_block->count < 0)
3732 exit_block->count = 0;
3733 if (exit_block->frequency < 0)
3734 exit_block->frequency = 0;
3735 update_bb_for_insn (exit_block);
3738 /* Helper function for discover_nonconstant_array_refs.
3739 Look for ARRAY_REF nodes with non-constant indexes and mark them
3740 addressable. */
3742 static tree
3743 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3744 void *data ATTRIBUTE_UNUSED)
3746 tree t = *tp;
3748 if (IS_TYPE_OR_DECL_P (t))
3749 *walk_subtrees = 0;
3750 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3752 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3753 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3754 && (!TREE_OPERAND (t, 2)
3755 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3756 || (TREE_CODE (t) == COMPONENT_REF
3757 && (!TREE_OPERAND (t,2)
3758 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3759 || TREE_CODE (t) == BIT_FIELD_REF
3760 || TREE_CODE (t) == REALPART_EXPR
3761 || TREE_CODE (t) == IMAGPART_EXPR
3762 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3763 || CONVERT_EXPR_P (t))
3764 t = TREE_OPERAND (t, 0);
3766 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3768 t = get_base_address (t);
3769 if (t && DECL_P (t)
3770 && DECL_MODE (t) != BLKmode)
3771 TREE_ADDRESSABLE (t) = 1;
3774 *walk_subtrees = 0;
3777 return NULL_TREE;
3780 /* RTL expansion is not able to compile array references with variable
3781 offsets for arrays stored in a single register. Discover such
3782 expressions and mark variables as addressable to avoid this
3783 scenario. */
3785 static void
3786 discover_nonconstant_array_refs (void)
3788 basic_block bb;
3789 gimple_stmt_iterator gsi;
3791 FOR_EACH_BB (bb)
3792 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3794 gimple stmt = gsi_stmt (gsi);
3795 if (!is_gimple_debug (stmt))
3796 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
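/* Editor's illustration, not part of the original cfgexpand.c: the
   kind of code this pass guards against.  A is small enough that it
   could otherwise be promoted to a single register, but a[i] with a
   non-constant index needs an in-memory reference, so the walk above
   marks A as TREE_ADDRESSABLE.  The function name is made up.  */

static int
pick_element (int i)
{
  int a[2] = { 10, 20 };
  return a[i];
}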
3800 /* This function sets crtl->args.internal_arg_pointer to a virtual
3801 register if DRAP is needed. Local register allocator will replace
3802 virtual_incoming_args_rtx with the virtual register. */
3804 static void
3805 expand_stack_alignment (void)
3807 rtx drap_rtx;
3808 unsigned int preferred_stack_boundary;
3810 if (! SUPPORTS_STACK_ALIGNMENT)
3811 return;
3813 if (cfun->calls_alloca
3814 || cfun->has_nonlocal_label
3815 || crtl->has_nonlocal_goto)
3816 crtl->need_drap = true;
3818 /* Call update_stack_boundary here again to update incoming stack
3819 boundary. It may set incoming stack alignment to a different
3820 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3821 use the minimum incoming stack alignment to check if it is OK
3822 to perform sibcall optimization since sibcall optimization will
3823 only align the outgoing stack to incoming stack boundary. */
3824 if (targetm.calls.update_stack_boundary)
3825 targetm.calls.update_stack_boundary ();
3827 /* The incoming stack frame has to be aligned at least at
3828 parm_stack_boundary. */
3829 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3831 /* Update crtl->stack_alignment_estimated and use it later to align
3832 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3833 exceptions since callgraph doesn't collect incoming stack alignment
3834 in this case. */
3835 if (cfun->can_throw_non_call_exceptions
3836 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3837 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3838 else
3839 preferred_stack_boundary = crtl->preferred_stack_boundary;
3840 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3841 crtl->stack_alignment_estimated = preferred_stack_boundary;
3842 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3843 crtl->stack_alignment_needed = preferred_stack_boundary;
3845 gcc_assert (crtl->stack_alignment_needed
3846 <= crtl->stack_alignment_estimated);
3848 crtl->stack_realign_needed
3849 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3850 crtl->stack_realign_tried = crtl->stack_realign_needed;
3852 crtl->stack_realign_processed = true;
3854 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3855 alignment. */
3856 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3857 drap_rtx = targetm.calls.get_drap_rtx ();
3859 /* stack_realign_drap and drap_rtx must match. */
3860 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3862 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3863 if (NULL != drap_rtx)
3865 crtl->args.internal_arg_pointer = drap_rtx;
3867 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3868 needed. */
3869 fixup_tail_calls ();
3873 /* Translate the intermediate representation contained in the CFG
3874 from GIMPLE trees to RTL.
3876 We do conversion per basic block and preserve/update the tree CFG.
3877 This implies we have to do some magic as the CFG can simultaneously
3878 consist of basic blocks containing RTL and GIMPLE trees. This can
3879 confuse the CFG hooks, so be careful to not manipulate CFG during
3880 the expansion. */
3882 static unsigned int
3883 gimple_expand_cfg (void)
3885 basic_block bb, init_block;
3886 sbitmap blocks;
3887 edge_iterator ei;
3888 edge e;
3889 rtx var_seq;
3890 unsigned i;
3892 timevar_push (TV_OUT_OF_SSA);
3893 rewrite_out_of_ssa (&SA);
3894 timevar_pop (TV_OUT_OF_SSA);
3895 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3896 sizeof (rtx));
3898 /* Some backends want to know that we are expanding to RTL. */
3899 currently_expanding_to_rtl = 1;
3901 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3903 insn_locators_alloc ();
3904 if (!DECL_IS_BUILTIN (current_function_decl))
3906 /* Eventually, all FEs should explicitly set function_start_locus. */
3907 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3908 set_curr_insn_source_location
3909 (DECL_SOURCE_LOCATION (current_function_decl));
3910 else
3911 set_curr_insn_source_location (cfun->function_start_locus);
3913 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3914 prologue_locator = curr_insn_locator ();
3916 #ifdef INSN_SCHEDULING
3917 init_sched_attrs ();
3918 #endif
3920 /* Make sure the first insn is a note, even if we don't want line numbers.
3921 This ensures the first insn will never be deleted.
3922 Also, final expects a note to appear there. */
3923 emit_note (NOTE_INSN_DELETED);
3925 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3926 discover_nonconstant_array_refs ();
3928 targetm.expand_to_rtl_hook ();
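/* Reset the recorded stack-alignment requirements to their minimum; expanding
variables, parameters and calls below will raise them again as needed, and
expand_stack_alignment finalizes them once expansion is done. */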
3929 crtl->stack_alignment_needed = STACK_BOUNDARY;
3930 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3931 crtl->stack_alignment_estimated = 0;
3932 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3933 cfun->cfg->max_jumptable_ents = 0;
3935 /* Expand the variables recorded during gimple lowering. */
3936 timevar_push (TV_VAR_EXPAND);
3937 start_sequence ();
3939 expand_used_vars ();
3941 var_seq = get_insns ();
3942 end_sequence ();
3943 timevar_pop (TV_VAR_EXPAND);
3945 /* Honor stack protection warnings. */
3946 if (warn_stack_protect)
3947 {
3948 if (cfun->calls_alloca)
3949 warning (OPT_Wstack_protector,
3950 "stack protector not protecting local variables: "
3951 "variable length buffer");
3952 if (has_short_buffer && !crtl->stack_protect_guard)
3953 warning (OPT_Wstack_protector,
3954 "stack protector not protecting function: "
3955 "all local arrays are less than %d bytes long",
3956 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3957 }
3959 /* Set up parameters and prepare for return, for the function. */
3960 expand_function_start (current_function_decl);
3962 /* If we emitted any instructions for setting up the variables,
3963 emit them before the FUNCTION_START note. */
3964 if (var_seq)
3965 {
3966 emit_insn_before (var_seq, parm_birth_insn);
3968 /* In expand_function_end we'll insert the alloca save/restore
3969 before parm_birth_insn. We've just inserted an alloca call.
3970 Adjust the pointer to match. */
3971 parm_birth_insn = var_seq;
3972 }
3974 /* Now that we also have the parameter RTXs, copy them over to our
3975 partitions. */
3976 for (i = 0; i < SA.map->num_partitions; i++)
3977 {
3978 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3980 if (TREE_CODE (var) != VAR_DECL
3981 && !SA.partition_to_pseudo[i])
3982 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3983 gcc_assert (SA.partition_to_pseudo[i]);
3985 /* If this decl was marked as living in multiple places, reset
3986 this now to NULL. */
3987 if (DECL_RTL_IF_SET (var) == pc_rtx)
3988 SET_DECL_RTL (var, NULL);
3990 /* Some RTL parts really want to look at DECL_RTL(x) when x
3991 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3992 SET_DECL_RTL here to make this available, but that would mean
3993 selecting one of the potentially many RTLs for one DECL. Instead
3994 of doing that we simply reset the MEM_EXPR of the RTL in question,
3995 so nobody can get at it and hence nobody can call DECL_RTL on it. */
3996 if (!DECL_RTL_SET_P (var))
3997 {
3998 if (MEM_P (SA.partition_to_pseudo[i]))
3999 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4000 }
4001 }
4003 /* If this function is `main', emit a call to `__main'
4004 to run global initializers, etc. */
4005 if (DECL_NAME (current_function_decl)
4006 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4007 && DECL_FILE_SCOPE_P (current_function_decl))
4008 expand_main_function ();
4010 /* Initialize the stack_protect_guard field. This must happen after the
4011 call to __main (if any) so that the external decl is initialized. */
4012 if (crtl->stack_protect_guard)
4013 stack_protect_prologue ();
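/* Eliminate the PHI nodes of the SSA form: expand_phi_nodes turns each PHI
argument into an rtl copy queued on the corresponding incoming edge; those
pending insns are committed into the insn stream further below. */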
4015 expand_phi_nodes (&SA);
4017 /* Register rtl specific functions for cfg. */
4018 rtl_register_cfg_hooks ();
4020 init_block = construct_init_block ();
4022 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4023 remaining edges later. */
4024 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4025 e->flags &= ~EDGE_EXECUTABLE;
4027 lab_rtx_for_bb = pointer_map_create ();
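/* Expand every basic block. expand_gimple_basic_block may split BB (for
instance when expanding a tail call or a conditional jump) and returns the
block in which expansion ended, so the iteration continues from there. */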
4028 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4029 bb = expand_gimple_basic_block (bb);
4031 if (MAY_HAVE_DEBUG_INSNS)
4032 expand_debug_locations ();
4034 execute_free_datastructures ();
4035 timevar_push (TV_OUT_OF_SSA);
4036 finish_out_of_ssa (&SA);
4037 timevar_pop (TV_OUT_OF_SSA);
4039 timevar_push (TV_POST_EXPAND);
4040 /* We are no longer in SSA form. */
4041 cfun->gimple_df->in_ssa_p = false;
4043 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4044 conservatively to true until they are all profile-aware. */
4045 pointer_map_destroy (lab_rtx_for_bb);
4046 free_histograms ();
4048 construct_exit_block ();
4049 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4050 insn_locators_finalize ();
4052 /* Zap the tree EH table. */
4053 set_eh_throw_stmt_table (cfun, NULL);
4055 rebuild_jump_labels (get_insns ());
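/* Commit the insns that expansion queued on edges (most notably the PHI
copies inserted by expand_phi_nodes above). commit_one_edge_insertion may
split an edge and create a new block, so the successor vector is re-checked
at the current index instead of being advanced unconditionally. */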
4057 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4058 {
4059 edge e;
4060 edge_iterator ei;
4061 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4062 {
4063 if (e->insns.r)
4064 commit_one_edge_insertion (e);
4065 else
4066 ei_next (&ei);
4067 }
4068 }
4070 /* We're done expanding trees to RTL. */
4071 currently_expanding_to_rtl = 0;
4073 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4074 {
4075 edge e;
4076 edge_iterator ei;
4077 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4078 {
4079 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4080 e->flags &= ~EDGE_EXECUTABLE;
4082 /* At the moment not all abnormal edges match the RTL
4083 representation. It is safe to remove them here as
4084 find_many_sub_basic_blocks will rediscover them.
4085 In the future we should get this fixed properly. */
4086 if ((e->flags & EDGE_ABNORMAL)
4087 && !(e->flags & EDGE_SIBCALL))
4088 remove_edge (e);
4089 else
4090 ei_next (&ei);
4091 }
4092 }
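/* Each GIMPLE block was emitted as straight-line RTL, but expansion (and the
edge insertions above) may have created jumps and labels in the middle of a
block; find_many_sub_basic_blocks splits blocks at those points and
rediscovers the abnormal edges removed above. */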
4094 blocks = sbitmap_alloc (last_basic_block);
4095 sbitmap_ones (blocks);
4096 find_many_sub_basic_blocks (blocks);
4097 sbitmap_free (blocks);
4098 purge_all_dead_edges ();
4100 compact_blocks ();
4102 expand_stack_alignment ();
4104 #ifdef ENABLE_CHECKING
4105 verify_flow_info ();
4106 #endif
4108 /* There's no need to defer outputting this function any more; we
4109 know we want to output it. */
4110 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4112 /* Now that we're done expanding trees to RTL, we shouldn't have any
4113 more CONCATs anywhere. */
4114 generating_concat_p = 0;
4116 if (dump_file)
4117 {
4118 fprintf (dump_file,
4119 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4120 /* And the pass manager will dump RTL for us. */
4121 }
4123 /* If we're emitting a nested function, make sure its parent gets
4124 emitted as well. Doing otherwise confuses debug info. */
4125 {
4126 tree parent;
4127 for (parent = DECL_CONTEXT (current_function_decl);
4128 parent != NULL_TREE;
4129 parent = get_containing_scope (parent))
4130 if (TREE_CODE (parent) == FUNCTION_DECL)
4131 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4132 }
4134 /* We are now committed to emitting code for this function. Do any
4135 preparation, such as emitting abstract debug info for the inline
4136 function before it gets mangled by optimization. */
4137 if (cgraph_function_possibly_inlined_p (current_function_decl))
4138 (*debug_hooks->outlining_inline_function) (current_function_decl);
4140 TREE_ASM_WRITTEN (current_function_decl) = 1;
4142 /* After expanding, the return labels are no longer needed. */
4143 return_label = NULL;
4144 naked_return_label = NULL;
4145 /* Tag the blocks with a depth number so that change_scope can find
4146 the common parent easily. */
4147 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4148 default_rtl_profile ();
4149 timevar_pop (TV_POST_EXPAND);
4150 return 0;
4151 }
4153 struct rtl_opt_pass pass_expand =
4154 {
4155 {
4156 RTL_PASS,
4157 "expand", /* name */
4158 NULL, /* gate */
4159 gimple_expand_cfg, /* execute */
4160 NULL, /* sub */
4161 NULL, /* next */
4162 0, /* static_pass_number */
4163 TV_EXPAND, /* tv_id */
4164 PROP_ssa | PROP_gimple_leh | PROP_cfg
4165 | PROP_gimple_lcx, /* properties_required */
4166 PROP_rtl, /* properties_provided */
4167 PROP_ssa | PROP_trees, /* properties_destroyed */
4168 TODO_verify_ssa | TODO_verify_flow
4169 | TODO_verify_stmts, /* todo_flags_start */
4170 TODO_dump_func
4171 | TODO_ggc_collect /* todo_flags_finish */
4172 }
4173 };