/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
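
/* An illustrative note with hypothetical values: if STARTING_FRAME_OFFSET
   were 4 with a PREFERRED_STACK_BOUNDARY of 64 bits (8 bytes), then
   expand_used_vars below would compute off = 4 % 8 = 4 and
   frame_phase = 8 - 4 = 4, so the invariant above holds for the initial
   frame_offset of 4: (4 + 4) % 8 == 0.  */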

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
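
/* A worked example of the arithmetic above, with hypothetical values,
   for a downward-growing frame with frame_phase == 0: starting from
   frame_offset == -12, a request for SIZE == 5 at ALIGN == 8 computes
   -12 - 5 = -17, rounds down via the mask (-17 & -8 == -24), and
   returns the 8-byte-aligned offset -24; frame_offset becomes -24.  */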

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures containing unions.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, increasing, so that stack_vars_sorted is
     non-decreasing in size as documented above.  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment, increasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, decreasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
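
/* For illustration: under the comparator above, qsort places any object
   requiring "large" (dynamically realigned) alignment before every
   "small"-alignment one; among the latter, a 16-byte object sorts before
   a 64-byte one, with alignment and then SSA version / DECL_UID breaking
   ties so that the resulting order is stable across runs.  */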

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
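
/* For illustration of the sketch above, suppose objects of sizes 32, 16
   and 8 where the 16- and 8-byte objects conflict (are simultaneously
   live): the 16-byte object is merged into the 32-byte object's
   partition, and because union_stack_vars folds the merged object's
   conflicts into the representative, the 8-byte object then remains in
   a partition of its own.  */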

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
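
      /* Note: offset & -offset isolates the lowest set bit of OFFSET in
         two's complement, i.e. the largest power of two that divides it
         (e.g. 24 & -24 == 8), which scaled by BITS_PER_UNIT is the
         alignment this slot's placement actually guarantees.  */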
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
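
/* For illustration, assuming the default ssp-buffer-size of 8: with
   -fstack-protector-all (flag_stack_protect == 2) a char buf[4] or
   char buf[64] is phase 1 and an int arr[16] is phase 2, while with
   plain -fstack-protector only a char buffer of at least 8 bytes
   (SPCT_HAS_LARGE_CHAR_ARRAY) lands in phase 1.  */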

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree accumulating the stack usage of the variables that would
   be expanded; nothing is actually expanded here.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (tree decl)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  unsigned ix;
  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
    {
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)

     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
1864 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1865 statement STMT. */
1867 static void
1868 expand_call_stmt (gimple stmt)
1870 tree exp;
1871 tree lhs = gimple_call_lhs (stmt);
1872 size_t i;
1873 bool builtin_p;
1874 tree decl;
1876 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1878 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1879 decl = gimple_call_fndecl (stmt);
1880 builtin_p = decl && DECL_BUILT_IN (decl);
1882 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1883 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1885 for (i = 0; i < gimple_call_num_args (stmt); i++)
1887 tree arg = gimple_call_arg (stmt, i);
1888 gimple def;
1889 /* TER can substitute addresses into arguments of builtin functions, giving
1890 us a chance to infer more correct alignment information. See PR39954. */
1891 if (builtin_p
1892 && TREE_CODE (arg) == SSA_NAME
1893 && (def = get_gimple_for_ssa_name (arg))
1894 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1895 arg = gimple_assign_rhs1 (def);
1896 CALL_EXPR_ARG (exp, i) = arg;
1899 if (gimple_has_side_effects (stmt))
1900 TREE_SIDE_EFFECTS (exp) = 1;
1902 if (gimple_call_nothrow_p (stmt))
1903 TREE_NOTHROW (exp) = 1;
1905 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1906 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1907 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1908 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1909 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1910 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1911 TREE_BLOCK (exp) = gimple_block (stmt);
1913 if (lhs)
1914 expand_assignment (lhs, exp, false);
1915 else
1916 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1919 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1920 STMT that doesn't require special handling for outgoing edges. That
1921 is, no tailcalls and no GIMPLE_COND. */
1923 static void
1924 expand_gimple_stmt_1 (gimple stmt)
1926 tree op0;
1927 switch (gimple_code (stmt))
1929 case GIMPLE_GOTO:
1930 op0 = gimple_goto_dest (stmt);
1931 if (TREE_CODE (op0) == LABEL_DECL)
1932 expand_goto (op0);
1933 else
1934 expand_computed_goto (op0);
1935 break;
1936 case GIMPLE_LABEL:
1937 expand_label (gimple_label_label (stmt));
1938 break;
1939 case GIMPLE_NOP:
1940 case GIMPLE_PREDICT:
1941 break;
1942 case GIMPLE_SWITCH:
1943 expand_case (stmt);
1944 break;
1945 case GIMPLE_ASM:
1946 expand_asm_stmt (stmt);
1947 break;
1948 case GIMPLE_CALL:
1949 expand_call_stmt (stmt);
1950 break;
1952 case GIMPLE_RETURN:
1953 op0 = gimple_return_retval (stmt);
1955 if (op0 && op0 != error_mark_node)
1957 tree result = DECL_RESULT (current_function_decl);
1959 /* If we are not returning the current function's RESULT_DECL,
1960 build an assignment to it. */
1961 if (op0 != result)
1963 /* I believe that a function's RESULT_DECL is unique. */
1964 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1966 /* ??? We'd like to use simply expand_assignment here,
1967 but this fails if the value is of BLKmode but the return
1968 decl is a register. expand_return has special handling
1969 for this combination, which eventually should move
1970 to common code. See comments there. Until then, let's
1971 build a modify expression :-/ */
1972 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1973 result, op0);
1976 if (!op0)
1977 expand_null_return ();
1978 else
1979 expand_return (op0);
1980 break;
1982 case GIMPLE_ASSIGN:
1984 tree lhs = gimple_assign_lhs (stmt);
1986 /* Tree expand used to fiddle with |= and &= of two bitfield
1987 COMPONENT_REFs here. This can't happen with gimple; the LHS
1988 of binary assigns must be a gimple reg. */
1990 if (TREE_CODE (lhs) != SSA_NAME
1991 || get_gimple_rhs_class (gimple_expr_code (stmt))
1992 == GIMPLE_SINGLE_RHS)
1994 tree rhs = gimple_assign_rhs1 (stmt);
1995 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1996 == GIMPLE_SINGLE_RHS);
1997 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1998 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1999 expand_assignment (lhs, rhs,
2000 gimple_assign_nontemporal_move_p (stmt));
2002 else
2004 rtx target, temp;
2005 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2006 struct separate_ops ops;
2007 bool promoted = false;
2009 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2010 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2011 promoted = true;
2013 ops.code = gimple_assign_rhs_code (stmt);
2014 ops.type = TREE_TYPE (lhs);
2015 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2017 case GIMPLE_TERNARY_RHS:
2018 ops.op2 = gimple_assign_rhs3 (stmt);
2019 /* Fallthru */
2020 case GIMPLE_BINARY_RHS:
2021 ops.op1 = gimple_assign_rhs2 (stmt);
2022 /* Fallthru */
2023 case GIMPLE_UNARY_RHS:
2024 ops.op0 = gimple_assign_rhs1 (stmt);
2025 break;
2026 default:
2027 gcc_unreachable ();
2029 ops.location = gimple_location (stmt);
2031 /* If we want to use a nontemporal store, force the value into a
2032 register first. If we store into a promoted register, don't
2033 expand directly to the target. */
2034 temp = nontemporal || promoted ? NULL_RTX : target;
2035 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2036 EXPAND_NORMAL);
2038 if (temp == target)
2040 else if (promoted)
2042 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2043 /* If TEMP is a VOIDmode constant, use convert_modes to make
2044 sure that we properly convert it. */
2045 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2047 temp = convert_modes (GET_MODE (target),
2048 TYPE_MODE (ops.type),
2049 temp, unsignedp);
2050 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2051 GET_MODE (target), temp, unsignedp);
2054 convert_move (SUBREG_REG (target), temp, unsignedp);
2056 else if (nontemporal && emit_storent_insn (target, temp))
2058 else
2060 temp = force_operand (temp, target);
2061 if (temp != target)
2062 emit_move_insn (target, temp);
2066 break;
2068 default:
2069 gcc_unreachable ();
2073 /* Expand one gimple statement STMT and return the last RTL instruction
2074 before any of the newly generated ones.
2076 In addition to generating the necessary RTL instructions this also
2077 sets REG_EH_REGION notes if necessary and sets the current source
2078 location for diagnostics. */
2080 static rtx
2081 expand_gimple_stmt (gimple stmt)
2083 int lp_nr = 0;
2084 rtx last = NULL;
2085 location_t saved_location = input_location;
2087 last = get_last_insn ();
2089 /* If this is an expression of some kind and it has an associated line
2090 number, then emit the line number before expanding the expression.
2092 We need to save and restore the file and line information so that
2093 errors discovered during expansion are emitted with the right
2094 information. It would be better if the diagnostic routines
2095 used the file/line information embedded in the tree nodes rather
2096 than globals. */
2097 gcc_assert (cfun);
2099 if (gimple_has_location (stmt))
2101 input_location = gimple_location (stmt);
2102 set_curr_insn_source_location (input_location);
2104 /* Record where the insns produced belong. */
2105 set_curr_insn_block (gimple_block (stmt));
2108 expand_gimple_stmt_1 (stmt);
2109 /* Free any temporaries used to evaluate this statement. */
2110 free_temp_slots ();
2112 input_location = saved_location;
2114 /* Mark all insns that may trap. */
2115 lp_nr = lookup_stmt_eh_lp (stmt);
2116 if (lp_nr)
2118 rtx insn;
2119 for (insn = next_real_insn (last); insn;
2120 insn = next_real_insn (insn))
2122 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2123 /* If we want exceptions for non-call insns, any
2124 may_trap_p instruction may throw. */
2125 && GET_CODE (PATTERN (insn)) != CLOBBER
2126 && GET_CODE (PATTERN (insn)) != USE
2127 && insn_could_throw_p (insn))
2128 make_reg_eh_region_note (insn, 0, lp_nr);
2132 return last;
2135 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2136 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2137 generated a tail call (something that might be denied by the ABI
2138 rules governing the call; see calls.c).
2140 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2141 can still reach the rest of BB. The case here is __builtin_sqrt,
2142 where the NaN result goes through the external function (with a
2143 tailcall) and the normal result happens via a sqrt instruction. */
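/* A sketch (hypothetical, for illustration only) of source that can take
   this path on a target with a hardware sqrt instruction: the normal
   result is computed inline and only the NaN case sibcalls the library
   routine, so the rest of BB stays reachable.  Compiled out.  */
#if 0
double
sqrt_wrapper (double x)
{
  return __builtin_sqrt (x);	/* May expand to an inline sqrt insn plus
				   a conditional sibcall to sqrt().  */
}
#endif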
2145 static basic_block
2146 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2148 rtx last2, last;
2149 edge e;
2150 edge_iterator ei;
2151 int probability;
2152 gcov_type count;
2154 last2 = last = expand_gimple_stmt (stmt);
2156 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2157 if (CALL_P (last) && SIBLING_CALL_P (last))
2158 goto found;
2160 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2162 *can_fallthru = true;
2163 return NULL;
2165 found:
2166 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2167 Any instructions emitted here are about to be deleted. */
2168 do_pending_stack_adjust ();
2170 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2171 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2172 EH or abnormal edges, we shouldn't have created a tail call in
2173 the first place. So it seems to me we should just be removing
2174 all edges here, or redirecting the existing fallthru edge to
2175 the exit block. */
2177 probability = 0;
2178 count = 0;
2180 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2182 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2184 if (e->dest != EXIT_BLOCK_PTR)
2186 e->dest->count -= e->count;
2187 e->dest->frequency -= EDGE_FREQUENCY (e);
2188 if (e->dest->count < 0)
2189 e->dest->count = 0;
2190 if (e->dest->frequency < 0)
2191 e->dest->frequency = 0;
2193 count += e->count;
2194 probability += e->probability;
2195 remove_edge (e);
2197 else
2198 ei_next (&ei);
2201 /* This is somewhat ugly: the call_expr expander often emits instructions
2202 after the sibcall (to perform the function return). These confuse the
2203 find_many_sub_basic_blocks code, so we need to get rid of these. */
2204 last = NEXT_INSN (last);
2205 gcc_assert (BARRIER_P (last));
2207 *can_fallthru = false;
2208 while (NEXT_INSN (last))
2210 /* For instance, the sqrt builtin expander expands an if with a
2211 sibcall in the then-arm and a label for the else-arm. */
2212 if (LABEL_P (NEXT_INSN (last)))
2214 *can_fallthru = true;
2215 break;
2217 delete_insn (NEXT_INSN (last));
2220 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2221 e->probability += probability;
2222 e->count += count;
2223 BB_END (bb) = last;
2224 update_bb_for_insn (bb);
2226 if (NEXT_INSN (last))
2228 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2230 last = BB_END (bb);
2231 if (BARRIER_P (last))
2232 BB_END (bb) = PREV_INSN (last);
2235 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2237 return bb;
2240 /* Return the difference between the floor and the truncated result of
2241 a signed division by OP1 with remainder MOD. */
2242 static rtx
2243 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2245 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2246 return gen_rtx_IF_THEN_ELSE
2247 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2248 gen_rtx_IF_THEN_ELSE
2249 (mode, gen_rtx_LT (BImode,
2250 gen_rtx_DIV (mode, op1, mod),
2251 const0_rtx),
2252 constm1_rtx, const0_rtx),
2253 const0_rtx);
2256 /* Return the difference between the ceil and the truncated result of
2257 a signed division by OP1 with remainder MOD. */
2258 static rtx
2259 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2261 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2262 return gen_rtx_IF_THEN_ELSE
2263 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2264 gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GT (BImode,
2266 gen_rtx_DIV (mode, op1, mod),
2267 const0_rtx),
2268 const1_rtx, const0_rtx),
2269 const0_rtx);
2272 /* Return the difference between the ceil and the truncated result of
2273 an unsigned division by OP1 with remainder MOD. */
2274 static rtx
2275 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2277 /* (mod != 0 ? 1 : 0) */
2278 return gen_rtx_IF_THEN_ELSE
2279 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2280 const1_rtx, const0_rtx);
2283 /* Return the difference between the rounded and the truncated result
2284 of a signed division by OP1 with remainder MOD. Halfway cases are
2285 rounded away from zero, rather than to the nearest even number. */
2286 static rtx
2287 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2289 /* (abs (mod) >= abs (op1) - abs (mod)
2290 ? (op1 / mod > 0 ? 1 : -1)
2291 : 0) */
2292 return gen_rtx_IF_THEN_ELSE
2293 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2294 gen_rtx_MINUS (mode,
2295 gen_rtx_ABS (mode, op1),
2296 gen_rtx_ABS (mode, mod))),
2297 gen_rtx_IF_THEN_ELSE
2298 (mode, gen_rtx_GT (BImode,
2299 gen_rtx_DIV (mode, op1, mod),
2300 const0_rtx),
2301 const1_rtx, constm1_rtx),
2302 const0_rtx);
2305 /* Return the difference between the rounded and the truncated result
2306 of an unsigned division by OP1 with remainder MOD. Halfway cases
2307 are rounded away from zero, rather than to the nearest even
2308 number. */
2309 static rtx
2310 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2312 /* (mod >= op1 - mod ? 1 : 0) */
2313 return gen_rtx_IF_THEN_ELSE
2314 (mode, gen_rtx_GE (BImode, mod,
2315 gen_rtx_MINUS (mode, op1, mod)),
2316 const1_rtx, const0_rtx);
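/* An illustrative sketch (hypothetical, not part of this file) of the
   identities the *_adjust helpers above encode, written with C's
   truncating / and %.  Compiled out; for exposition only.  */
#if 0
static int
floor_div_sketch (int a, int b)
{
  int q = a / b, r = a % b;
  /* floor_sdiv_adjust: -1 iff the remainder is nonzero and the operands
     have opposite signs; R carries the sign of A, so b / r < 0 tests
     exactly that.  */
  return q + ((r != 0 && b / r < 0) ? -1 : 0);
}

static int
ceil_div_sketch (int a, int b)
{
  int q = a / b, r = a % b;
  /* ceil_sdiv_adjust: +1 iff the remainder is nonzero and the operands
     have the same sign (b / r > 0).  */
  return q + ((r != 0 && b / r > 0) ? 1 : 0);
}

static unsigned int
round_udiv_sketch (unsigned int a, unsigned int b)
{
  unsigned int q = a / b, r = a % b;
  /* round_udiv_adjust: +1 iff r >= b - r, i.e. the remainder is at
     least half the divisor; halfway cases round away from zero.  */
  return q + (r >= b - r ? 1 : 0);
}
#endif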
2319 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2320 any rtl. */
2322 static rtx
2323 convert_debug_memory_address (enum machine_mode mode, rtx x)
2325 enum machine_mode xmode = GET_MODE (x);
2327 #ifndef POINTERS_EXTEND_UNSIGNED
2328 gcc_assert (mode == Pmode);
2329 gcc_assert (xmode == mode || xmode == VOIDmode);
2330 #else
2331 gcc_assert (mode == Pmode || mode == ptr_mode);
2333 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2334 return x;
2336 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2337 x = simplify_gen_subreg (mode, x, xmode,
2338 subreg_lowpart_offset
2339 (mode, xmode));
2340 else if (POINTERS_EXTEND_UNSIGNED > 0)
2341 x = gen_rtx_ZERO_EXTEND (mode, x);
2342 else if (!POINTERS_EXTEND_UNSIGNED)
2343 x = gen_rtx_SIGN_EXTEND (mode, x);
2344 else
2345 gcc_unreachable ();
2346 #endif /* POINTERS_EXTEND_UNSIGNED */
2348 return x;
2351 /* Return an RTX equivalent to the value of the tree expression
2352 EXP. */
2354 static rtx
2355 expand_debug_expr (tree exp)
2357 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2358 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2359 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2360 addr_space_t as;
2362 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2364 case tcc_expression:
2365 switch (TREE_CODE (exp))
2367 case COND_EXPR:
2368 case DOT_PROD_EXPR:
2369 case WIDEN_MULT_PLUS_EXPR:
2370 case WIDEN_MULT_MINUS_EXPR:
2371 case FMA_EXPR:
2372 goto ternary;
2374 case TRUTH_ANDIF_EXPR:
2375 case TRUTH_ORIF_EXPR:
2376 case TRUTH_AND_EXPR:
2377 case TRUTH_OR_EXPR:
2378 case TRUTH_XOR_EXPR:
2379 goto binary;
2381 case TRUTH_NOT_EXPR:
2382 goto unary;
2384 default:
2385 break;
2387 break;
2389 ternary:
2390 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2391 if (!op2)
2392 return NULL_RTX;
2393 /* Fall through. */
2395 binary:
2396 case tcc_binary:
2397 case tcc_comparison:
2398 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2399 if (!op1)
2400 return NULL_RTX;
2401 /* Fall through. */
2403 unary:
2404 case tcc_unary:
2405 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2406 if (!op0)
2407 return NULL_RTX;
2408 break;
2410 case tcc_type:
2411 case tcc_statement:
2412 gcc_unreachable ();
2414 case tcc_constant:
2415 case tcc_exceptional:
2416 case tcc_declaration:
2417 case tcc_reference:
2418 case tcc_vl_exp:
2419 break;
2422 switch (TREE_CODE (exp))
2424 case STRING_CST:
2425 if (!lookup_constant_def (exp))
2427 if (strlen (TREE_STRING_POINTER (exp)) + 1
2428 != (size_t) TREE_STRING_LENGTH (exp))
2429 return NULL_RTX;
2430 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2431 op0 = gen_rtx_MEM (BLKmode, op0);
2432 set_mem_attributes (op0, exp, 0);
2433 return op0;
2435 /* Fall through... */
2437 case INTEGER_CST:
2438 case REAL_CST:
2439 case FIXED_CST:
2440 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2441 return op0;
2443 case COMPLEX_CST:
2444 gcc_assert (COMPLEX_MODE_P (mode));
2445 op0 = expand_debug_expr (TREE_REALPART (exp));
2446 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2447 return gen_rtx_CONCAT (mode, op0, op1);
2449 case DEBUG_EXPR_DECL:
2450 op0 = DECL_RTL_IF_SET (exp);
2452 if (op0)
2453 return op0;
2455 op0 = gen_rtx_DEBUG_EXPR (mode);
2456 DEBUG_EXPR_TREE_DECL (op0) = exp;
2457 SET_DECL_RTL (exp, op0);
2459 return op0;
2461 case VAR_DECL:
2462 case PARM_DECL:
2463 case FUNCTION_DECL:
2464 case LABEL_DECL:
2465 case CONST_DECL:
2466 case RESULT_DECL:
2467 op0 = DECL_RTL_IF_SET (exp);
2469 /* This decl was probably optimized away. */
2470 if (!op0)
2472 if (TREE_CODE (exp) != VAR_DECL
2473 || DECL_EXTERNAL (exp)
2474 || !TREE_STATIC (exp)
2475 || !DECL_NAME (exp)
2476 || DECL_HARD_REGISTER (exp)
2477 || mode == VOIDmode)
2478 return NULL;
2480 op0 = make_decl_rtl_for_debug (exp);
2481 if (!MEM_P (op0)
2482 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2483 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2484 return NULL;
2486 else
2487 op0 = copy_rtx (op0);
2489 if (GET_MODE (op0) == BLKmode
2490 /* If op0 is not BLKmode but MODE is, adjust_mode
2491 below would ICE. While it is likely a FE bug,
2492 try to be robust here. See PR43166. */
2493 || mode == BLKmode
2494 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2496 gcc_assert (MEM_P (op0));
2497 op0 = adjust_address_nv (op0, mode, 0);
2498 return op0;
2501 /* Fall through. */
2503 adjust_mode:
2504 case PAREN_EXPR:
2505 case NOP_EXPR:
2506 case CONVERT_EXPR:
2508 enum machine_mode inner_mode = GET_MODE (op0);
2510 if (mode == inner_mode)
2511 return op0;
2513 if (inner_mode == VOIDmode)
2515 if (TREE_CODE (exp) == SSA_NAME)
2516 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2517 else
2518 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2519 if (mode == inner_mode)
2520 return op0;
2523 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2525 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2526 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2527 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2528 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2529 else
2530 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2532 else if (FLOAT_MODE_P (mode))
2534 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2535 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2536 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2537 else
2538 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2540 else if (FLOAT_MODE_P (inner_mode))
2542 if (unsignedp)
2543 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2544 else
2545 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2547 else if (CONSTANT_P (op0)
2548 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2549 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2550 subreg_lowpart_offset (mode,
2551 inner_mode));
2552 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2553 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2554 : unsignedp)
2555 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2556 else
2557 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2559 return op0;
2562 case MEM_REF:
2563 case INDIRECT_REF:
2564 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2565 if (!op0)
2566 return NULL;
2568 if (TREE_CODE (exp) == MEM_REF)
2570 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2571 if (!op1 || !CONST_INT_P (op1))
2572 return NULL;
2574 op0 = plus_constant (op0, INTVAL (op1));
2577 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2578 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2579 else
2580 as = ADDR_SPACE_GENERIC;
2582 op0 = gen_rtx_MEM (mode, op0);
2584 set_mem_attributes (op0, exp, 0);
2585 set_mem_addr_space (op0, as);
2587 return op0;
2589 case TARGET_MEM_REF:
2590 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2591 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2592 return NULL;
2594 op0 = expand_debug_expr
2595 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2596 if (!op0)
2597 return NULL;
2599 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2601 op0 = gen_rtx_MEM (mode, op0);
2603 set_mem_attributes (op0, exp, 0);
2604 set_mem_addr_space (op0, as);
2606 return op0;
2608 case ARRAY_REF:
2609 case ARRAY_RANGE_REF:
2610 case COMPONENT_REF:
2611 case BIT_FIELD_REF:
2612 case REALPART_EXPR:
2613 case IMAGPART_EXPR:
2614 case VIEW_CONVERT_EXPR:
2616 enum machine_mode mode1;
2617 HOST_WIDE_INT bitsize, bitpos;
2618 tree offset;
2619 int volatilep = 0;
2620 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2621 &mode1, &unsignedp, &volatilep, false);
2622 rtx orig_op0;
2624 if (bitsize == 0)
2625 return NULL;
2627 orig_op0 = op0 = expand_debug_expr (tem);
2629 if (!op0)
2630 return NULL;
2632 if (offset)
2634 enum machine_mode addrmode, offmode;
2636 if (!MEM_P (op0))
2637 return NULL;
2639 op0 = XEXP (op0, 0);
2640 addrmode = GET_MODE (op0);
2641 if (addrmode == VOIDmode)
2642 addrmode = Pmode;
2644 op1 = expand_debug_expr (offset);
2645 if (!op1)
2646 return NULL;
2648 offmode = GET_MODE (op1);
2649 if (offmode == VOIDmode)
2650 offmode = TYPE_MODE (TREE_TYPE (offset));
2652 if (addrmode != offmode)
2653 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2654 subreg_lowpart_offset (addrmode,
2655 offmode));
2657 /* Don't use offset_address here; we don't need a
2658 recognizable address, and we don't want to generate
2659 code. */
2660 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2663 if (MEM_P (op0))
2665 if (mode1 == VOIDmode)
2666 /* Bitfield. */
2667 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2668 if (bitpos >= BITS_PER_UNIT)
2670 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2671 bitpos %= BITS_PER_UNIT;
2673 else if (bitpos < 0)
2675 HOST_WIDE_INT units
2676 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2677 op0 = adjust_address_nv (op0, mode1, units);
2678 bitpos += units * BITS_PER_UNIT;
2680 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2681 op0 = adjust_address_nv (op0, mode, 0);
2682 else if (GET_MODE (op0) != mode1)
2683 op0 = adjust_address_nv (op0, mode1, 0);
2684 else
2685 op0 = copy_rtx (op0);
2686 if (op0 == orig_op0)
2687 op0 = shallow_copy_rtx (op0);
2688 set_mem_attributes (op0, exp, 0);
2691 if (bitpos == 0 && mode == GET_MODE (op0))
2692 return op0;
2694 if (bitpos < 0)
2695 return NULL;
2697 if (GET_MODE (op0) == BLKmode)
2698 return NULL;
2700 if ((bitpos % BITS_PER_UNIT) == 0
2701 && bitsize == GET_MODE_BITSIZE (mode1))
2703 enum machine_mode opmode = GET_MODE (op0);
2705 if (opmode == VOIDmode)
2706 opmode = TYPE_MODE (TREE_TYPE (tem));
2708 /* This condition may hold if we're expanding the address
2709 right past the end of an array that turned out not to
2710 be addressable (i.e., the address was only computed in
2711 debug stmts). The gen_subreg below would rightfully
2712 crash, and the address doesn't really exist, so just
2713 drop it. */
2714 if (bitpos >= GET_MODE_BITSIZE (opmode))
2715 return NULL;
2717 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2718 return simplify_gen_subreg (mode, op0, opmode,
2719 bitpos / BITS_PER_UNIT);
2722 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2723 && TYPE_UNSIGNED (TREE_TYPE (exp))
2724 ? SIGN_EXTRACT
2725 : ZERO_EXTRACT, mode,
2726 GET_MODE (op0) != VOIDmode
2727 ? GET_MODE (op0)
2728 : TYPE_MODE (TREE_TYPE (tem)),
2729 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2732 case ABS_EXPR:
2733 return gen_rtx_ABS (mode, op0);
2735 case NEGATE_EXPR:
2736 return gen_rtx_NEG (mode, op0);
2738 case BIT_NOT_EXPR:
2739 return gen_rtx_NOT (mode, op0);
2741 case FLOAT_EXPR:
2742 if (unsignedp)
2743 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2744 else
2745 return gen_rtx_FLOAT (mode, op0);
2747 case FIX_TRUNC_EXPR:
2748 if (unsignedp)
2749 return gen_rtx_UNSIGNED_FIX (mode, op0);
2750 else
2751 return gen_rtx_FIX (mode, op0);
2753 case POINTER_PLUS_EXPR:
2754 /* For the rare target where pointers are not the same size as
2755 size_t, we need to check for mismatched modes and correct
2756 the addend. */
2757 if (op0 && op1
2758 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2759 && GET_MODE (op0) != GET_MODE (op1))
2761 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2762 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2763 else
2764 /* We always sign-extend, regardless of the signedness of
2765 the operand, because the operand is always unsigned
2766 here even if the original C expression is signed. */
2767 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2769 /* Fall through. */
2770 case PLUS_EXPR:
2771 return gen_rtx_PLUS (mode, op0, op1);
2773 case MINUS_EXPR:
2774 return gen_rtx_MINUS (mode, op0, op1);
2776 case MULT_EXPR:
2777 return gen_rtx_MULT (mode, op0, op1);
2779 case RDIV_EXPR:
2780 case TRUNC_DIV_EXPR:
2781 case EXACT_DIV_EXPR:
2782 if (unsignedp)
2783 return gen_rtx_UDIV (mode, op0, op1);
2784 else
2785 return gen_rtx_DIV (mode, op0, op1);
2787 case TRUNC_MOD_EXPR:
2788 if (unsignedp)
2789 return gen_rtx_UMOD (mode, op0, op1);
2790 else
2791 return gen_rtx_MOD (mode, op0, op1);
2793 case FLOOR_DIV_EXPR:
2794 if (unsignedp)
2795 return gen_rtx_UDIV (mode, op0, op1);
2796 else
2798 rtx div = gen_rtx_DIV (mode, op0, op1);
2799 rtx mod = gen_rtx_MOD (mode, op0, op1);
2800 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2801 return gen_rtx_PLUS (mode, div, adj);
2804 case FLOOR_MOD_EXPR:
2805 if (unsignedp)
2806 return gen_rtx_UMOD (mode, op0, op1);
2807 else
2809 rtx mod = gen_rtx_MOD (mode, op0, op1);
2810 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2811 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2812 return gen_rtx_PLUS (mode, mod, adj);
2815 case CEIL_DIV_EXPR:
2816 if (unsignedp)
2818 rtx div = gen_rtx_UDIV (mode, op0, op1);
2819 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2820 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2821 return gen_rtx_PLUS (mode, div, adj);
2823 else
2825 rtx div = gen_rtx_DIV (mode, op0, op1);
2826 rtx mod = gen_rtx_MOD (mode, op0, op1);
2827 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2828 return gen_rtx_PLUS (mode, div, adj);
2831 case CEIL_MOD_EXPR:
2832 if (unsignedp)
2834 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2835 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2836 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2837 return gen_rtx_PLUS (mode, mod, adj);
2839 else
2841 rtx mod = gen_rtx_MOD (mode, op0, op1);
2842 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2843 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2844 return gen_rtx_PLUS (mode, mod, adj);
2847 case ROUND_DIV_EXPR:
2848 if (unsignedp)
2850 rtx div = gen_rtx_UDIV (mode, op0, op1);
2851 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2852 rtx adj = round_udiv_adjust (mode, mod, op1);
2853 return gen_rtx_PLUS (mode, div, adj);
2855 else
2857 rtx div = gen_rtx_DIV (mode, op0, op1);
2858 rtx mod = gen_rtx_MOD (mode, op0, op1);
2859 rtx adj = round_sdiv_adjust (mode, mod, op1);
2860 return gen_rtx_PLUS (mode, div, adj);
2863 case ROUND_MOD_EXPR:
2864 if (unsignedp)
2866 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2867 rtx adj = round_udiv_adjust (mode, mod, op1);
2868 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2869 return gen_rtx_PLUS (mode, mod, adj);
2871 else
2873 rtx mod = gen_rtx_MOD (mode, op0, op1);
2874 rtx adj = round_sdiv_adjust (mode, mod, op1);
2875 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2876 return gen_rtx_PLUS (mode, mod, adj);
2879 case LSHIFT_EXPR:
2880 return gen_rtx_ASHIFT (mode, op0, op1);
2882 case RSHIFT_EXPR:
2883 if (unsignedp)
2884 return gen_rtx_LSHIFTRT (mode, op0, op1);
2885 else
2886 return gen_rtx_ASHIFTRT (mode, op0, op1);
2888 case LROTATE_EXPR:
2889 return gen_rtx_ROTATE (mode, op0, op1);
2891 case RROTATE_EXPR:
2892 return gen_rtx_ROTATERT (mode, op0, op1);
2894 case MIN_EXPR:
2895 if (unsignedp)
2896 return gen_rtx_UMIN (mode, op0, op1);
2897 else
2898 return gen_rtx_SMIN (mode, op0, op1);
2900 case MAX_EXPR:
2901 if (unsignedp)
2902 return gen_rtx_UMAX (mode, op0, op1);
2903 else
2904 return gen_rtx_SMAX (mode, op0, op1);
2906 case BIT_AND_EXPR:
2907 case TRUTH_AND_EXPR:
2908 return gen_rtx_AND (mode, op0, op1);
2910 case BIT_IOR_EXPR:
2911 case TRUTH_OR_EXPR:
2912 return gen_rtx_IOR (mode, op0, op1);
2914 case BIT_XOR_EXPR:
2915 case TRUTH_XOR_EXPR:
2916 return gen_rtx_XOR (mode, op0, op1);
2918 case TRUTH_ANDIF_EXPR:
2919 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2921 case TRUTH_ORIF_EXPR:
2922 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2924 case TRUTH_NOT_EXPR:
2925 return gen_rtx_EQ (mode, op0, const0_rtx);
2927 case LT_EXPR:
2928 if (unsignedp)
2929 return gen_rtx_LTU (mode, op0, op1);
2930 else
2931 return gen_rtx_LT (mode, op0, op1);
2933 case LE_EXPR:
2934 if (unsignedp)
2935 return gen_rtx_LEU (mode, op0, op1);
2936 else
2937 return gen_rtx_LE (mode, op0, op1);
2939 case GT_EXPR:
2940 if (unsignedp)
2941 return gen_rtx_GTU (mode, op0, op1);
2942 else
2943 return gen_rtx_GT (mode, op0, op1);
2945 case GE_EXPR:
2946 if (unsignedp)
2947 return gen_rtx_GEU (mode, op0, op1);
2948 else
2949 return gen_rtx_GE (mode, op0, op1);
2951 case EQ_EXPR:
2952 return gen_rtx_EQ (mode, op0, op1);
2954 case NE_EXPR:
2955 return gen_rtx_NE (mode, op0, op1);
2957 case UNORDERED_EXPR:
2958 return gen_rtx_UNORDERED (mode, op0, op1);
2960 case ORDERED_EXPR:
2961 return gen_rtx_ORDERED (mode, op0, op1);
2963 case UNLT_EXPR:
2964 return gen_rtx_UNLT (mode, op0, op1);
2966 case UNLE_EXPR:
2967 return gen_rtx_UNLE (mode, op0, op1);
2969 case UNGT_EXPR:
2970 return gen_rtx_UNGT (mode, op0, op1);
2972 case UNGE_EXPR:
2973 return gen_rtx_UNGE (mode, op0, op1);
2975 case UNEQ_EXPR:
2976 return gen_rtx_UNEQ (mode, op0, op1);
2978 case LTGT_EXPR:
2979 return gen_rtx_LTGT (mode, op0, op1);
2981 case COND_EXPR:
2982 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2984 case COMPLEX_EXPR:
2985 gcc_assert (COMPLEX_MODE_P (mode));
2986 if (GET_MODE (op0) == VOIDmode)
2987 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2988 if (GET_MODE (op1) == VOIDmode)
2989 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2990 return gen_rtx_CONCAT (mode, op0, op1);
2992 case CONJ_EXPR:
2993 if (GET_CODE (op0) == CONCAT)
2994 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2995 gen_rtx_NEG (GET_MODE_INNER (mode),
2996 XEXP (op0, 1)));
2997 else
2999 enum machine_mode imode = GET_MODE_INNER (mode);
3000 rtx re, im;
3002 if (MEM_P (op0))
3004 re = adjust_address_nv (op0, imode, 0);
3005 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3007 else
3009 enum machine_mode ifmode = int_mode_for_mode (mode);
3010 enum machine_mode ihmode = int_mode_for_mode (imode);
3011 rtx halfsize;
3012 if (ifmode == BLKmode || ihmode == BLKmode)
3013 return NULL;
3014 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3015 re = op0;
3016 if (mode != ifmode)
3017 re = gen_rtx_SUBREG (ifmode, re, 0);
3018 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3019 if (imode != ihmode)
3020 re = gen_rtx_SUBREG (imode, re, 0);
3021 im = copy_rtx (op0);
3022 if (mode != ifmode)
3023 im = gen_rtx_SUBREG (ifmode, im, 0);
3024 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3025 if (imode != ihmode)
3026 im = gen_rtx_SUBREG (imode, im, 0);
3028 im = gen_rtx_NEG (imode, im);
3029 return gen_rtx_CONCAT (mode, re, im);
3032 case ADDR_EXPR:
3033 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3034 if (!op0 || !MEM_P (op0))
3036 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3037 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3038 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3039 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3040 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3042 if (handled_component_p (TREE_OPERAND (exp, 0)))
3044 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3045 tree decl
3046 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3047 &bitoffset, &bitsize, &maxsize);
3048 if ((TREE_CODE (decl) == VAR_DECL
3049 || TREE_CODE (decl) == PARM_DECL
3050 || TREE_CODE (decl) == RESULT_DECL)
3051 && !TREE_ADDRESSABLE (decl)
3052 && (bitoffset % BITS_PER_UNIT) == 0
3053 && bitsize > 0
3054 && bitsize == maxsize)
3055 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3056 bitoffset / BITS_PER_UNIT);
3059 return NULL;
3062 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3064 return op0;
3066 case VECTOR_CST:
3067 exp = build_constructor_from_list (TREE_TYPE (exp),
3068 TREE_VECTOR_CST_ELTS (exp));
3069 /* Fall through. */
3071 case CONSTRUCTOR:
3072 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3074 unsigned i;
3075 tree val;
3077 op0 = gen_rtx_CONCATN
3078 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3080 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3082 op1 = expand_debug_expr (val);
3083 if (!op1)
3084 return NULL;
3085 XVECEXP (op0, 0, i) = op1;
3088 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3090 op1 = expand_debug_expr
3091 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3093 if (!op1)
3094 return NULL;
3096 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3097 XVECEXP (op0, 0, i) = op1;
3100 return op0;
3102 else
3103 goto flag_unsupported;
3105 case CALL_EXPR:
3106 /* ??? Maybe handle some builtins? */
3107 return NULL;
3109 case SSA_NAME:
3111 gimple g = get_gimple_for_ssa_name (exp);
3112 if (g)
3114 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3115 if (!op0)
3116 return NULL;
3118 else
3120 int part = var_to_partition (SA.map, exp);
3122 if (part == NO_PARTITION)
3123 return NULL;
3125 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3127 op0 = SA.partition_to_pseudo[part];
3129 goto adjust_mode;
3132 case ERROR_MARK:
3133 return NULL;
3135 /* Vector stuff. For most of these codes we don't have corresponding rtl codes. */
3136 case REALIGN_LOAD_EXPR:
3137 case REDUC_MAX_EXPR:
3138 case REDUC_MIN_EXPR:
3139 case REDUC_PLUS_EXPR:
3140 case VEC_COND_EXPR:
3141 case VEC_EXTRACT_EVEN_EXPR:
3142 case VEC_EXTRACT_ODD_EXPR:
3143 case VEC_INTERLEAVE_HIGH_EXPR:
3144 case VEC_INTERLEAVE_LOW_EXPR:
3145 case VEC_LSHIFT_EXPR:
3146 case VEC_PACK_FIX_TRUNC_EXPR:
3147 case VEC_PACK_SAT_EXPR:
3148 case VEC_PACK_TRUNC_EXPR:
3149 case VEC_RSHIFT_EXPR:
3150 case VEC_UNPACK_FLOAT_HI_EXPR:
3151 case VEC_UNPACK_FLOAT_LO_EXPR:
3152 case VEC_UNPACK_HI_EXPR:
3153 case VEC_UNPACK_LO_EXPR:
3154 case VEC_WIDEN_MULT_HI_EXPR:
3155 case VEC_WIDEN_MULT_LO_EXPR:
3156 return NULL;
3158 /* Misc codes. */
3159 case ADDR_SPACE_CONVERT_EXPR:
3160 case FIXED_CONVERT_EXPR:
3161 case OBJ_TYPE_REF:
3162 case WITH_SIZE_EXPR:
3163 return NULL;
3165 case DOT_PROD_EXPR:
3166 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3167 && SCALAR_INT_MODE_P (mode))
3169 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3170 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3171 else
3172 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3173 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3174 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3175 else
3176 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3177 op0 = gen_rtx_MULT (mode, op0, op1);
3178 return gen_rtx_PLUS (mode, op0, op2);
3180 return NULL;
3182 case WIDEN_MULT_EXPR:
3183 case WIDEN_MULT_PLUS_EXPR:
3184 case WIDEN_MULT_MINUS_EXPR:
3185 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3186 && SCALAR_INT_MODE_P (mode))
3188 enum machine_mode inner_mode = GET_MODE (op0);
3189 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3190 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3191 else
3192 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3193 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3194 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3195 else
3196 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3197 op0 = gen_rtx_MULT (mode, op0, op1);
3198 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3199 return op0;
3200 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3201 return gen_rtx_PLUS (mode, op0, op2);
3202 else
3203 return gen_rtx_MINUS (mode, op2, op0);
3205 return NULL;
3207 case WIDEN_SUM_EXPR:
3208 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3209 && SCALAR_INT_MODE_P (mode))
3211 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3212 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3213 else
3214 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3215 return gen_rtx_PLUS (mode, op0, op1);
3217 return NULL;
3219 case FMA_EXPR:
3220 return gen_rtx_FMA (mode, op0, op1, op2);
3222 default:
3223 flag_unsupported:
3224 #ifdef ENABLE_CHECKING
3225 debug_tree (exp);
3226 gcc_unreachable ();
3227 #else
3228 return NULL;
3229 #endif
3233 /* Expand the _LOCs in debug insns. We run this after expanding all
3234 regular insns, so that any variables referenced in the function
3235 will have their DECL_RTLs set. */
3237 static void
3238 expand_debug_locations (void)
3240 rtx insn;
3241 rtx last = get_last_insn ();
3242 int save_strict_alias = flag_strict_aliasing;
3244 /* New alias sets while setting up memory attributes cause
3245 -fcompare-debug failures, even though they don't bring about any
3246 codegen changes. */
3247 flag_strict_aliasing = 0;
3249 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3250 if (DEBUG_INSN_P (insn))
3252 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3253 rtx val;
3254 enum machine_mode mode;
3256 if (value == NULL_TREE)
3257 val = NULL_RTX;
3258 else
3260 val = expand_debug_expr (value);
3261 gcc_assert (last == get_last_insn ());
3264 if (!val)
3265 val = gen_rtx_UNKNOWN_VAR_LOC ();
3266 else
3268 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3270 gcc_assert (mode == GET_MODE (val)
3271 || (GET_MODE (val) == VOIDmode
3272 && (CONST_INT_P (val)
3273 || GET_CODE (val) == CONST_FIXED
3274 || GET_CODE (val) == CONST_DOUBLE
3275 || GET_CODE (val) == LABEL_REF)));
3278 INSN_VAR_LOCATION_LOC (insn) = val;
3281 flag_strict_aliasing = save_strict_alias;
3284 /* Expand basic block BB from GIMPLE trees to RTL. */
3286 static basic_block
3287 expand_gimple_basic_block (basic_block bb)
3289 gimple_stmt_iterator gsi;
3290 gimple_seq stmts;
3291 gimple stmt = NULL;
3292 rtx note, last;
3293 edge e;
3294 edge_iterator ei;
3295 void **elt;
3297 if (dump_file)
3298 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3299 bb->index);
3301 /* Note that since we are now transitioning from GIMPLE to RTL, we
3302 cannot use the gsi_*_bb() routines because they expect the basic
3303 block to be in GIMPLE, instead of RTL. Therefore, we need to
3304 access the BB sequence directly. */
3305 stmts = bb_seq (bb);
3306 bb->il.gimple = NULL;
3307 rtl_profile_for_bb (bb);
3308 init_rtl_bb_info (bb);
3309 bb->flags |= BB_RTL;
3311 /* Remove the RETURN_EXPR if we may fall through to the exit
3312 instead. */
3313 gsi = gsi_last (stmts);
3314 if (!gsi_end_p (gsi)
3315 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3317 gimple ret_stmt = gsi_stmt (gsi);
3319 gcc_assert (single_succ_p (bb));
3320 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3322 if (bb->next_bb == EXIT_BLOCK_PTR
3323 && !gimple_return_retval (ret_stmt))
3325 gsi_remove (&gsi, false);
3326 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3330 gsi = gsi_start (stmts);
3331 if (!gsi_end_p (gsi))
3333 stmt = gsi_stmt (gsi);
3334 if (gimple_code (stmt) != GIMPLE_LABEL)
3335 stmt = NULL;
3338 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3340 if (stmt || elt)
3342 last = get_last_insn ();
3344 if (stmt)
3346 expand_gimple_stmt (stmt);
3347 gsi_next (&gsi);
3350 if (elt)
3351 emit_label ((rtx) *elt);
3353 /* Java emits line number notes at the top of labels.
3354 ??? Make this go away once line number notes are obsoleted. */
3355 BB_HEAD (bb) = NEXT_INSN (last);
3356 if (NOTE_P (BB_HEAD (bb)))
3357 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3358 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3360 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3362 else
3363 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3365 NOTE_BASIC_BLOCK (note) = bb;
3367 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3369 basic_block new_bb;
3371 stmt = gsi_stmt (gsi);
3373 /* If this statement is a non-debug one, and we generate debug
3374 insns, then this one might be the last real use of a TERed
3375 SSA_NAME, but where there are still some debug uses further
3376 down. Expanding the current SSA name in such further debug
3377 uses by their RHS might lead to wrong debug info, as coalescing
3378 might make the operands of such RHS be placed into the same
3379 pseudo as something else. Like so:
3380 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3381 use(a_1);
3382 a_2 = ...
3383 #DEBUG ... => a_1
3384 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3385 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3386 the write to a_2 would actually have clobbered the place which
3387 formerly held a_0.
3389 So, instead of that, we recognize the situation, and generate
3390 debug temporaries at the last real use of TERed SSA names:
3391 a_1 = a_0 + 1;
3392 #DEBUG #D1 => a_1
3393 use(a_1);
3394 a_2 = ...
3395 #DEBUG ... => #D1 */
3397 if (MAY_HAVE_DEBUG_INSNS
3398 && SA.values
3399 && !is_gimple_debug (stmt))
3401 ssa_op_iter iter;
3402 tree op;
3403 gimple def;
3405 location_t sloc = get_curr_insn_source_location ();
3406 tree sblock = get_curr_insn_block ();
3408 /* Look for SSA names that have their last use here (TERed
3409 names always have only one real use). */
3410 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3411 if ((def = get_gimple_for_ssa_name (op)))
3413 imm_use_iterator imm_iter;
3414 use_operand_p use_p;
3415 bool have_debug_uses = false;
3417 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3419 if (gimple_debug_bind_p (USE_STMT (use_p)))
3421 have_debug_uses = true;
3422 break;
3426 if (have_debug_uses)
3428 /* OP is a TERed SSA name, with DEF its defining
3429 statement, and where OP is used in further debug
3430 instructions. Generate a debug temporary, and
3431 replace all uses of OP in debug insns with that
3432 temporary. */
3433 gimple debugstmt;
3434 tree value = gimple_assign_rhs_to_tree (def);
3435 tree vexpr = make_node (DEBUG_EXPR_DECL);
3436 rtx val;
3437 enum machine_mode mode;
3439 set_curr_insn_source_location (gimple_location (def));
3440 set_curr_insn_block (gimple_block (def));
3442 DECL_ARTIFICIAL (vexpr) = 1;
3443 TREE_TYPE (vexpr) = TREE_TYPE (value);
3444 if (DECL_P (value))
3445 mode = DECL_MODE (value);
3446 else
3447 mode = TYPE_MODE (TREE_TYPE (value));
3448 DECL_MODE (vexpr) = mode;
3450 val = gen_rtx_VAR_LOCATION
3451 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3453 val = emit_debug_insn (val);
3455 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3457 if (!gimple_debug_bind_p (debugstmt))
3458 continue;
3460 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3461 SET_USE (use_p, vexpr);
3463 update_stmt (debugstmt);
3467 set_curr_insn_source_location (sloc);
3468 set_curr_insn_block (sblock);
3471 currently_expanding_gimple_stmt = stmt;
3473 /* Expand this statement, then evaluate the resulting RTL and
3474 fixup the CFG accordingly. */
3475 if (gimple_code (stmt) == GIMPLE_COND)
3477 new_bb = expand_gimple_cond (bb, stmt);
3478 if (new_bb)
3479 return new_bb;
3481 else if (gimple_debug_bind_p (stmt))
3483 location_t sloc = get_curr_insn_source_location ();
3484 tree sblock = get_curr_insn_block ();
3485 gimple_stmt_iterator nsi = gsi;
3487 for (;;)
3489 tree var = gimple_debug_bind_get_var (stmt);
3490 tree value;
3491 rtx val;
3492 enum machine_mode mode;
3494 if (gimple_debug_bind_has_value_p (stmt))
3495 value = gimple_debug_bind_get_value (stmt);
3496 else
3497 value = NULL_TREE;
3499 last = get_last_insn ();
3501 set_curr_insn_source_location (gimple_location (stmt));
3502 set_curr_insn_block (gimple_block (stmt));
3504 if (DECL_P (var))
3505 mode = DECL_MODE (var);
3506 else
3507 mode = TYPE_MODE (TREE_TYPE (var));
3509 val = gen_rtx_VAR_LOCATION
3510 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3512 val = emit_debug_insn (val);
3514 if (dump_file && (dump_flags & TDF_DETAILS))
3516 /* We can't dump the insn with a TREE where an RTX
3517 is expected. */
3518 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3519 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3520 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3523 /* In order not to generate too many debug temporaries,
3524 we delink all uses of debug statements we already expanded.
3525 Therefore debug statements between definition and real
3526 use of TERed SSA names will continue to use the SSA name,
3527 and not be replaced with debug temps. */
3528 delink_stmt_imm_use (stmt);
3530 gsi = nsi;
3531 gsi_next (&nsi);
3532 if (gsi_end_p (nsi))
3533 break;
3534 stmt = gsi_stmt (nsi);
3535 if (!gimple_debug_bind_p (stmt))
3536 break;
3539 set_curr_insn_source_location (sloc);
3540 set_curr_insn_block (sblock);
3542 else
3544 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3546 bool can_fallthru;
3547 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3548 if (new_bb)
3550 if (can_fallthru)
3551 bb = new_bb;
3552 else
3553 return new_bb;
3556 else
3558 def_operand_p def_p;
3559 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3561 if (def_p != NULL)
3563 /* Ignore this stmt if it is in the list of
3564 replaceable expressions. */
3565 if (SA.values
3566 && bitmap_bit_p (SA.values,
3567 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3568 continue;
3570 last = expand_gimple_stmt (stmt);
3571 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3576 currently_expanding_gimple_stmt = NULL;
3578 /* Expand implicit goto and convert goto_locus. */
3579 FOR_EACH_EDGE (e, ei, bb->succs)
3581 if (e->goto_locus && e->goto_block)
3583 set_curr_insn_source_location (e->goto_locus);
3584 set_curr_insn_block (e->goto_block);
3585 e->goto_locus = curr_insn_locator ();
3587 e->goto_block = NULL;
3588 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3590 emit_jump (label_rtx_for_bb (e->dest));
3591 e->flags &= ~EDGE_FALLTHRU;
3595 /* Expanded RTL can create a jump in the last instruction of a block.
3596 This might later be assumed to be a jump to the successor and break edge insertion.
3597 We need to insert a dummy move to prevent this. PR41440. */
3598 if (single_succ_p (bb)
3599 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3600 && (last = get_last_insn ())
3601 && JUMP_P (last))
3603 rtx dummy = gen_reg_rtx (SImode);
3604 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3607 do_pending_stack_adjust ();
3609 /* Find the block tail. The last insn in the block is the insn
3610 before a barrier and/or table jump insn. */
3611 last = get_last_insn ();
3612 if (BARRIER_P (last))
3613 last = PREV_INSN (last);
3614 if (JUMP_TABLE_DATA_P (last))
3615 last = PREV_INSN (PREV_INSN (last));
3616 BB_END (bb) = last;
3618 update_bb_for_insn (bb);
3620 return bb;
3624 /* Create a basic block for initialization code. */
3626 static basic_block
3627 construct_init_block (void)
3629 basic_block init_block, first_block;
3630 edge e = NULL;
3631 int flags;
3633 /* Multiple entry points not supported yet. */
3634 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3635 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3636 init_rtl_bb_info (EXIT_BLOCK_PTR);
3637 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3638 EXIT_BLOCK_PTR->flags |= BB_RTL;
3640 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3642 /* When the entry edge points to the first basic block, we don't need a
3643 jump; otherwise we have to jump to the proper target. */
3644 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3646 tree label = gimple_block_label (e->dest);
3648 emit_jump (label_rtx (label));
3649 flags = 0;
3651 else
3652 flags = EDGE_FALLTHRU;
3654 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3655 get_last_insn (),
3656 ENTRY_BLOCK_PTR);
3657 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3658 init_block->count = ENTRY_BLOCK_PTR->count;
3659 if (e)
3661 first_block = e->dest;
3662 redirect_edge_succ (e, init_block);
3663 e = make_edge (init_block, first_block, flags);
3665 else
3666 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3667 e->probability = REG_BR_PROB_BASE;
3668 e->count = ENTRY_BLOCK_PTR->count;
3670 update_bb_for_insn (init_block);
3671 return init_block;
3674 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3675 found in the block tree. */
3677 static void
3678 set_block_levels (tree block, int level)
3680 while (block)
3682 BLOCK_NUMBER (block) = level;
3683 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3684 block = BLOCK_CHAIN (block);
3688 /* Create a block containing landing pads and similar stuff. */
3690 static void
3691 construct_exit_block (void)
3693 rtx head = get_last_insn ();
3694 rtx end;
3695 basic_block exit_block;
3696 edge e, e2;
3697 unsigned ix;
3698 edge_iterator ei;
3699 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3701 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3703 /* Make sure the locus is set to the end of the function, so that
3704 epilogue line numbers and warnings are set properly. */
3705 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3706 input_location = cfun->function_end_locus;
3708 /* The following insns belong to the top scope. */
3709 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3711 /* Generate rtl for function exit. */
3712 expand_function_end ();
3714 end = get_last_insn ();
3715 if (head == end)
3716 return;
3717 /* While emitting the function end we could move the end of the last basic block. */
3719 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3720 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3721 head = NEXT_INSN (head);
3722 exit_block = create_basic_block (NEXT_INSN (head), end,
3723 EXIT_BLOCK_PTR->prev_bb);
3724 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3725 exit_block->count = EXIT_BLOCK_PTR->count;
3727 ix = 0;
3728 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3730 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3731 if (!(e->flags & EDGE_ABNORMAL))
3732 redirect_edge_succ (e, exit_block);
3733 else
3734 ix++;
3737 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3738 e->probability = REG_BR_PROB_BASE;
3739 e->count = EXIT_BLOCK_PTR->count;
3740 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3741 if (e2 != e)
3743 e->count -= e2->count;
3744 exit_block->count -= e2->count;
3745 exit_block->frequency -= EDGE_FREQUENCY (e2);
3747 if (e->count < 0)
3748 e->count = 0;
3749 if (exit_block->count < 0)
3750 exit_block->count = 0;
3751 if (exit_block->frequency < 0)
3752 exit_block->frequency = 0;
3753 update_bb_for_insn (exit_block);
3756 /* Helper function for discover_nonconstant_array_refs.
3757 Look for ARRAY_REF nodes with non-constant indexes and mark them
3758 addressable. */
3760 static tree
3761 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3762 void *data ATTRIBUTE_UNUSED)
3764 tree t = *tp;
3766 if (IS_TYPE_OR_DECL_P (t))
3767 *walk_subtrees = 0;
3768 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3770 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3771 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3772 && (!TREE_OPERAND (t, 2)
3773 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3774 || (TREE_CODE (t) == COMPONENT_REF
3775 && (!TREE_OPERAND (t,2)
3776 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3777 || TREE_CODE (t) == BIT_FIELD_REF
3778 || TREE_CODE (t) == REALPART_EXPR
3779 || TREE_CODE (t) == IMAGPART_EXPR
3780 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3781 || CONVERT_EXPR_P (t))
3782 t = TREE_OPERAND (t, 0);
3784 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3786 t = get_base_address (t);
3787 if (t && DECL_P (t)
3788 && DECL_MODE (t) != BLKmode)
3789 TREE_ADDRESSABLE (t) = 1;
3792 *walk_subtrees = 0;
3795 return NULL_TREE;
3798 /* RTL expansion is not able to compile array references with variable
3799 offsets for arrays stored in a single register. Discover such
3800 expressions and mark variables as addressable to avoid this
3801 scenario. */
3803 static void
3804 discover_nonconstant_array_refs (void)
3806 basic_block bb;
3807 gimple_stmt_iterator gsi;
3809 FOR_EACH_BB (bb)
3810 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3812 gimple stmt = gsi_stmt (gsi);
3813 if (!is_gimple_debug (stmt))
3814 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
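/* For illustration (hypothetical user code, not from this file): a small
   array that could otherwise live in a register must be forced into
   memory once it is indexed with a runtime value.  Compiled out; for
   exposition only.  */
#if 0
static int
pick (int i)
{
  int a[2] = { 1, 2 };	/* Small enough to be held in a register...  */
  return a[i];		/* ...but the variable index makes the walker
			   above mark A as TREE_ADDRESSABLE.  */
}
#endif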
3818 /* This function sets crtl->args.internal_arg_pointer to a virtual
3819 register if DRAP is needed. Local register allocator will replace
3820 virtual_incoming_args_rtx with the virtual register. */
3822 static void
3823 expand_stack_alignment (void)
3825 rtx drap_rtx;
3826 unsigned int preferred_stack_boundary;
3828 if (! SUPPORTS_STACK_ALIGNMENT)
3829 return;
3831 if (cfun->calls_alloca
3832 || cfun->has_nonlocal_label
3833 || crtl->has_nonlocal_goto)
3834 crtl->need_drap = true;
3836 /* Call update_stack_boundary here again to update incoming stack
3837 boundary. It may set incoming stack alignment to a different
3838 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3839 use the minimum incoming stack alignment to check if it is OK
3840 to perform sibcall optimization since sibcall optimization will
3841 only align the outgoing stack to incoming stack boundary. */
3842 if (targetm.calls.update_stack_boundary)
3843 targetm.calls.update_stack_boundary ();
3845 /* The incoming stack frame has to be aligned at least at
3846 parm_stack_boundary. */
3847 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3849 /* Update crtl->stack_alignment_estimated and use it later to align
3850 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3851 exceptions since callgraph doesn't collect incoming stack alignment
3852 in this case. */
3853 if (cfun->can_throw_non_call_exceptions
3854 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3855 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3856 else
3857 preferred_stack_boundary = crtl->preferred_stack_boundary;
3858 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3859 crtl->stack_alignment_estimated = preferred_stack_boundary;
3860 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3861 crtl->stack_alignment_needed = preferred_stack_boundary;
3863 gcc_assert (crtl->stack_alignment_needed
3864 <= crtl->stack_alignment_estimated);
3866 crtl->stack_realign_needed
3867 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3868 crtl->stack_realign_tried = crtl->stack_realign_needed;
3870 crtl->stack_realign_processed = true;
3872 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3873 alignment. */
3874 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3875 drap_rtx = targetm.calls.get_drap_rtx ();
3877 /* stack_realign_drap and drap_rtx must match. */
3878 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3880 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3881 if (NULL != drap_rtx)
3883 crtl->args.internal_arg_pointer = drap_rtx;
3885 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3886 needed. */
3887 fixup_tail_calls ();
3891 /* Translate the intermediate representation contained in the CFG
3892 from GIMPLE trees to RTL.
3894 We do conversion per basic block and preserve/update the tree CFG.
3895 This implies we have to do some magic as the CFG can simultaneously
3896 consist of basic blocks containing RTL and GIMPLE trees. This can
3897 confuse the CFG hooks, so be careful to not manipulate CFG during
3898 the expansion. */
3900 static unsigned int
3901 gimple_expand_cfg (void)
3903 basic_block bb, init_block;
3904 sbitmap blocks;
3905 edge_iterator ei;
3906 edge e;
3907 rtx var_seq;
3908 unsigned i;
3910 timevar_push (TV_OUT_OF_SSA);
3911 rewrite_out_of_ssa (&SA);
3912 timevar_pop (TV_OUT_OF_SSA);
3913 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3914 sizeof (rtx));
  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
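  /* Reset the stack alignment bookkeeping; the values are raised again
     as expansion discovers stricter alignment requirements.  */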
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();
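
  /* Expand into a detached sequence so that the variable setup insns can
     be emitted before parm_birth_insn once the parameters exist.  */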
  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         selecting one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
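
  /* Eliminate the remaining PHI nodes by queueing the equivalent copies
     on the incoming edges; those insns are committed to the insn chain
     once all blocks have been expanded.  */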
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;
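
  /* Map each basic block to the CODE_LABEL that jumps to it should use;
     entries are created lazily by label_rtx_for_bb while expanding.  */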
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
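
  /* Commit the insns queued on edges by out-of-SSA and PHI elimination.
     Committing may split the edge, so the iterator is advanced only when
     nothing was inserted.  */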
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
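
  /* Expansion can leave control flow insns in the middle of what is
     still recorded as a single block; split all blocks so that the CFG
     matches the generated RTL again.  */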
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();
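
  /* Removing edges and blocks leaves gaps in the block numbering; compact
     it so that block indices are contiguous again.  */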
  compact_blocks ();
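
  /* Every insn that might need extra stack alignment has been emitted by
     now, so it is safe to decide on dynamic stack realignment and DRAP.  */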
  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};