gcc/cfgexpand.c
/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
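/* Illustrative sketch (an editorial addition, not original source): how the
   RHS classes above map onto trees.  Given GIMPLE like

       x = y + z;   -- GIMPLE_BINARY_RHS, becomes build2 (PLUS_EXPR, ...)
       x = -y;      -- GIMPLE_UNARY_RHS,  becomes build1 (NEGATE_EXPR, ...)
       x = y;       -- GIMPLE_SINGLE_RHS, reuses the operand tree itself,
                       copying it first if the location or block differs.

   The GIMPLE_SINGLE_RHS copy matters because SET_EXPR_LOCATION below
   would otherwise modify a tree shared with other statements.  */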
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
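/* Illustrative note (an editorial addition): suppose SSA names i_1 and i_2
   both have VAR_DECL `i' as their base but land in different partitions,
   one getting a pseudo register and one a stack slot.  The first set_rtl
   call records the first place in DECL_RTL (i); the second sees a
   different place and overwrites it with the pc_rtx sentinel ("multiple
   places"), so debug info never claims a single wrong location and
   var-tracking can later recover the real ones.  */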
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
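/* Worked example (an editorial addition, with made-up numbers): on an
   upward-growing frame with frame_phase 0, frame_offset 13, size 8 and
   align 8, the else-branch rounds 13 up to 16 (13 + 7, masked by -8),
   returns offset 16, and leaves frame_offset at 24.  Downward-growing
   frames round toward more negative offsets instead, so the returned
   offset is the aligned low end of the new allocation.  */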
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
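/* Examples (an editorial addition): for `union { short s; int i; }' or an
   array of such unions this returns true; for `struct { int a; union u; }'
   the FIELD_DECL walk finds the nested union and returns true; a plain
   `struct { int a; int b; }' returns false.  */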
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we need to add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for union containing structures.  Else RTL alias analysis
   comes along and due to type based aliasing rules decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, increasing.  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment, increasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, decreasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
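/* Example sort (an editorial addition, assuming a 256-bit
   MAX_SUPPORTED_STACK_ALIGNMENT): given d (size 8, alignb 64, i.e. 512-bit
   "large" alignment), a (size 4, alignb 4), b (size 32, alignb 8) and
   c (size 32, alignb 16), the order is d, a, b, c: d first because its
   alignment is unsupported, then increasing size, with the b/c size tie
   broken by the smaller alignment sorting first.  */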
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
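/* Illustration (an editorial addition): partitions are EOC-terminated lists
   threaded through stack_vars[].next.  With A = {5} and B = {3 -> 7},
   union_stack_vars (5, 3, off) yields A = {5 -> 3 -> 7}, points the
   representative field of 3 and 7 at 5, adds OFF to their offsets, and
   merges B's conflict bitmap into A, so future conflict checks against the
   merged partition only need to consult A's bitmap.  */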
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}
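/* Behavioral sketch (an editorial addition): suppose the sorted order is
   c (8 bytes), b (16), a (32), where b and c conflict (overlapping
   lifetimes) but neither conflicts with a.  Processing a, the backward
   scan first unions b into a; union_stack_vars then copies b's conflict
   with c onto a, so the subsequent stack_var_conflict_p (a, c) check
   rejects c.  The resulting partitions are {a, b} and {c}.  */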
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         it isn't as important; we'll simply use the alignment that is
         already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
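/* Worked example (an editorial addition): `offset & -offset' isolates the
   lowest set bit, i.e. the largest power of two dividing the offset.  For
   offset 40 (binary 101000) that is 8, so a variable at frame offset 40
   can be assumed 8-byte (64-bit) aligned, capped by BASE_ALIGN.  An offset
   of 0 yields align == 0, which also falls back to BASE_ALIGN.  */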
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
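/* Examples (an editorial addition): with -fstack-protector everything is
   deferred so character buffers can be re-ordered toward the guard.
   Otherwise, at -O0 or -O1 a variable in the outermost scope is allocated
   immediately; at -O0 a block-scope 16-byte scalar is also allocated
   immediately (below the 32-byte cutoff), while a 64-byte block-scope
   array is deferred for packing.  */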
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
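/* Examples (an editorial addition, assuming the default --param
   ssp-buffer-size=8): `char buf[64]' classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, `char buf[4]' as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, `int a[10]' as just
   SPCT_HAS_ARRAY, and `struct { char b[16]; }' as SPCT_HAS_AGGREGATE with
   the char-array bits OR'd in from the field walk.  */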
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
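  /* Worked example (an editorial addition, with made-up target numbers):
     with PREFERRED_STACK_BOUNDARY == 128 (align == 16) and
     STARTING_FRAME_OFFSET == 8, off == 8 and frame_phase == 8, so
     (frame_offset + 8) % 16 == 0 holds for the offsets handed out by
     alloc_stack_frame_space, matching the frame_phase invariant
     documented above.  */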
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
1836 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1837 statement STMT. */
1839 static void
1840 expand_call_stmt (gimple stmt)
1842 tree exp;
1843 tree lhs = gimple_call_lhs (stmt);
1844 size_t i;
1845 bool builtin_p;
1846 tree decl;
1848 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1850 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1851 decl = gimple_call_fndecl (stmt);
1852 builtin_p = decl && DECL_BUILT_IN (decl);
1854 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1855 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1857 for (i = 0; i < gimple_call_num_args (stmt); i++)
1859 tree arg = gimple_call_arg (stmt, i);
1860 gimple def;
1861 /* TER addresses into arguments of builtin functions so we have a
1862 chance to infer more correct alignment information. See PR39954. */
1863 if (builtin_p
1864 && TREE_CODE (arg) == SSA_NAME
1865 && (def = get_gimple_for_ssa_name (arg))
1866 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1867 arg = gimple_assign_rhs1 (def);
1868 CALL_EXPR_ARG (exp, i) = arg;
1871 if (gimple_has_side_effects (stmt))
1872 TREE_SIDE_EFFECTS (exp) = 1;
1874 if (gimple_call_nothrow_p (stmt))
1875 TREE_NOTHROW (exp) = 1;
1877 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1878 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1879 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1880 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1881 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1882 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1883 TREE_BLOCK (exp) = gimple_block (stmt);
1885 if (lhs)
1886 expand_assignment (lhs, exp, false);
1887 else
1888 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
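/* A toy model (editorial sketch, not the real tree layout) of the
   variable-length CALL_EXPR built above: build_vl_exp allocates
   gimple_call_num_args (stmt) + 3 operand slots, and CALL_EXPR_ARG (exp, i)
   addresses the slots after the three bookkeeping ones.  */
struct toy_call_expr
{
  int num_operands;	/* slot 0: operand count, num_args + 3 */
  void *fn;		/* slot 1: CALL_EXPR_FN */
  void *static_chain;	/* slot 2: CALL_EXPR_STATIC_CHAIN */
  void *args[1];	/* slots 3..: CALL_EXPR_ARG (exp, 0) onward */
};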
1891 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1892 STMT that doesn't require special handling for outgoing edges. That
1893 is, no tailcalls and no GIMPLE_COND. */
1895 static void
1896 expand_gimple_stmt_1 (gimple stmt)
1898 tree op0;
1899 switch (gimple_code (stmt))
1901 case GIMPLE_GOTO:
1902 op0 = gimple_goto_dest (stmt);
1903 if (TREE_CODE (op0) == LABEL_DECL)
1904 expand_goto (op0);
1905 else
1906 expand_computed_goto (op0);
1907 break;
1908 case GIMPLE_LABEL:
1909 expand_label (gimple_label_label (stmt));
1910 break;
1911 case GIMPLE_NOP:
1912 case GIMPLE_PREDICT:
1913 break;
1914 case GIMPLE_SWITCH:
1915 expand_case (stmt);
1916 break;
1917 case GIMPLE_ASM:
1918 expand_asm_stmt (stmt);
1919 break;
1920 case GIMPLE_CALL:
1921 expand_call_stmt (stmt);
1922 break;
1924 case GIMPLE_RETURN:
1925 op0 = gimple_return_retval (stmt);
1927 if (op0 && op0 != error_mark_node)
1929 tree result = DECL_RESULT (current_function_decl);
1931 /* If we are not returning the current function's RESULT_DECL,
1932 build an assignment to it. */
1933 if (op0 != result)
1935 /* I believe that a function's RESULT_DECL is unique. */
1936 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1938 /* ??? We'd like to use simply expand_assignment here,
1939 but this fails if the value is of BLKmode but the return
1940 decl is a register. expand_return has special handling
1941 for this combination, which eventually should move
1942 to common code. See comments there. Until then, let's
1943 build a modify expression :-/ */
1944 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1945 result, op0);
1948 if (!op0)
1949 expand_null_return ();
1950 else
1951 expand_return (op0);
1952 break;
1954 case GIMPLE_ASSIGN:
1956 tree lhs = gimple_assign_lhs (stmt);
1958 /* Tree expand used to fiddle with |= and &= of two bitfield
1959 COMPONENT_REFs here. This can't happen with gimple; the LHS
1960 of binary assigns must be a gimple reg. */
1962 if (TREE_CODE (lhs) != SSA_NAME
1963 || get_gimple_rhs_class (gimple_expr_code (stmt))
1964 == GIMPLE_SINGLE_RHS)
1966 tree rhs = gimple_assign_rhs1 (stmt);
1967 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1968 == GIMPLE_SINGLE_RHS);
1969 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1970 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1971 expand_assignment (lhs, rhs,
1972 gimple_assign_nontemporal_move_p (stmt));
1974 else
1976 rtx target, temp;
1977 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1978 struct separate_ops ops;
1979 bool promoted = false;
1981 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1982 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1983 promoted = true;
1985 ops.code = gimple_assign_rhs_code (stmt);
1986 ops.type = TREE_TYPE (lhs);
1987 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1989 case GIMPLE_TERNARY_RHS:
1990 ops.op2 = gimple_assign_rhs3 (stmt);
1991 /* Fallthru */
1992 case GIMPLE_BINARY_RHS:
1993 ops.op1 = gimple_assign_rhs2 (stmt);
1994 /* Fallthru */
1995 case GIMPLE_UNARY_RHS:
1996 ops.op0 = gimple_assign_rhs1 (stmt);
1997 break;
1998 default:
1999 gcc_unreachable ();
2001 ops.location = gimple_location (stmt);
2003 /* If we want to use a nontemporal store, force the value into a
2004 register first. If we store into a promoted register,
2005 don't directly expand to target. */
2006 temp = nontemporal || promoted ? NULL_RTX : target;
2007 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2008 EXPAND_NORMAL);
2010 if (temp == target)
2012 else if (promoted)
2014 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2015 /* If TEMP is a VOIDmode constant, use convert_modes to make
2016 sure that we properly convert it; see the sketch after this function. */
2017 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2019 temp = convert_modes (GET_MODE (target),
2020 TYPE_MODE (ops.type),
2021 temp, unsignedp);
2022 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2023 GET_MODE (target), temp, unsignedp);
2026 convert_move (SUBREG_REG (target), temp, unsignedp);
2028 else if (nontemporal && emit_storent_insn (target, temp))
2030 else
2032 temp = force_operand (temp, target);
2033 if (temp != target)
2034 emit_move_insn (target, temp);
2038 break;
2040 default:
2041 gcc_unreachable ();
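/* Why the promoted-register path above runs VOIDmode constants through
   convert_modes: the widening must honor the signedness recorded on the
   subreg.  A minimal standalone sketch (plain C, not GCC API), widening
   16 bits to 32:  */
static unsigned int
widen_16_to_32 (unsigned short bits, int unsignedp)
{
  /* 0xffff becomes 0x0000ffff when unsigned, 0xffffffff when signed.  */
  return unsignedp ? (unsigned int) bits
		   : (unsigned int) (int) (short) bits;
}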
2045 /* Expand one gimple statement STMT and return the last RTL instruction
2046 before any of the newly generated ones.
2048 In addition to generating the necessary RTL instructions this also
2049 sets REG_EH_REGION notes if necessary and sets the current source
2050 location for diagnostics. */
2052 static rtx
2053 expand_gimple_stmt (gimple stmt)
2055 int lp_nr = 0;
2056 rtx last = NULL;
2057 location_t saved_location = input_location;
2059 last = get_last_insn ();
2061 /* If this is an expression of some kind and it has an associated line
2062 number, then emit the line number before expanding the expression.
2064 We need to save and restore the file and line information so that
2065 errors discovered during expansion are emitted with the right
2066 information. It would be better if the diagnostic routines
2067 used the file/line information embedded in the tree nodes rather
2068 than globals. */
2069 gcc_assert (cfun);
2071 if (gimple_has_location (stmt))
2073 input_location = gimple_location (stmt);
2074 set_curr_insn_source_location (input_location);
2076 /* Record where the insns produced belong. */
2077 set_curr_insn_block (gimple_block (stmt));
2080 expand_gimple_stmt_1 (stmt);
2081 /* Free any temporaries used to evaluate this statement. */
2082 free_temp_slots ();
2084 input_location = saved_location;
2086 /* Mark all insns that may trap. */
2087 lp_nr = lookup_stmt_eh_lp (stmt);
2088 if (lp_nr)
2090 rtx insn;
2091 for (insn = next_real_insn (last); insn;
2092 insn = next_real_insn (insn))
2094 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2095 /* If we want exceptions for non-call insns, any
2096 may_trap_p instruction may throw. */
2097 && GET_CODE (PATTERN (insn)) != CLOBBER
2098 && GET_CODE (PATTERN (insn)) != USE
2099 && insn_could_throw_p (insn))
2100 make_reg_eh_region_note (insn, 0, lp_nr);
2104 return last;
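/* For example (editorial sketch): expanding "tmp = *p" inside a try
   region whose landing pad has LP_NR == 1 may emit several insns; each
   insn that can really trap (so neither a USE nor a CLOBBER) gets a
   REG_EH_REGION note carrying 1, which keeps the EH edges correct in
   the RTL CFG.  */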
2107 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2108 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2109 generated a tail call (something that might be denied by the ABI
2110 rules governing the call; see calls.c).
2112 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2113 can still reach the rest of BB. The case here is __builtin_sqrt,
2114 where the NaN result goes through the external function (with a
2115 tailcall) and the normal result happens via a sqrt instruction. */
2117 static basic_block
2118 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2120 rtx last2, last;
2121 edge e;
2122 edge_iterator ei;
2123 int probability;
2124 gcov_type count;
2126 last2 = last = expand_gimple_stmt (stmt);
2128 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2129 if (CALL_P (last) && SIBLING_CALL_P (last))
2130 goto found;
2132 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2134 *can_fallthru = true;
2135 return NULL;
2137 found:
2138 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2139 Any instructions emitted here are about to be deleted. */
2140 do_pending_stack_adjust ();
2142 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2143 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2144 EH or abnormal edges, we shouldn't have created a tail call in
2145 the first place. So it seems to me we should just be removing
2146 all edges here, or redirecting the existing fallthru edge to
2147 the exit block. */
2149 probability = 0;
2150 count = 0;
2152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2154 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2156 if (e->dest != EXIT_BLOCK_PTR)
2158 e->dest->count -= e->count;
2159 e->dest->frequency -= EDGE_FREQUENCY (e);
2160 if (e->dest->count < 0)
2161 e->dest->count = 0;
2162 if (e->dest->frequency < 0)
2163 e->dest->frequency = 0;
2165 count += e->count;
2166 probability += e->probability;
2167 remove_edge (e);
2169 else
2170 ei_next (&ei);
2173 /* This is somewhat ugly: the call_expr expander often emits instructions
2174 after the sibcall (to perform the function return). These confuse the
2175 find_many_sub_basic_blocks code, so we need to get rid of these. */
2176 last = NEXT_INSN (last);
2177 gcc_assert (BARRIER_P (last));
2179 *can_fallthru = false;
2180 while (NEXT_INSN (last))
2182 /* For instance, an sqrt builtin expander expands an if with a
2183 sibcall in the then-arm and a label for the else-arm. */
2184 if (LABEL_P (NEXT_INSN (last)))
2186 *can_fallthru = true;
2187 break;
2189 delete_insn (NEXT_INSN (last));
2192 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2193 e->probability += probability;
2194 e->count += count;
2195 BB_END (bb) = last;
2196 update_bb_for_insn (bb);
2198 if (NEXT_INSN (last))
2200 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2202 last = BB_END (bb);
2203 if (BARRIER_P (last))
2204 BB_END (bb) = PREV_INSN (last);
2207 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2209 return bb;
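/* A source-level sketch of that conditional-tailcall shape (editorial
   illustration; hw_sqrt is a hypothetical stand-in for the inline sqrt
   instruction):

     double f (double x)
     {
       double r = hw_sqrt (x);
       if (r == r)        -- not a NaN: normal result, can fall through
         return r;
       return sqrt (x);   -- sibcall handles the NaN case
     }  */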
2212 /* Return the difference between the floor and the truncated result of
2213 a signed division by OP1 with remainder MOD. */
2214 static rtx
2215 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2217 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2218 return gen_rtx_IF_THEN_ELSE
2219 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2220 gen_rtx_IF_THEN_ELSE
2221 (mode, gen_rtx_LT (BImode,
2222 gen_rtx_DIV (mode, op1, mod),
2223 const0_rtx),
2224 constm1_rtx, const0_rtx),
2225 const0_rtx);
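/* A standalone C sketch of the identity encoded above (not GCC API):
   floor division is truncating division plus this adjustment.  The sign
   test mirrors "op1 / mod < 0", i.e. divisor and remainder have opposite
   signs.  floor_div (-7, 2) == -4, while -7 / 2 truncates to -3.  */
static int
floor_div (int a, int b)
{
  int q = a / b;	/* truncating division */
  int r = a % b;	/* remainder, same sign as A */
  int adj = (r != 0 && (r ^ b) < 0) ? -1 : 0;
  return q + adj;
}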
2228 /* Return the difference between the ceil and the truncated result of
2229 a signed division by OP1 with remainder MOD. */
2230 static rtx
2231 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2233 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2234 return gen_rtx_IF_THEN_ELSE
2235 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2236 gen_rtx_IF_THEN_ELSE
2237 (mode, gen_rtx_GT (BImode,
2238 gen_rtx_DIV (mode, op1, mod),
2239 const0_rtx),
2240 const1_rtx, const0_rtx),
2241 const0_rtx);
2244 /* Return the difference between the ceil and the truncated result of
2245 an unsigned division by OP1 with remainder MOD. */
2246 static rtx
2247 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2249 /* (mod != 0 ? 1 : 0) */
2250 return gen_rtx_IF_THEN_ELSE
2251 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2252 const1_rtx, const0_rtx);
2255 /* Return the difference between the rounded and the truncated result
2256 of a signed division by OP1 with remainder MOD. Halfway cases are
2257 rounded away from zero, rather than to the nearest even number. */
2258 static rtx
2259 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2261 /* (abs (mod) >= abs (op1) - abs (mod)
2262 ? (op1 / mod > 0 ? 1 : -1)
2263 : 0) */
2264 return gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2266 gen_rtx_MINUS (mode,
2267 gen_rtx_ABS (mode, op1),
2268 gen_rtx_ABS (mode, mod))),
2269 gen_rtx_IF_THEN_ELSE
2270 (mode, gen_rtx_GT (BImode,
2271 gen_rtx_DIV (mode, op1, mod),
2272 const0_rtx),
2273 const1_rtx, constm1_rtx),
2274 const0_rtx);
2277 /* Return the difference between the rounded and the truncated result
2278 of an unsigned division by OP1 with remainder MOD. Halfway cases
2279 are rounded away from zero, rather than to the nearest even
2280 number. */
2281 static rtx
2282 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2284 /* (mod >= op1 - mod ? 1 : 0) */
2285 return gen_rtx_IF_THEN_ELSE
2286 (mode, gen_rtx_GE (BImode, mod,
2287 gen_rtx_MINUS (mode, op1, mod)),
2288 const1_rtx, const0_rtx);
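/* A standalone C sketch of the unsigned round-to-nearest identity above
   (not GCC API); halfway cases round away from zero, i.e. upward:
   round_udiv (7, 2) == 4 and round_udiv (5, 3) == 2.  */
static unsigned int
round_udiv (unsigned int a, unsigned int b)
{
  unsigned int r = a % b;
  return a / b + (r >= b - r ? 1 : 0);
}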
2291 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2292 any rtl. */
2294 static rtx
2295 convert_debug_memory_address (enum machine_mode mode, rtx x)
2297 enum machine_mode xmode = GET_MODE (x);
2299 #ifndef POINTERS_EXTEND_UNSIGNED
2300 gcc_assert (mode == Pmode);
2301 gcc_assert (xmode == mode || xmode == VOIDmode);
2302 #else
2303 gcc_assert (mode == Pmode || mode == ptr_mode);
2305 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2306 return x;
2308 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2309 x = simplify_gen_subreg (mode, x, xmode,
2310 subreg_lowpart_offset
2311 (mode, xmode));
2312 else if (POINTERS_EXTEND_UNSIGNED > 0)
2313 x = gen_rtx_ZERO_EXTEND (mode, x);
2314 else if (!POINTERS_EXTEND_UNSIGNED)
2315 x = gen_rtx_SIGN_EXTEND (mode, x);
2316 else
2317 gcc_unreachable ();
2318 #endif /* POINTERS_EXTEND_UNSIGNED */
2320 return x;
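/* A standalone sketch (not GCC API) of the POINTERS_EXTEND_UNSIGNED
   choice above, assuming a 32-bit ptr_mode inside a 64-bit Pmode:  */
static unsigned long long
extend_ptr32 (unsigned int addr, int pointers_extend_unsigned)
{
  if (pointers_extend_unsigned > 0)
    return addr;					/* ZERO_EXTEND */
  return (unsigned long long) (long long) (int) addr;	/* SIGN_EXTEND */
}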
2323 /* Return an RTX equivalent to the value of the tree expression
2324 EXP. */
2326 static rtx
2327 expand_debug_expr (tree exp)
2329 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2330 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2331 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2332 addr_space_t as;
2334 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2336 case tcc_expression:
2337 switch (TREE_CODE (exp))
2339 case COND_EXPR:
2340 case DOT_PROD_EXPR:
2341 case WIDEN_MULT_PLUS_EXPR:
2342 case WIDEN_MULT_MINUS_EXPR:
2343 case FMA_EXPR:
2344 goto ternary;
2346 case TRUTH_ANDIF_EXPR:
2347 case TRUTH_ORIF_EXPR:
2348 case TRUTH_AND_EXPR:
2349 case TRUTH_OR_EXPR:
2350 case TRUTH_XOR_EXPR:
2351 goto binary;
2353 case TRUTH_NOT_EXPR:
2354 goto unary;
2356 default:
2357 break;
2359 break;
2361 ternary:
2362 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2363 if (!op2)
2364 return NULL_RTX;
2365 /* Fall through. */
2367 binary:
2368 case tcc_binary:
2369 case tcc_comparison:
2370 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2371 if (!op1)
2372 return NULL_RTX;
2373 /* Fall through. */
2375 unary:
2376 case tcc_unary:
2377 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2378 if (!op0)
2379 return NULL_RTX;
2380 break;
2382 case tcc_type:
2383 case tcc_statement:
2384 gcc_unreachable ();
2386 case tcc_constant:
2387 case tcc_exceptional:
2388 case tcc_declaration:
2389 case tcc_reference:
2390 case tcc_vl_exp:
2391 break;
2394 switch (TREE_CODE (exp))
2396 case STRING_CST:
2397 if (!lookup_constant_def (exp))
2399 if (strlen (TREE_STRING_POINTER (exp)) + 1
2400 != (size_t) TREE_STRING_LENGTH (exp))
2401 return NULL_RTX;
2402 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2403 op0 = gen_rtx_MEM (BLKmode, op0);
2404 set_mem_attributes (op0, exp, 0);
2405 return op0;
2407 /* Fall through... */
2409 case INTEGER_CST:
2410 case REAL_CST:
2411 case FIXED_CST:
2412 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2413 return op0;
2415 case COMPLEX_CST:
2416 gcc_assert (COMPLEX_MODE_P (mode));
2417 op0 = expand_debug_expr (TREE_REALPART (exp));
2418 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2419 return gen_rtx_CONCAT (mode, op0, op1);
2421 case DEBUG_EXPR_DECL:
2422 op0 = DECL_RTL_IF_SET (exp);
2424 if (op0)
2425 return op0;
2427 op0 = gen_rtx_DEBUG_EXPR (mode);
2428 DEBUG_EXPR_TREE_DECL (op0) = exp;
2429 SET_DECL_RTL (exp, op0);
2431 return op0;
2433 case VAR_DECL:
2434 case PARM_DECL:
2435 case FUNCTION_DECL:
2436 case LABEL_DECL:
2437 case CONST_DECL:
2438 case RESULT_DECL:
2439 op0 = DECL_RTL_IF_SET (exp);
2441 /* This decl was probably optimized away. */
2442 if (!op0)
2444 if (TREE_CODE (exp) != VAR_DECL
2445 || DECL_EXTERNAL (exp)
2446 || !TREE_STATIC (exp)
2447 || !DECL_NAME (exp)
2448 || DECL_HARD_REGISTER (exp)
2449 || mode == VOIDmode)
2450 return NULL;
2452 op0 = make_decl_rtl_for_debug (exp);
2453 if (!MEM_P (op0)
2454 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2455 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2456 return NULL;
2458 else
2459 op0 = copy_rtx (op0);
2461 if (GET_MODE (op0) == BLKmode
2462 /* If op0 is not BLKmode but MODE is, adjust_mode
2463 below would ICE. While it is likely a FE bug,
2464 try to be robust here. See PR43166. */
2465 || mode == BLKmode
2466 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2468 gcc_assert (MEM_P (op0));
2469 op0 = adjust_address_nv (op0, mode, 0);
2470 return op0;
2473 /* Fall through. */
2475 adjust_mode:
2476 case PAREN_EXPR:
2477 case NOP_EXPR:
2478 case CONVERT_EXPR:
2480 enum machine_mode inner_mode = GET_MODE (op0);
2482 if (mode == inner_mode)
2483 return op0;
2485 if (inner_mode == VOIDmode)
2487 if (TREE_CODE (exp) == SSA_NAME)
2488 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2489 else
2490 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2491 if (mode == inner_mode)
2492 return op0;
2495 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2497 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2498 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2499 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2500 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2501 else
2502 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2504 else if (FLOAT_MODE_P (mode))
2506 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2507 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2508 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2509 else
2510 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2512 else if (FLOAT_MODE_P (inner_mode))
2514 if (unsignedp)
2515 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2516 else
2517 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2519 else if (CONSTANT_P (op0)
2520 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2521 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2522 subreg_lowpart_offset (mode,
2523 inner_mode));
2524 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2525 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2526 : unsignedp)
2527 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2528 else
2529 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2531 return op0;
2534 case MEM_REF:
2535 case INDIRECT_REF:
2536 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2537 if (!op0)
2538 return NULL;
2540 if (TREE_CODE (exp) == MEM_REF)
2542 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2543 || (GET_CODE (op0) == PLUS
2544 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2545 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2546 Instead just use get_inner_reference. */
2547 goto component_ref;
2549 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2550 if (!op1 || !CONST_INT_P (op1))
2551 return NULL;
2553 op0 = plus_constant (op0, INTVAL (op1));
2556 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2557 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2558 else
2559 as = ADDR_SPACE_GENERIC;
2561 op0 = gen_rtx_MEM (mode, op0);
2563 set_mem_attributes (op0, exp, 0);
2564 set_mem_addr_space (op0, as);
2566 return op0;
2568 case TARGET_MEM_REF:
2569 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2570 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2571 return NULL;
2573 op0 = expand_debug_expr
2574 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2575 if (!op0)
2576 return NULL;
2578 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2580 op0 = gen_rtx_MEM (mode, op0);
2582 set_mem_attributes (op0, exp, 0);
2583 set_mem_addr_space (op0, as);
2585 return op0;
2587 component_ref:
2588 case ARRAY_REF:
2589 case ARRAY_RANGE_REF:
2590 case COMPONENT_REF:
2591 case BIT_FIELD_REF:
2592 case REALPART_EXPR:
2593 case IMAGPART_EXPR:
2594 case VIEW_CONVERT_EXPR:
2596 enum machine_mode mode1;
2597 HOST_WIDE_INT bitsize, bitpos;
2598 tree offset;
2599 int volatilep = 0;
2600 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2601 &mode1, &unsignedp, &volatilep, false);
2602 rtx orig_op0;
2604 if (bitsize == 0)
2605 return NULL;
2607 orig_op0 = op0 = expand_debug_expr (tem);
2609 if (!op0)
2610 return NULL;
2612 if (offset)
2614 enum machine_mode addrmode, offmode;
2616 if (!MEM_P (op0))
2617 return NULL;
2619 op0 = XEXP (op0, 0);
2620 addrmode = GET_MODE (op0);
2621 if (addrmode == VOIDmode)
2622 addrmode = Pmode;
2624 op1 = expand_debug_expr (offset);
2625 if (!op1)
2626 return NULL;
2628 offmode = GET_MODE (op1);
2629 if (offmode == VOIDmode)
2630 offmode = TYPE_MODE (TREE_TYPE (offset));
2632 if (addrmode != offmode)
2633 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2634 subreg_lowpart_offset (addrmode,
2635 offmode));
2637 /* Don't use offset_address here, we don't need a
2638 recognizable address, and we don't want to generate
2639 code. */
2640 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2643 if (MEM_P (op0))
2645 if (mode1 == VOIDmode)
2646 /* Bitfield. */
2647 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2648 if (bitpos >= BITS_PER_UNIT)
2650 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2651 bitpos %= BITS_PER_UNIT;
2653 else if (bitpos < 0)
2655 HOST_WIDE_INT units
2656 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2657 op0 = adjust_address_nv (op0, mode1, units);
2658 bitpos += units * BITS_PER_UNIT;
2660 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2661 op0 = adjust_address_nv (op0, mode, 0);
2662 else if (GET_MODE (op0) != mode1)
2663 op0 = adjust_address_nv (op0, mode1, 0);
2664 else
2665 op0 = copy_rtx (op0);
2666 if (op0 == orig_op0)
2667 op0 = shallow_copy_rtx (op0);
2668 set_mem_attributes (op0, exp, 0);
2671 if (bitpos == 0 && mode == GET_MODE (op0))
2672 return op0;
2674 if (bitpos < 0)
2675 return NULL;
2677 if (GET_MODE (op0) == BLKmode)
2678 return NULL;
2680 if ((bitpos % BITS_PER_UNIT) == 0
2681 && bitsize == GET_MODE_BITSIZE (mode1))
2683 enum machine_mode opmode = GET_MODE (op0);
2685 if (opmode == VOIDmode)
2686 opmode = TYPE_MODE (TREE_TYPE (tem));
2688 /* This condition may hold if we're expanding the address
2689 right past the end of an array that turned out not to
2690 be addressable (i.e., the address was only computed in
2691 debug stmts). The gen_subreg below would rightfully
2692 crash, and the address doesn't really exist, so just
2693 drop it. */
2694 if (bitpos >= GET_MODE_BITSIZE (opmode))
2695 return NULL;
2697 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2698 return simplify_gen_subreg (mode, op0, opmode,
2699 bitpos / BITS_PER_UNIT);
2702 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2703 && TYPE_UNSIGNED (TREE_TYPE (exp))
2704 ? SIGN_EXTRACT
2705 : ZERO_EXTRACT, mode,
2706 GET_MODE (op0) != VOIDmode
2707 ? GET_MODE (op0)
2708 : TYPE_MODE (TREE_TYPE (tem)),
2709 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2712 case ABS_EXPR:
2713 return gen_rtx_ABS (mode, op0);
2715 case NEGATE_EXPR:
2716 return gen_rtx_NEG (mode, op0);
2718 case BIT_NOT_EXPR:
2719 return gen_rtx_NOT (mode, op0);
2721 case FLOAT_EXPR:
2722 if (unsignedp)
2723 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2724 else
2725 return gen_rtx_FLOAT (mode, op0);
2727 case FIX_TRUNC_EXPR:
2728 if (unsignedp)
2729 return gen_rtx_UNSIGNED_FIX (mode, op0);
2730 else
2731 return gen_rtx_FIX (mode, op0);
2733 case POINTER_PLUS_EXPR:
2734 /* For the rare target where pointers are not the same size as
2735 size_t, we need to check for mis-matched modes and correct
2736 the addend. */
2737 if (op0 && op1
2738 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2739 && GET_MODE (op0) != GET_MODE (op1))
2741 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2742 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2743 else
2744 /* We always sign-extend, regardless of the signedness of
2745 the operand, because the operand is always unsigned
2746 here even if the original C expression is signed. */
2747 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2749 /* Fall through. */
2750 case PLUS_EXPR:
2751 return gen_rtx_PLUS (mode, op0, op1);
2753 case MINUS_EXPR:
2754 return gen_rtx_MINUS (mode, op0, op1);
2756 case MULT_EXPR:
2757 return gen_rtx_MULT (mode, op0, op1);
2759 case RDIV_EXPR:
2760 case TRUNC_DIV_EXPR:
2761 case EXACT_DIV_EXPR:
2762 if (unsignedp)
2763 return gen_rtx_UDIV (mode, op0, op1);
2764 else
2765 return gen_rtx_DIV (mode, op0, op1);
2767 case TRUNC_MOD_EXPR:
2768 if (unsignedp)
2769 return gen_rtx_UMOD (mode, op0, op1);
2770 else
2771 return gen_rtx_MOD (mode, op0, op1);
2773 case FLOOR_DIV_EXPR:
2774 if (unsignedp)
2775 return gen_rtx_UDIV (mode, op0, op1);
2776 else
2778 rtx div = gen_rtx_DIV (mode, op0, op1);
2779 rtx mod = gen_rtx_MOD (mode, op0, op1);
2780 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2781 return gen_rtx_PLUS (mode, div, adj);
2784 case FLOOR_MOD_EXPR:
2785 if (unsignedp)
2786 return gen_rtx_UMOD (mode, op0, op1);
2787 else
2789 rtx mod = gen_rtx_MOD (mode, op0, op1);
2790 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2791 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2792 return gen_rtx_PLUS (mode, mod, adj);
2795 case CEIL_DIV_EXPR:
2796 if (unsignedp)
2798 rtx div = gen_rtx_UDIV (mode, op0, op1);
2799 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2800 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2801 return gen_rtx_PLUS (mode, div, adj);
2803 else
2805 rtx div = gen_rtx_DIV (mode, op0, op1);
2806 rtx mod = gen_rtx_MOD (mode, op0, op1);
2807 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2808 return gen_rtx_PLUS (mode, div, adj);
2811 case CEIL_MOD_EXPR:
2812 if (unsignedp)
2814 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2815 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2816 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2817 return gen_rtx_PLUS (mode, mod, adj);
2819 else
2821 rtx mod = gen_rtx_MOD (mode, op0, op1);
2822 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2823 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2824 return gen_rtx_PLUS (mode, mod, adj);
2827 case ROUND_DIV_EXPR:
2828 if (unsignedp)
2830 rtx div = gen_rtx_UDIV (mode, op0, op1);
2831 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2832 rtx adj = round_udiv_adjust (mode, mod, op1);
2833 return gen_rtx_PLUS (mode, div, adj);
2835 else
2837 rtx div = gen_rtx_DIV (mode, op0, op1);
2838 rtx mod = gen_rtx_MOD (mode, op0, op1);
2839 rtx adj = round_sdiv_adjust (mode, mod, op1);
2840 return gen_rtx_PLUS (mode, div, adj);
2843 case ROUND_MOD_EXPR:
2844 if (unsignedp)
2846 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2847 rtx adj = round_udiv_adjust (mode, mod, op1);
2848 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2849 return gen_rtx_PLUS (mode, mod, adj);
2851 else
2853 rtx mod = gen_rtx_MOD (mode, op0, op1);
2854 rtx adj = round_sdiv_adjust (mode, mod, op1);
2855 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2856 return gen_rtx_PLUS (mode, mod, adj);
2859 case LSHIFT_EXPR:
2860 return gen_rtx_ASHIFT (mode, op0, op1);
2862 case RSHIFT_EXPR:
2863 if (unsignedp)
2864 return gen_rtx_LSHIFTRT (mode, op0, op1);
2865 else
2866 return gen_rtx_ASHIFTRT (mode, op0, op1);
2868 case LROTATE_EXPR:
2869 return gen_rtx_ROTATE (mode, op0, op1);
2871 case RROTATE_EXPR:
2872 return gen_rtx_ROTATERT (mode, op0, op1);
2874 case MIN_EXPR:
2875 if (unsignedp)
2876 return gen_rtx_UMIN (mode, op0, op1);
2877 else
2878 return gen_rtx_SMIN (mode, op0, op1);
2880 case MAX_EXPR:
2881 if (unsignedp)
2882 return gen_rtx_UMAX (mode, op0, op1);
2883 else
2884 return gen_rtx_SMAX (mode, op0, op1);
2886 case BIT_AND_EXPR:
2887 case TRUTH_AND_EXPR:
2888 return gen_rtx_AND (mode, op0, op1);
2890 case BIT_IOR_EXPR:
2891 case TRUTH_OR_EXPR:
2892 return gen_rtx_IOR (mode, op0, op1);
2894 case BIT_XOR_EXPR:
2895 case TRUTH_XOR_EXPR:
2896 return gen_rtx_XOR (mode, op0, op1);
2898 case TRUTH_ANDIF_EXPR:
2899 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2901 case TRUTH_ORIF_EXPR:
2902 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2904 case TRUTH_NOT_EXPR:
2905 return gen_rtx_EQ (mode, op0, const0_rtx);
2907 case LT_EXPR:
2908 if (unsignedp)
2909 return gen_rtx_LTU (mode, op0, op1);
2910 else
2911 return gen_rtx_LT (mode, op0, op1);
2913 case LE_EXPR:
2914 if (unsignedp)
2915 return gen_rtx_LEU (mode, op0, op1);
2916 else
2917 return gen_rtx_LE (mode, op0, op1);
2919 case GT_EXPR:
2920 if (unsignedp)
2921 return gen_rtx_GTU (mode, op0, op1);
2922 else
2923 return gen_rtx_GT (mode, op0, op1);
2925 case GE_EXPR:
2926 if (unsignedp)
2927 return gen_rtx_GEU (mode, op0, op1);
2928 else
2929 return gen_rtx_GE (mode, op0, op1);
2931 case EQ_EXPR:
2932 return gen_rtx_EQ (mode, op0, op1);
2934 case NE_EXPR:
2935 return gen_rtx_NE (mode, op0, op1);
2937 case UNORDERED_EXPR:
2938 return gen_rtx_UNORDERED (mode, op0, op1);
2940 case ORDERED_EXPR:
2941 return gen_rtx_ORDERED (mode, op0, op1);
2943 case UNLT_EXPR:
2944 return gen_rtx_UNLT (mode, op0, op1);
2946 case UNLE_EXPR:
2947 return gen_rtx_UNLE (mode, op0, op1);
2949 case UNGT_EXPR:
2950 return gen_rtx_UNGT (mode, op0, op1);
2952 case UNGE_EXPR:
2953 return gen_rtx_UNGE (mode, op0, op1);
2955 case UNEQ_EXPR:
2956 return gen_rtx_UNEQ (mode, op0, op1);
2958 case LTGT_EXPR:
2959 return gen_rtx_LTGT (mode, op0, op1);
2961 case COND_EXPR:
2962 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2964 case COMPLEX_EXPR:
2965 gcc_assert (COMPLEX_MODE_P (mode));
2966 if (GET_MODE (op0) == VOIDmode)
2967 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2968 if (GET_MODE (op1) == VOIDmode)
2969 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2970 return gen_rtx_CONCAT (mode, op0, op1);
2972 case CONJ_EXPR:
2973 if (GET_CODE (op0) == CONCAT)
2974 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2975 gen_rtx_NEG (GET_MODE_INNER (mode),
2976 XEXP (op0, 1)));
2977 else
2979 enum machine_mode imode = GET_MODE_INNER (mode);
2980 rtx re, im;
2982 if (MEM_P (op0))
2984 re = adjust_address_nv (op0, imode, 0);
2985 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2987 else
2989 enum machine_mode ifmode = int_mode_for_mode (mode);
2990 enum machine_mode ihmode = int_mode_for_mode (imode);
2991 rtx halfsize;
2992 if (ifmode == BLKmode || ihmode == BLKmode)
2993 return NULL;
2994 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2995 re = op0;
2996 if (mode != ifmode)
2997 re = gen_rtx_SUBREG (ifmode, re, 0);
2998 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2999 if (imode != ihmode)
3000 re = gen_rtx_SUBREG (imode, re, 0);
3001 im = copy_rtx (op0);
3002 if (mode != ifmode)
3003 im = gen_rtx_SUBREG (ifmode, im, 0);
3004 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3005 if (imode != ihmode)
3006 im = gen_rtx_SUBREG (imode, im, 0);
3008 im = gen_rtx_NEG (imode, im);
3009 return gen_rtx_CONCAT (mode, re, im);
3012 case ADDR_EXPR:
3013 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3014 if (!op0 || !MEM_P (op0))
3016 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3017 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3018 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3019 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3020 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3022 if (handled_component_p (TREE_OPERAND (exp, 0)))
3024 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3025 tree decl
3026 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3027 &bitoffset, &bitsize, &maxsize);
3028 if ((TREE_CODE (decl) == VAR_DECL
3029 || TREE_CODE (decl) == PARM_DECL
3030 || TREE_CODE (decl) == RESULT_DECL)
3031 && !TREE_ADDRESSABLE (decl)
3032 && (bitoffset % BITS_PER_UNIT) == 0
3033 && bitsize > 0
3034 && bitsize == maxsize)
3035 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3036 bitoffset / BITS_PER_UNIT);
3039 return NULL;
3042 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3044 return op0;
3046 case VECTOR_CST:
3047 exp = build_constructor_from_list (TREE_TYPE (exp),
3048 TREE_VECTOR_CST_ELTS (exp));
3049 /* Fall through. */
3051 case CONSTRUCTOR:
3052 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3054 unsigned i;
3055 tree val;
3057 op0 = gen_rtx_CONCATN
3058 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3060 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3062 op1 = expand_debug_expr (val);
3063 if (!op1)
3064 return NULL;
3065 XVECEXP (op0, 0, i) = op1;
3068 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3070 op1 = expand_debug_expr
3071 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3073 if (!op1)
3074 return NULL;
3076 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3077 XVECEXP (op0, 0, i) = op1;
3080 return op0;
3082 else
3083 goto flag_unsupported;
3085 case CALL_EXPR:
3086 /* ??? Maybe handle some builtins? */
3087 return NULL;
3089 case SSA_NAME:
3091 gimple g = get_gimple_for_ssa_name (exp);
3092 if (g)
3094 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3095 if (!op0)
3096 return NULL;
3098 else
3100 int part = var_to_partition (SA.map, exp);
3102 if (part == NO_PARTITION)
3103 return NULL;
3105 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3107 op0 = SA.partition_to_pseudo[part];
3109 goto adjust_mode;
3112 case ERROR_MARK:
3113 return NULL;
3115 /* Vector stuff. For most of these tree codes we have no rtl codes. */
3116 case REALIGN_LOAD_EXPR:
3117 case REDUC_MAX_EXPR:
3118 case REDUC_MIN_EXPR:
3119 case REDUC_PLUS_EXPR:
3120 case VEC_COND_EXPR:
3121 case VEC_EXTRACT_EVEN_EXPR:
3122 case VEC_EXTRACT_ODD_EXPR:
3123 case VEC_INTERLEAVE_HIGH_EXPR:
3124 case VEC_INTERLEAVE_LOW_EXPR:
3125 case VEC_LSHIFT_EXPR:
3126 case VEC_PACK_FIX_TRUNC_EXPR:
3127 case VEC_PACK_SAT_EXPR:
3128 case VEC_PACK_TRUNC_EXPR:
3129 case VEC_RSHIFT_EXPR:
3130 case VEC_UNPACK_FLOAT_HI_EXPR:
3131 case VEC_UNPACK_FLOAT_LO_EXPR:
3132 case VEC_UNPACK_HI_EXPR:
3133 case VEC_UNPACK_LO_EXPR:
3134 case VEC_WIDEN_MULT_HI_EXPR:
3135 case VEC_WIDEN_MULT_LO_EXPR:
3136 return NULL;
3138 /* Misc codes. */
3139 case ADDR_SPACE_CONVERT_EXPR:
3140 case FIXED_CONVERT_EXPR:
3141 case OBJ_TYPE_REF:
3142 case WITH_SIZE_EXPR:
3143 return NULL;
3145 case DOT_PROD_EXPR:
3146 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3147 && SCALAR_INT_MODE_P (mode))
3149 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3150 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3151 else
3152 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3153 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3154 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3155 else
3156 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3157 op0 = gen_rtx_MULT (mode, op0, op1);
3158 return gen_rtx_PLUS (mode, op0, op2);
3160 return NULL;
3162 case WIDEN_MULT_EXPR:
3163 case WIDEN_MULT_PLUS_EXPR:
3164 case WIDEN_MULT_MINUS_EXPR:
3165 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3166 && SCALAR_INT_MODE_P (mode))
3168 enum machine_mode inner_mode = GET_MODE (op0);
3169 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3170 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3171 else
3172 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3173 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3174 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3175 else
3176 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3177 op0 = gen_rtx_MULT (mode, op0, op1);
3178 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3179 return op0;
3180 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3181 return gen_rtx_PLUS (mode, op0, op2);
3182 else
3183 return gen_rtx_MINUS (mode, op2, op0);
3185 return NULL;
3187 case WIDEN_SUM_EXPR:
3188 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3189 && SCALAR_INT_MODE_P (mode))
3191 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3192 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3193 else
3194 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3195 return gen_rtx_PLUS (mode, op0, op1);
3197 return NULL;
3199 case FMA_EXPR:
3200 return gen_rtx_FMA (mode, op0, op1, op2);
3202 default:
3203 flag_unsupported:
3204 #ifdef ENABLE_CHECKING
3205 debug_tree (exp);
3206 gcc_unreachable ();
3207 #else
3208 return NULL;
3209 #endif
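/* A worked example (editorial sketch): for a TERed SSA name a_2 defined
   by "a_2 = b_1 + 1", the SSA_NAME case above re-expands the defining
   RHS, so a debug reference to a_2 becomes something like
   (plus:SI (reg b) (const_int 1)) instead of naming a pseudo that may
   never have been allocated.  */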
3213 /* Expand the _LOCs in debug insns. We run this after expanding all
3214 regular insns, so that any variables referenced in the function
3215 will have their DECL_RTLs set. */
3217 static void
3218 expand_debug_locations (void)
3220 rtx insn;
3221 rtx last = get_last_insn ();
3222 int save_strict_alias = flag_strict_aliasing;
3224 /* New alias sets while setting up memory attributes cause
3225 -fcompare-debug failures, even though they don't bring about any
3226 codegen changes. */
3227 flag_strict_aliasing = 0;
3229 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3230 if (DEBUG_INSN_P (insn))
3232 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3233 rtx val;
3234 enum machine_mode mode;
3236 if (value == NULL_TREE)
3237 val = NULL_RTX;
3238 else
3240 val = expand_debug_expr (value);
3241 gcc_assert (last == get_last_insn ());
3244 if (!val)
3245 val = gen_rtx_UNKNOWN_VAR_LOC ();
3246 else
3248 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3250 gcc_assert (mode == GET_MODE (val)
3251 || (GET_MODE (val) == VOIDmode
3252 && (CONST_INT_P (val)
3253 || GET_CODE (val) == CONST_FIXED
3254 || GET_CODE (val) == CONST_DOUBLE
3255 || GET_CODE (val) == LABEL_REF)));
3258 INSN_VAR_LOCATION_LOC (insn) = val;
3261 flag_strict_aliasing = save_strict_alias;
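/* For instance (editorial sketch): a debug insn whose location is still
   the tree "x_1 + 1" is rewritten to hold the equivalent RTL
   (plus (reg x) (const_int 1)); when expand_debug_expr cannot express
   the value, the location becomes gen_rtx_UNKNOWN_VAR_LOC ().  */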
3264 /* Expand basic block BB from GIMPLE trees to RTL. */
3266 static basic_block
3267 expand_gimple_basic_block (basic_block bb)
3269 gimple_stmt_iterator gsi;
3270 gimple_seq stmts;
3271 gimple stmt = NULL;
3272 rtx note, last;
3273 edge e;
3274 edge_iterator ei;
3275 void **elt;
3277 if (dump_file)
3278 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3279 bb->index);
3281 /* Note that since we are now transitioning from GIMPLE to RTL, we
3282 cannot use the gsi_*_bb() routines because they expect the basic
3283 block to be in GIMPLE, instead of RTL. Therefore, we need to
3284 access the BB sequence directly. */
3285 stmts = bb_seq (bb);
3286 bb->il.gimple = NULL;
3287 rtl_profile_for_bb (bb);
3288 init_rtl_bb_info (bb);
3289 bb->flags |= BB_RTL;
3291 /* Remove the RETURN_EXPR if we may fall through to the exit
3292 instead. */
3293 gsi = gsi_last (stmts);
3294 if (!gsi_end_p (gsi)
3295 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3297 gimple ret_stmt = gsi_stmt (gsi);
3299 gcc_assert (single_succ_p (bb));
3300 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3302 if (bb->next_bb == EXIT_BLOCK_PTR
3303 && !gimple_return_retval (ret_stmt))
3305 gsi_remove (&gsi, false);
3306 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3310 gsi = gsi_start (stmts);
3311 if (!gsi_end_p (gsi))
3313 stmt = gsi_stmt (gsi);
3314 if (gimple_code (stmt) != GIMPLE_LABEL)
3315 stmt = NULL;
3318 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3320 if (stmt || elt)
3322 last = get_last_insn ();
3324 if (stmt)
3326 expand_gimple_stmt (stmt);
3327 gsi_next (&gsi);
3330 if (elt)
3331 emit_label ((rtx) *elt);
3333 /* Java emits line number notes at the top of labels.
3334 ??? Make this go away once line number notes are obsoleted. */
3335 BB_HEAD (bb) = NEXT_INSN (last);
3336 if (NOTE_P (BB_HEAD (bb)))
3337 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3338 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3340 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3342 else
3343 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3345 NOTE_BASIC_BLOCK (note) = bb;
3347 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3349 basic_block new_bb;
3351 stmt = gsi_stmt (gsi);
3353 /* If this statement is a non-debug one, and we generate debug
3354 insns, then this one might be the last real use of a TERed
3355 SSA_NAME, but where there are still some debug uses further
3356 down. Expanding the current SSA name in such further debug
3357 uses by its RHS might lead to wrong debug info, as coalescing
3358 might make the operands of such RHS be placed into the same
3359 pseudo as something else. Like so:
3360 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3361 use(a_1);
3362 a_2 = ...
3363 #DEBUG ... => a_1
3364 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3365 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3366 the write to a_2 would actually have clobbered the place which
3367 formerly held a_0.
3369 So, instead of that, we recognize the situation, and generate
3370 debug temporaries at the last real use of TERed SSA names:
3371 a_1 = a_0 + 1;
3372 #DEBUG #D1 => a_1
3373 use(a_1);
3374 a_2 = ...
3375 #DEBUG ... => #D1
3377 if (MAY_HAVE_DEBUG_INSNS
3378 && SA.values
3379 && !is_gimple_debug (stmt))
3381 ssa_op_iter iter;
3382 tree op;
3383 gimple def;
3385 location_t sloc = get_curr_insn_source_location ();
3386 tree sblock = get_curr_insn_block ();
3388 /* Look for SSA names that have their last use here (TERed
3389 names always have only one real use). */
3390 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3391 if ((def = get_gimple_for_ssa_name (op)))
3393 imm_use_iterator imm_iter;
3394 use_operand_p use_p;
3395 bool have_debug_uses = false;
3397 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3399 if (gimple_debug_bind_p (USE_STMT (use_p)))
3401 have_debug_uses = true;
3402 break;
3406 if (have_debug_uses)
3408 /* OP is a TERed SSA name, with DEF its defining
3409 statement, and where OP is used in further debug
3410 instructions. Generate a debug temporary, and
3411 replace all uses of OP in debug insns with that
3412 temporary. */
3413 gimple debugstmt;
3414 tree value = gimple_assign_rhs_to_tree (def);
3415 tree vexpr = make_node (DEBUG_EXPR_DECL);
3416 rtx val;
3417 enum machine_mode mode;
3419 set_curr_insn_source_location (gimple_location (def));
3420 set_curr_insn_block (gimple_block (def));
3422 DECL_ARTIFICIAL (vexpr) = 1;
3423 TREE_TYPE (vexpr) = TREE_TYPE (value);
3424 if (DECL_P (value))
3425 mode = DECL_MODE (value);
3426 else
3427 mode = TYPE_MODE (TREE_TYPE (value));
3428 DECL_MODE (vexpr) = mode;
3430 val = gen_rtx_VAR_LOCATION
3431 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3433 val = emit_debug_insn (val);
3435 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3437 if (!gimple_debug_bind_p (debugstmt))
3438 continue;
3440 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3441 SET_USE (use_p, vexpr);
3443 update_stmt (debugstmt);
3447 set_curr_insn_source_location (sloc);
3448 set_curr_insn_block (sblock);
3451 currently_expanding_gimple_stmt = stmt;
3453 /* Expand this statement, then evaluate the resulting RTL and
3454 fix up the CFG accordingly. */
3455 if (gimple_code (stmt) == GIMPLE_COND)
3457 new_bb = expand_gimple_cond (bb, stmt);
3458 if (new_bb)
3459 return new_bb;
3461 else if (gimple_debug_bind_p (stmt))
3463 location_t sloc = get_curr_insn_source_location ();
3464 tree sblock = get_curr_insn_block ();
3465 gimple_stmt_iterator nsi = gsi;
3467 for (;;)
3469 tree var = gimple_debug_bind_get_var (stmt);
3470 tree value;
3471 rtx val;
3472 enum machine_mode mode;
3474 if (gimple_debug_bind_has_value_p (stmt))
3475 value = gimple_debug_bind_get_value (stmt);
3476 else
3477 value = NULL_TREE;
3479 last = get_last_insn ();
3481 set_curr_insn_source_location (gimple_location (stmt));
3482 set_curr_insn_block (gimple_block (stmt));
3484 if (DECL_P (var))
3485 mode = DECL_MODE (var);
3486 else
3487 mode = TYPE_MODE (TREE_TYPE (var));
3489 val = gen_rtx_VAR_LOCATION
3490 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3492 val = emit_debug_insn (val);
3494 if (dump_file && (dump_flags & TDF_DETAILS))
3496 /* We can't dump the insn with a TREE where an RTX
3497 is expected. */
3498 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3499 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3500 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3503 /* In order not to generate too many debug temporaries,
3504 we delink all uses of debug statements we already expanded.
3505 Therefore debug statements between definition and real
3506 use of TERed SSA names will continue to use the SSA name,
3507 and not be replaced with debug temps. */
3508 delink_stmt_imm_use (stmt);
3510 gsi = nsi;
3511 gsi_next (&nsi);
3512 if (gsi_end_p (nsi))
3513 break;
3514 stmt = gsi_stmt (nsi);
3515 if (!gimple_debug_bind_p (stmt))
3516 break;
3519 set_curr_insn_source_location (sloc);
3520 set_curr_insn_block (sblock);
3522 else
3524 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3526 bool can_fallthru;
3527 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3528 if (new_bb)
3530 if (can_fallthru)
3531 bb = new_bb;
3532 else
3533 return new_bb;
3536 else
3538 def_operand_p def_p;
3539 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3541 if (def_p != NULL)
3543 /* Ignore this stmt if it is in the list of
3544 replaceable expressions. */
3545 if (SA.values
3546 && bitmap_bit_p (SA.values,
3547 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3548 continue;
3550 last = expand_gimple_stmt (stmt);
3551 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3556 currently_expanding_gimple_stmt = NULL;
3558 /* Expand implicit goto and convert goto_locus. */
3559 FOR_EACH_EDGE (e, ei, bb->succs)
3561 if (e->goto_locus && e->goto_block)
3563 set_curr_insn_source_location (e->goto_locus);
3564 set_curr_insn_block (e->goto_block);
3565 e->goto_locus = curr_insn_locator ();
3567 e->goto_block = NULL;
3568 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3570 emit_jump (label_rtx_for_bb (e->dest));
3571 e->flags &= ~EDGE_FALLTHRU;
3575 /* Expanded RTL can create a jump as the last instruction of a block.
3576 Later passes might assume it is a jump to the successor and break edge insertion.
3577 We need to insert a dummy move to prevent this. PR41440. */
3578 if (single_succ_p (bb)
3579 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3580 && (last = get_last_insn ())
3581 && JUMP_P (last))
3583 rtx dummy = gen_reg_rtx (SImode);
3584 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3587 do_pending_stack_adjust ();
3589 /* Find the block tail. The last insn in the block is the insn
3590 before a barrier and/or table jump insn. */
3591 last = get_last_insn ();
3592 if (BARRIER_P (last))
3593 last = PREV_INSN (last);
3594 if (JUMP_TABLE_DATA_P (last))
3595 last = PREV_INSN (PREV_INSN (last));
3596 BB_END (bb) = last;
3598 update_bb_for_insn (bb);
3600 return bb;
3604 /* Create a basic block for initialization code. */
3606 static basic_block
3607 construct_init_block (void)
3609 basic_block init_block, first_block;
3610 edge e = NULL;
3611 int flags;
3613 /* Multiple entry points not supported yet. */
3614 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3615 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3616 init_rtl_bb_info (EXIT_BLOCK_PTR);
3617 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3618 EXIT_BLOCK_PTR->flags |= BB_RTL;
3620 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3622 /* When the entry edge points to the first basic block, we don't need a jump;
3623 otherwise we have to jump to the proper target. */
3624 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3626 tree label = gimple_block_label (e->dest);
3628 emit_jump (label_rtx (label));
3629 flags = 0;
3631 else
3632 flags = EDGE_FALLTHRU;
3634 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3635 get_last_insn (),
3636 ENTRY_BLOCK_PTR);
3637 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3638 init_block->count = ENTRY_BLOCK_PTR->count;
3639 if (e)
3641 first_block = e->dest;
3642 redirect_edge_succ (e, init_block);
3643 e = make_edge (init_block, first_block, flags);
3645 else
3646 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3647 e->probability = REG_BR_PROB_BASE;
3648 e->count = ENTRY_BLOCK_PTR->count;
3650 update_bb_for_insn (init_block);
3651 return init_block;
3654 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3655 found in the block tree. */
3657 static void
3658 set_block_levels (tree block, int level)
3660 while (block)
3662 BLOCK_NUMBER (block) = level;
3663 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3664 block = BLOCK_CHAIN (block);
3668 /* Create a block containing landing pads and similar stuff. */
3670 static void
3671 construct_exit_block (void)
3673 rtx head = get_last_insn ();
3674 rtx end;
3675 basic_block exit_block;
3676 edge e, e2;
3677 unsigned ix;
3678 edge_iterator ei;
3679 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3681 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3683 /* Make sure the locus is set to the end of the function, so that
3684 epilogue line numbers and warnings are set properly. */
3685 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3686 input_location = cfun->function_end_locus;
3688 /* The following insns belong to the top scope. */
3689 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3691 /* Generate rtl for function exit. */
3692 expand_function_end ();
3694 end = get_last_insn ();
3695 if (head == end)
3696 return;
3697 /* While emitting the function end we may have moved the end of the last basic block. */
3699 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3700 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3701 head = NEXT_INSN (head);
3702 exit_block = create_basic_block (NEXT_INSN (head), end,
3703 EXIT_BLOCK_PTR->prev_bb);
3704 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3705 exit_block->count = EXIT_BLOCK_PTR->count;
3707 ix = 0;
3708 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3710 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3711 if (!(e->flags & EDGE_ABNORMAL))
3712 redirect_edge_succ (e, exit_block);
3713 else
3714 ix++;
3717 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3718 e->probability = REG_BR_PROB_BASE;
3719 e->count = EXIT_BLOCK_PTR->count;
3720 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3721 if (e2 != e)
3723 e->count -= e2->count;
3724 exit_block->count -= e2->count;
3725 exit_block->frequency -= EDGE_FREQUENCY (e2);
3727 if (e->count < 0)
3728 e->count = 0;
3729 if (exit_block->count < 0)
3730 exit_block->count = 0;
3731 if (exit_block->frequency < 0)
3732 exit_block->frequency = 0;
3733 update_bb_for_insn (exit_block);
3736 /* Helper function for discover_nonconstant_array_refs.
3737 Look for ARRAY_REF nodes with non-constant indexes and mark them
3738 addressable. */
3740 static tree
3741 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3742 void *data ATTRIBUTE_UNUSED)
3744 tree t = *tp;
3746 if (IS_TYPE_OR_DECL_P (t))
3747 *walk_subtrees = 0;
3748 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3750 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3751 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3752 && (!TREE_OPERAND (t, 2)
3753 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3754 || (TREE_CODE (t) == COMPONENT_REF
3755 && (!TREE_OPERAND (t,2)
3756 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3757 || TREE_CODE (t) == BIT_FIELD_REF
3758 || TREE_CODE (t) == REALPART_EXPR
3759 || TREE_CODE (t) == IMAGPART_EXPR
3760 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3761 || CONVERT_EXPR_P (t))
3762 t = TREE_OPERAND (t, 0);
3764 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3766 t = get_base_address (t);
3767 if (t && DECL_P (t)
3768 && DECL_MODE (t) != BLKmode)
3769 TREE_ADDRESSABLE (t) = 1;
3772 *walk_subtrees = 0;
3775 return NULL_TREE;
3778 /* RTL expansion is not able to compile array references with variable
3779 offsets for arrays stored in a single register. Discover such
3780 expressions and mark variables as addressable to avoid this
3781 scenario. */
3783 static void
3784 discover_nonconstant_array_refs (void)
3786 basic_block bb;
3787 gimple_stmt_iterator gsi;
3789 FOR_EACH_BB (bb)
3790 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3792 gimple stmt = gsi_stmt (gsi);
3793 if (!is_gimple_debug (stmt))
3794 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
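/* An example of what this pass catches (editorial sketch):  */
static int
variable_index_example (int i)
{
  int a[2] = { 1, 2 };	/* small enough to live in a register...  */
  return a[i];		/* ...but the variable index forces A into memory */
}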
3798 /* This function sets crtl->args.internal_arg_pointer to a virtual
3799 register if DRAP is needed. The local register allocator will replace
3800 virtual_incoming_args_rtx with the virtual register. */
3802 static void
3803 expand_stack_alignment (void)
3805 rtx drap_rtx;
3806 unsigned int preferred_stack_boundary;
3808 if (! SUPPORTS_STACK_ALIGNMENT)
3809 return;
3811 if (cfun->calls_alloca
3812 || cfun->has_nonlocal_label
3813 || crtl->has_nonlocal_goto)
3814 crtl->need_drap = true;
3816 /* Call update_stack_boundary here again to update incoming stack
3817 boundary. It may set incoming stack alignment to a different
3818 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3819 use the minimum incoming stack alignment to check if it is OK
3820 to perform sibcall optimization since sibcall optimization will
3821 only align the outgoing stack to incoming stack boundary. */
3822 if (targetm.calls.update_stack_boundary)
3823 targetm.calls.update_stack_boundary ();
3825 /* The incoming stack frame has to be aligned at least at
3826 parm_stack_boundary. */
3827 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3829 /* Update crtl->stack_alignment_estimated and use it later to align
3830 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3831 exceptions since callgraph doesn't collect incoming stack alignment
3832 in this case. */
3833 if (cfun->can_throw_non_call_exceptions
3834 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3835 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3836 else
3837 preferred_stack_boundary = crtl->preferred_stack_boundary;
3838 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3839 crtl->stack_alignment_estimated = preferred_stack_boundary;
3840 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3841 crtl->stack_alignment_needed = preferred_stack_boundary;
3843 gcc_assert (crtl->stack_alignment_needed
3844 <= crtl->stack_alignment_estimated);
3846 crtl->stack_realign_needed
3847 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3848 crtl->stack_realign_tried = crtl->stack_realign_needed;
3850 crtl->stack_realign_processed = true;
3852 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3853 alignment. */
3854 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3855 drap_rtx = targetm.calls.get_drap_rtx ();
3857 /* stack_realign_drap and drap_rtx must match. */
3858 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3860 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3861 if (NULL != drap_rtx)
3863 crtl->args.internal_arg_pointer = drap_rtx;
3865 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3866 needed. */
3867 fixup_tail_calls ();
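/* For example (editorial sketch): a function calling alloca may have to
   realign its stack dynamically when the required alignment exceeds the
   incoming boundary, which is why cfun->calls_alloca feeds
   crtl->need_drap above:

     int f (int n) { char *p = __builtin_alloca (n); return p[0]; }  */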
3871 /* Translate the intermediate representation contained in the CFG
3872 from GIMPLE trees to RTL.
3874 We do conversion per basic block and preserve/update the tree CFG.
3875 This implies we have to do some magic as the CFG can simultaneously
3876 consist of basic blocks containing RTL and GIMPLE trees. This can
3877 confuse the CFG hooks, so be careful not to manipulate the CFG during
3878 the expansion. */
3880 static unsigned int
3881 gimple_expand_cfg (void)
3883 basic_block bb, init_block;
3884 sbitmap blocks;
3885 edge_iterator ei;
3886 edge e;
3887 rtx var_seq;
3888 unsigned i;
3890 timevar_push (TV_OUT_OF_SSA);
3891 rewrite_out_of_ssa (&SA);
3892 timevar_pop (TV_OUT_OF_SSA);
3893 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3894 sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
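
  /* For example (assuming the default --param ssp-buffer-size=8), a
     function whose largest local array is "char buf[4]" gets no guard
     under -fstack-protector, so -Wstack-protector reports it via the
     second warning above.  */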

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
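
  /* __main is a hook for targets that have no other way to run static
     constructors before main; on targets with proper init sections
     (e.g. ELF .init_array/.ctors) expand_main_function emits nothing.  */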

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
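
  /* The prologue emitted here copies the guard value (from
     __stack_chk_guard or a TLS slot, depending on the target) into a
     stack slot placed above the local arrays; the matching epilogue
     check calls __stack_chk_fail on mismatch.  */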

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
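
  /* expand_gimple_basic_block may split the block it expands (for
     tail calls, for instance) and returns the last block it created,
     so reassigning BB keeps the walk from revisiting new blocks.  */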

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
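
  /* Commit instruction sequences that expansion queued on edges (for
     instance the copies materializing PHI nodes): sequences on the
     entry edge go right after parm_birth_insn, everything else through
     the generic edge-insertion machinery.  */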

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              /* Avoid putting insns before parm_birth_insn.  */
              if (e->src == ENTRY_BLOCK_PTR
                  && single_succ_p (ENTRY_BLOCK_PTR)
                  && parm_birth_insn)
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
                  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
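
  /* Expansion may have created control flow inside what used to be a
     single GIMPLE block (builtin expansion, switch tables, and so on),
     so rescan every block and split it into proper sub basic blocks.  */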

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();
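
  /* expand_stack_alignment finalizes the alignment estimates seeded
     above (see the STACK_BOUNDARY initializations) and decides whether
     the prologue must realign the stack and whether a DRAP register
     is required.  */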

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};