/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
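
/* A worked example of the invariant above, using hypothetical target
   values: if PREFERRED_STACK_BOUNDARY is 128 bits (a 16-byte alignment)
   and STARTING_FRAME_OFFSET is 8, expand_used_vars computes
   off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, so every offset handed
   out below keeps (frame_offset + frame_phase) % 16 == 0.  */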

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we need to add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for union-containing structures.  Else RTL alias analysis comes
   along and due to type-based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size.  This sorts sizes in increasing order
     (matching the stack_vars_sorted comment above); the binpacking loop
     in partition_stack_vars walks the sorted array backward, so it still
     sees candidate sizes in decreasing order.  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          offset += jsize;
          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}
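
/* A walk-through of the loop above with made-up numbers: two locals with
   disjoint lifetimes (hence no conflict edge), sizes 16 and 48, sort as
   {16, 48}.  When SI reaches the 48-byte object, the backward scan finds
   the 16-byte one: it fits (16 <= 48), union_stack_vars places it at
   offset 0 inside the 48-byte partition, then OFFSET advances to 16 and
   ISIZE drops to 32, leaving room for further non-conflicting objects
   (mirroring the "O += size(B); S -= size(B)" steps of the pseudo-code).
   The frame then needs 48 bytes where separate slots would need 64.  */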

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
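
/* A note on the alignment trick above (illustrative): for nonzero OFFSET,
   offset & -offset isolates the lowest set bit, i.e. the largest power of
   two dividing OFFSET, which is all the alignment the slot's address can
   be promised.  E.g. offset == 24 (binary 11000) gives 8, so DECL_ALIGN
   becomes 8 * BITS_PER_UNIT, capped by BASE_ALIGN.  */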

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
1836 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1837 statement STMT. */
1839 static void
1840 expand_call_stmt (gimple stmt)
1842 tree exp;
1843 tree lhs = gimple_call_lhs (stmt);
1844 size_t i;
1845 bool builtin_p;
1846 tree decl;
1848 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1850 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1851 decl = gimple_call_fndecl (stmt);
1852 builtin_p = decl && DECL_BUILT_IN (decl);
1854 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1855 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1857 for (i = 0; i < gimple_call_num_args (stmt); i++)
1859 tree arg = gimple_call_arg (stmt, i);
1860 gimple def;
1861 /* TER addresses into arguments of builtin functions so we have a
1862 chance to infer more correct alignment information. See PR39954. */
1863 if (builtin_p
1864 && TREE_CODE (arg) == SSA_NAME
1865 && (def = get_gimple_for_ssa_name (arg))
1866 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1867 arg = gimple_assign_rhs1 (def);
1868 CALL_EXPR_ARG (exp, i) = arg;
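/* A hypothetical example (SSA names invented for illustration): given

     d_3 = &buf;
     __builtin_memcpy (d_3, s_2, n_4);

   where the definition of d_3 was TERed, the ADDR_EXPR &buf is put back
   into the argument list, so the expander can see the alignment of BUF
   itself rather than that of an opaque pointer.  */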
1871 if (gimple_has_side_effects (stmt))
1872 TREE_SIDE_EFFECTS (exp) = 1;
1874 if (gimple_call_nothrow_p (stmt))
1875 TREE_NOTHROW (exp) = 1;
1877 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1878 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1879 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1880 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1881 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1882 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1883 TREE_BLOCK (exp) = gimple_block (stmt);
1885 if (lhs)
1886 expand_assignment (lhs, exp, false);
1887 else
1888 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1891 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1892 STMT that doesn't require special handling for outgoing edges. That
1893 is no tailcalls and no GIMPLE_COND. */
1895 static void
1896 expand_gimple_stmt_1 (gimple stmt)
1898 tree op0;
1899 switch (gimple_code (stmt))
1901 case GIMPLE_GOTO:
1902 op0 = gimple_goto_dest (stmt);
1903 if (TREE_CODE (op0) == LABEL_DECL)
1904 expand_goto (op0);
1905 else
1906 expand_computed_goto (op0);
1907 break;
1908 case GIMPLE_LABEL:
1909 expand_label (gimple_label_label (stmt));
1910 break;
1911 case GIMPLE_NOP:
1912 case GIMPLE_PREDICT:
1913 break;
1914 case GIMPLE_SWITCH:
1915 expand_case (stmt);
1916 break;
1917 case GIMPLE_ASM:
1918 expand_asm_stmt (stmt);
1919 break;
1920 case GIMPLE_CALL:
1921 expand_call_stmt (stmt);
1922 break;
1924 case GIMPLE_RETURN:
1925 op0 = gimple_return_retval (stmt);
1927 if (op0 && op0 != error_mark_node)
1929 tree result = DECL_RESULT (current_function_decl);
1931 /* If we are not returning the current function's RESULT_DECL,
1932 build an assignment to it. */
1933 if (op0 != result)
1935 /* I believe that a function's RESULT_DECL is unique. */
1936 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1938 /* ??? We'd like to use simply expand_assignment here,
1939 but this fails if the value is of BLKmode but the return
1940 decl is a register. expand_return has special handling
1941 for this combination, which eventually should move
1942 to common code. See comments there. Until then, let's
1943 build a modify expression :-/ */
1944 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1945 result, op0);
1948 if (!op0)
1949 expand_null_return ();
1950 else
1951 expand_return (op0);
1952 break;
1954 case GIMPLE_ASSIGN:
1956 tree lhs = gimple_assign_lhs (stmt);
1958 /* Tree expand used to fiddle with |= and &= of two bitfield
1959 COMPONENT_REFs here. This can't happen with gimple; the LHS
1960 of binary assigns must be a gimple reg. */
1962 if (TREE_CODE (lhs) != SSA_NAME
1963 || get_gimple_rhs_class (gimple_expr_code (stmt))
1964 == GIMPLE_SINGLE_RHS)
1966 tree rhs = gimple_assign_rhs1 (stmt);
1967 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1968 == GIMPLE_SINGLE_RHS);
1969 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1970 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1971 expand_assignment (lhs, rhs,
1972 gimple_assign_nontemporal_move_p (stmt));
1974 else
1976 rtx target, temp;
1977 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1978 struct separate_ops ops;
1979 bool promoted = false;
1981 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1982 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1983 promoted = true;
1985 ops.code = gimple_assign_rhs_code (stmt);
1986 ops.type = TREE_TYPE (lhs);
1987 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1989 case GIMPLE_TERNARY_RHS:
1990 ops.op2 = gimple_assign_rhs3 (stmt);
1991 /* Fallthru */
1992 case GIMPLE_BINARY_RHS:
1993 ops.op1 = gimple_assign_rhs2 (stmt);
1994 /* Fallthru */
1995 case GIMPLE_UNARY_RHS:
1996 ops.op0 = gimple_assign_rhs1 (stmt);
1997 break;
1998 default:
1999 gcc_unreachable ();
2001 ops.location = gimple_location (stmt);
2003 /* If we want to use a nontemporal store, force the value to
2004 register first. If we store into a promoted register,
2005 don't directly expand to target. */
2006 temp = nontemporal || promoted ? NULL_RTX : target;
2007 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2008 EXPAND_NORMAL);
2010 if (temp == target)
2012 else if (promoted)
2014 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2015 /* If TEMP is a VOIDmode constant, use convert_modes to make
2016 sure that we properly convert it. */
2017 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2019 temp = convert_modes (GET_MODE (target),
2020 TYPE_MODE (ops.type),
2021 temp, unsignedp);
2022 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2023 GET_MODE (target), temp, unsignedp);
2026 convert_move (SUBREG_REG (target), temp, unsignedp);
2028 else if (nontemporal && emit_storent_insn (target, temp))
2030 else
2032 temp = force_operand (temp, target);
2033 if (temp != target)
2034 emit_move_insn (target, temp);
2038 break;
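/* A note on the promoted-subreg path above (a sketch of one target's
   behavior, not an exhaustive rule): on MIPS64, for example, SImode
   values are kept sign-extended in DImode hard registers, so the LHS
   expands to (subreg:SI (reg:DI ...) 0) with SUBREG_PROMOTED_VAR_P set;
   the RHS value is then widened with convert_move into the full DImode
   register instead of being stored through the subreg directly.  */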
2040 default:
2041 gcc_unreachable ();
2045 /* Expand one gimple statement STMT and return the last RTL instruction
2046 before any of the newly generated ones.
2048 In addition to generating the necessary RTL instructions this also
2049 sets REG_EH_REGION notes if necessary and sets the current source
2050 location for diagnostics. */
2052 static rtx
2053 expand_gimple_stmt (gimple stmt)
2055 int lp_nr = 0;
2056 rtx last = NULL;
2057 location_t saved_location = input_location;
2059 last = get_last_insn ();
2061 /* If this is an expression of some kind and it has an associated line
2062 number, then emit the line number before expanding the expression.
2064 We need to save and restore the file and line information so that
2065 errors discovered during expansion are emitted with the right
2066 information. It would be better if the diagnostic routines
2067 used the file/line information embedded in the tree nodes rather
2068 than globals. */
2069 gcc_assert (cfun);
2071 if (gimple_has_location (stmt))
2073 input_location = gimple_location (stmt);
2074 set_curr_insn_source_location (input_location);
2076 /* Record where the insns produced belong. */
2077 set_curr_insn_block (gimple_block (stmt));
2080 expand_gimple_stmt_1 (stmt);
2081 /* Free any temporaries used to evaluate this statement. */
2082 free_temp_slots ();
2084 input_location = saved_location;
2086 /* Mark all insns that may trap. */
2087 lp_nr = lookup_stmt_eh_lp (stmt);
2088 if (lp_nr)
2090 rtx insn;
2091 for (insn = next_real_insn (last); insn;
2092 insn = next_real_insn (insn))
2094 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2095 /* If we want exceptions for non-call insns, any
2096 may_trap_p instruction may throw. */
2097 && GET_CODE (PATTERN (insn)) != CLOBBER
2098 && GET_CODE (PATTERN (insn)) != USE
2099 && insn_could_throw_p (insn))
2100 make_reg_eh_region_note (insn, 0, lp_nr);
2104 return last;
2107 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2108 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2109 generated a tail call (something that might be denied by the ABI
2110 rules governing the call; see calls.c).
2112 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2113 can still reach the rest of BB. The case here is __builtin_sqrt,
2114 where the NaN result goes through the external function (with a
2115 tailcall) and the normal result happens via a sqrt instruction. */
2117 static basic_block
2118 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2120 rtx last2, last;
2121 edge e;
2122 edge_iterator ei;
2123 int probability;
2124 gcov_type count;
2126 last2 = last = expand_gimple_stmt (stmt);
2128 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2129 if (CALL_P (last) && SIBLING_CALL_P (last))
2130 goto found;
2132 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2134 *can_fallthru = true;
2135 return NULL;
2137 found:
2138 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2139 Any instructions emitted here are about to be deleted. */
2140 do_pending_stack_adjust ();
2142 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2143 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2144 EH or abnormal edges, we shouldn't have created a tail call in
2145 the first place. So it seems to me we should just be removing
2146 all edges here, or redirecting the existing fallthru edge to
2147 the exit block. */
2149 probability = 0;
2150 count = 0;
2152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2154 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2156 if (e->dest != EXIT_BLOCK_PTR)
2158 e->dest->count -= e->count;
2159 e->dest->frequency -= EDGE_FREQUENCY (e);
2160 if (e->dest->count < 0)
2161 e->dest->count = 0;
2162 if (e->dest->frequency < 0)
2163 e->dest->frequency = 0;
2165 count += e->count;
2166 probability += e->probability;
2167 remove_edge (e);
2169 else
2170 ei_next (&ei);
2173 /* This is somewhat ugly: the call_expr expander often emits instructions
2174 after the sibcall (to perform the function return). These confuse the
2175 find_many_sub_basic_blocks code, so we need to get rid of them. */
2176 last = NEXT_INSN (last);
2177 gcc_assert (BARRIER_P (last));
2179 *can_fallthru = false;
2180 while (NEXT_INSN (last))
2182 /* For instance, the sqrt builtin expander expands an if with a
2183 sibcall in the then branch and a label for the else branch. */
2184 if (LABEL_P (NEXT_INSN (last)))
2186 *can_fallthru = true;
2187 break;
2189 delete_insn (NEXT_INSN (last));
2192 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2193 e->probability += probability;
2194 e->count += count;
2195 BB_END (bb) = last;
2196 update_bb_for_insn (bb);
2198 if (NEXT_INSN (last))
2200 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2202 last = BB_END (bb);
2203 if (BARRIER_P (last))
2204 BB_END (bb) = PREV_INSN (last);
2207 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2209 return bb;
2212 /* Return the difference between the floor and the truncated result of
2213 a signed division by OP1 with remainder MOD. */
2214 static rtx
2215 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2217 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2218 return gen_rtx_IF_THEN_ELSE
2219 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2220 gen_rtx_IF_THEN_ELSE
2221 (mode, gen_rtx_LT (BImode,
2222 gen_rtx_DIV (mode, op1, mod),
2223 const0_rtx),
2224 constm1_rtx, const0_rtx),
2225 const0_rtx);
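/* Worked example for floor_sdiv_adjust: op0 = -7, op1 = 2 gives the
   truncated quotient -3 with mod = -1.  mod != 0 and
   op1 / mod = 2 / -1 = -2 < 0 (the operands have opposite signs), so
   the adjustment is -1 and -3 + -1 = -4 == floor (-7 / 2).  */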
2228 /* Return the difference between the ceil and the truncated result of
2229 a signed division by OP1 with remainder MOD. */
2230 static rtx
2231 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2233 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2234 return gen_rtx_IF_THEN_ELSE
2235 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2236 gen_rtx_IF_THEN_ELSE
2237 (mode, gen_rtx_GT (BImode,
2238 gen_rtx_DIV (mode, op1, mod),
2239 const0_rtx),
2240 const1_rtx, const0_rtx),
2241 const0_rtx);
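/* Worked example for ceil_sdiv_adjust: op0 = 7, op1 = 2 gives the
   truncated quotient 3 with mod = 1.  mod != 0 and
   op1 / mod = 2 / 1 = 2 > 0 (the operands have the same sign), so the
   adjustment is 1 and 3 + 1 = 4 == ceil (7 / 2).  */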
2244 /* Return the difference between the ceil and the truncated result of
2245 an unsigned division by OP1 with remainder MOD. */
2246 static rtx
2247 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2249 /* (mod != 0 ? 1 : 0) */
2250 return gen_rtx_IF_THEN_ELSE
2251 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2252 const1_rtx, const0_rtx);
2255 /* Return the difference between the rounded and the truncated result
2256 of a signed division by OP1 with remainder MOD. Halfway cases are
2257 rounded away from zero, rather than to the nearest even number. */
2258 static rtx
2259 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2261 /* (abs (mod) >= abs (op1) - abs (mod)
2262 ? (op1 / mod > 0 ? 1 : -1)
2263 : 0) */
2264 return gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2266 gen_rtx_MINUS (mode,
2267 gen_rtx_ABS (mode, op1),
2268 gen_rtx_ABS (mode, mod))),
2269 gen_rtx_IF_THEN_ELSE
2270 (mode, gen_rtx_GT (BImode,
2271 gen_rtx_DIV (mode, op1, mod),
2272 const0_rtx),
2273 const1_rtx, constm1_rtx),
2274 const0_rtx);
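/* Worked example for round_sdiv_adjust: op0 = 7, op1 = 2 gives the
   truncated quotient 3 with mod = 1.  Here abs (mod) = 1 >=
   abs (op1) - abs (mod) = 1, i.e. the remainder is at least half the
   divisor, and op1 / mod > 0, so the adjustment is 1 and
   3 + 1 = 4 == 3.5 rounded away from zero.  */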
2277 /* Return the difference between the rounded and the truncated result
2278 of an unsigned division by OP1 with remainder MOD. Halfway cases
2279 are rounded away from zero, rather than to the nearest even
2280 number. */
2281 static rtx
2282 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2284 /* (mod >= op1 - mod ? 1 : 0) */
2285 return gen_rtx_IF_THEN_ELSE
2286 (mode, gen_rtx_GE (BImode, mod,
2287 gen_rtx_MINUS (mode, op1, mod)),
2288 const1_rtx, const0_rtx);
2291 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2292 any rtl. */
2294 static rtx
2295 convert_debug_memory_address (enum machine_mode mode, rtx x,
2296 addr_space_t as)
2298 enum machine_mode xmode = GET_MODE (x);
2300 #ifndef POINTERS_EXTEND_UNSIGNED
2301 gcc_assert (mode == Pmode
2302 || mode == targetm.addr_space.address_mode (as));
2303 gcc_assert (xmode == mode || xmode == VOIDmode);
2304 #else
2305 rtx temp;
2306 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2307 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2309 gcc_assert (mode == address_mode || mode == pointer_mode);
2311 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2312 return x;
2314 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2315 x = simplify_gen_subreg (mode, x, xmode,
2316 subreg_lowpart_offset
2317 (mode, xmode));
2318 else if (POINTERS_EXTEND_UNSIGNED > 0)
2319 x = gen_rtx_ZERO_EXTEND (mode, x);
2320 else if (!POINTERS_EXTEND_UNSIGNED)
2321 x = gen_rtx_SIGN_EXTEND (mode, x);
2322 else
2324 switch (GET_CODE (x))
2326 case SUBREG:
2327 if ((SUBREG_PROMOTED_VAR_P (x)
2328 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2329 || (GET_CODE (SUBREG_REG (x)) == PLUS
2330 && REG_P (XEXP (SUBREG_REG (x), 0))
2331 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2332 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2333 && GET_MODE (SUBREG_REG (x)) == mode)
2334 return SUBREG_REG (x);
2335 break;
2336 case LABEL_REF:
2337 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2338 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2339 return temp;
2340 case SYMBOL_REF:
2341 temp = shallow_copy_rtx (x);
2342 PUT_MODE (temp, mode);
2343 return temp;
2344 case CONST:
2345 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2346 if (temp)
2347 temp = gen_rtx_CONST (mode, temp);
2348 return temp;
2349 case PLUS:
2350 case MINUS:
2351 if (CONST_INT_P (XEXP (x, 1)))
2353 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2354 if (temp)
2355 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2357 break;
2358 default:
2359 break;
2361 /* Don't know how to express ptr_extend as operation in debug info. */
2362 return NULL;
2364 #endif /* POINTERS_EXTEND_UNSIGNED */
2366 return x;
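/* For instance, widening a (symbol_ref:SI foo) to DImode: with
   POINTERS_EXTEND_UNSIGNED > 0 it becomes (zero_extend:DI ...); with
   POINTERS_EXTEND_UNSIGNED defined to 0 it becomes
   (sign_extend:DI ...); and on ptr_extend targets (negative value) the
   SYMBOL_REF case above simply re-modes a shallow copy.  In all cases
   this only builds rtl for debug info; no insns are emitted.  */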
2369 /* Return an RTX equivalent to the value of the tree expression
2370 EXP. */
2372 static rtx
2373 expand_debug_expr (tree exp)
2375 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2376 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2377 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2378 addr_space_t as;
2380 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2382 case tcc_expression:
2383 switch (TREE_CODE (exp))
2385 case COND_EXPR:
2386 case DOT_PROD_EXPR:
2387 case WIDEN_MULT_PLUS_EXPR:
2388 case WIDEN_MULT_MINUS_EXPR:
2389 case FMA_EXPR:
2390 goto ternary;
2392 case TRUTH_ANDIF_EXPR:
2393 case TRUTH_ORIF_EXPR:
2394 case TRUTH_AND_EXPR:
2395 case TRUTH_OR_EXPR:
2396 case TRUTH_XOR_EXPR:
2397 goto binary;
2399 case TRUTH_NOT_EXPR:
2400 goto unary;
2402 default:
2403 break;
2405 break;
2407 ternary:
2408 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2409 if (!op2)
2410 return NULL_RTX;
2411 /* Fall through. */
2413 binary:
2414 case tcc_binary:
2415 case tcc_comparison:
2416 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2417 if (!op1)
2418 return NULL_RTX;
2419 /* Fall through. */
2421 unary:
2422 case tcc_unary:
2423 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2424 if (!op0)
2425 return NULL_RTX;
2426 break;
2428 case tcc_type:
2429 case tcc_statement:
2430 gcc_unreachable ();
2432 case tcc_constant:
2433 case tcc_exceptional:
2434 case tcc_declaration:
2435 case tcc_reference:
2436 case tcc_vl_exp:
2437 break;
2440 switch (TREE_CODE (exp))
2442 case STRING_CST:
2443 if (!lookup_constant_def (exp))
2445 if (strlen (TREE_STRING_POINTER (exp)) + 1
2446 != (size_t) TREE_STRING_LENGTH (exp))
2447 return NULL_RTX;
2448 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2449 op0 = gen_rtx_MEM (BLKmode, op0);
2450 set_mem_attributes (op0, exp, 0);
2451 return op0;
2453 /* Fall through... */
2455 case INTEGER_CST:
2456 case REAL_CST:
2457 case FIXED_CST:
2458 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2459 return op0;
2461 case COMPLEX_CST:
2462 gcc_assert (COMPLEX_MODE_P (mode));
2463 op0 = expand_debug_expr (TREE_REALPART (exp));
2464 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2465 return gen_rtx_CONCAT (mode, op0, op1);
2467 case DEBUG_EXPR_DECL:
2468 op0 = DECL_RTL_IF_SET (exp);
2470 if (op0)
2471 return op0;
2473 op0 = gen_rtx_DEBUG_EXPR (mode);
2474 DEBUG_EXPR_TREE_DECL (op0) = exp;
2475 SET_DECL_RTL (exp, op0);
2477 return op0;
2479 case VAR_DECL:
2480 case PARM_DECL:
2481 case FUNCTION_DECL:
2482 case LABEL_DECL:
2483 case CONST_DECL:
2484 case RESULT_DECL:
2485 op0 = DECL_RTL_IF_SET (exp);
2487 /* This decl was probably optimized away. */
2488 if (!op0)
2490 if (TREE_CODE (exp) != VAR_DECL
2491 || DECL_EXTERNAL (exp)
2492 || !TREE_STATIC (exp)
2493 || !DECL_NAME (exp)
2494 || DECL_HARD_REGISTER (exp)
2495 || mode == VOIDmode)
2496 return NULL;
2498 op0 = make_decl_rtl_for_debug (exp);
2499 if (!MEM_P (op0)
2500 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2501 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2502 return NULL;
2504 else
2505 op0 = copy_rtx (op0);
2507 if (GET_MODE (op0) == BLKmode
2508 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2509 below would ICE. While it is likely a FE bug,
2510 try to be robust here. See PR43166. */
2511 || mode == BLKmode
2512 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2514 gcc_assert (MEM_P (op0));
2515 op0 = adjust_address_nv (op0, mode, 0);
2516 return op0;
2519 /* Fall through. */
2521 adjust_mode:
2522 case PAREN_EXPR:
2523 case NOP_EXPR:
2524 case CONVERT_EXPR:
2526 enum machine_mode inner_mode = GET_MODE (op0);
2528 if (mode == inner_mode)
2529 return op0;
2531 if (inner_mode == VOIDmode)
2533 if (TREE_CODE (exp) == SSA_NAME)
2534 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2535 else
2536 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2537 if (mode == inner_mode)
2538 return op0;
2541 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2543 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2544 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2545 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2546 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2547 else
2548 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2550 else if (FLOAT_MODE_P (mode))
2552 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2553 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2554 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2555 else
2556 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2558 else if (FLOAT_MODE_P (inner_mode))
2560 if (unsignedp)
2561 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2562 else
2563 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2565 else if (CONSTANT_P (op0)
2566 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2567 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2568 subreg_lowpart_offset (mode,
2569 inner_mode));
2570 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2571 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2572 : unsignedp)
2573 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2574 else
2575 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2577 return op0;
2580 case MEM_REF:
2581 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2583 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2584 TREE_OPERAND (exp, 0),
2585 TREE_OPERAND (exp, 1));
2586 if (newexp)
2587 return expand_debug_expr (newexp);
2589 /* FALLTHROUGH */
2590 case INDIRECT_REF:
2591 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2592 if (!op0)
2593 return NULL;
2595 if (TREE_CODE (exp) == MEM_REF)
2597 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2598 || (GET_CODE (op0) == PLUS
2599 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2600 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2601 Instead just use get_inner_reference. */
2602 goto component_ref;
2604 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2605 if (!op1 || !CONST_INT_P (op1))
2606 return NULL;
2608 op0 = plus_constant (op0, INTVAL (op1));
2611 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2612 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2613 else
2614 as = ADDR_SPACE_GENERIC;
2616 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2617 op0, as);
2618 if (op0 == NULL_RTX)
2619 return NULL;
2621 op0 = gen_rtx_MEM (mode, op0);
2622 set_mem_attributes (op0, exp, 0);
2623 if (TREE_CODE (exp) == MEM_REF
2624 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2625 set_mem_expr (op0, NULL_TREE);
2626 set_mem_addr_space (op0, as);
2628 return op0;
2630 case TARGET_MEM_REF:
2631 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2632 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2633 return NULL;
2635 op0 = expand_debug_expr
2636 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2637 if (!op0)
2638 return NULL;
2640 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2641 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2642 else
2643 as = ADDR_SPACE_GENERIC;
2645 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2646 op0, as);
2647 if (op0 == NULL_RTX)
2648 return NULL;
2650 op0 = gen_rtx_MEM (mode, op0);
2652 set_mem_attributes (op0, exp, 0);
2653 set_mem_addr_space (op0, as);
2655 return op0;
2657 component_ref:
2658 case ARRAY_REF:
2659 case ARRAY_RANGE_REF:
2660 case COMPONENT_REF:
2661 case BIT_FIELD_REF:
2662 case REALPART_EXPR:
2663 case IMAGPART_EXPR:
2664 case VIEW_CONVERT_EXPR:
2666 enum machine_mode mode1;
2667 HOST_WIDE_INT bitsize, bitpos;
2668 tree offset;
2669 int volatilep = 0;
2670 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2671 &mode1, &unsignedp, &volatilep, false);
2672 rtx orig_op0;
2674 if (bitsize == 0)
2675 return NULL;
2677 orig_op0 = op0 = expand_debug_expr (tem);
2679 if (!op0)
2680 return NULL;
2682 if (offset)
2684 enum machine_mode addrmode, offmode;
2686 if (!MEM_P (op0))
2687 return NULL;
2689 op0 = XEXP (op0, 0);
2690 addrmode = GET_MODE (op0);
2691 if (addrmode == VOIDmode)
2692 addrmode = Pmode;
2694 op1 = expand_debug_expr (offset);
2695 if (!op1)
2696 return NULL;
2698 offmode = GET_MODE (op1);
2699 if (offmode == VOIDmode)
2700 offmode = TYPE_MODE (TREE_TYPE (offset));
2702 if (addrmode != offmode)
2703 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2704 subreg_lowpart_offset (addrmode,
2705 offmode));
2707 /* Don't use offset_address here, we don't need a
2708 recognizable address, and we don't want to generate
2709 code. */
2710 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2713 if (MEM_P (op0))
2715 if (mode1 == VOIDmode)
2716 /* Bitfield. */
2717 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2718 if (bitpos >= BITS_PER_UNIT)
2720 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2721 bitpos %= BITS_PER_UNIT;
2723 else if (bitpos < 0)
2725 HOST_WIDE_INT units
2726 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2727 op0 = adjust_address_nv (op0, mode1, units);
2728 bitpos += units * BITS_PER_UNIT;
2730 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2731 op0 = adjust_address_nv (op0, mode, 0);
2732 else if (GET_MODE (op0) != mode1)
2733 op0 = adjust_address_nv (op0, mode1, 0);
2734 else
2735 op0 = copy_rtx (op0);
2736 if (op0 == orig_op0)
2737 op0 = shallow_copy_rtx (op0);
2738 set_mem_attributes (op0, exp, 0);
2741 if (bitpos == 0 && mode == GET_MODE (op0))
2742 return op0;
2744 if (bitpos < 0)
2745 return NULL;
2747 if (GET_MODE (op0) == BLKmode)
2748 return NULL;
2750 if ((bitpos % BITS_PER_UNIT) == 0
2751 && bitsize == GET_MODE_BITSIZE (mode1))
2753 enum machine_mode opmode = GET_MODE (op0);
2755 if (opmode == VOIDmode)
2756 opmode = TYPE_MODE (TREE_TYPE (tem));
2758 /* This condition may hold if we're expanding the address
2759 right past the end of an array that turned out not to
2760 be addressable (i.e., the address was only computed in
2761 debug stmts). The gen_subreg below would rightfully
2762 crash, and the address doesn't really exist, so just
2763 drop it. */
2764 if (bitpos >= GET_MODE_BITSIZE (opmode))
2765 return NULL;
2767 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2768 return simplify_gen_subreg (mode, op0, opmode,
2769 bitpos / BITS_PER_UNIT);
2772 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2773 && TYPE_UNSIGNED (TREE_TYPE (exp))
2774 ? SIGN_EXTRACT
2775 : ZERO_EXTRACT, mode,
2776 GET_MODE (op0) != VOIDmode
2777 ? GET_MODE (op0)
2778 : TYPE_MODE (TREE_TYPE (tem)),
2779 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2782 case ABS_EXPR:
2783 return gen_rtx_ABS (mode, op0);
2785 case NEGATE_EXPR:
2786 return gen_rtx_NEG (mode, op0);
2788 case BIT_NOT_EXPR:
2789 return gen_rtx_NOT (mode, op0);
2791 case FLOAT_EXPR:
2792 if (unsignedp)
2793 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2794 else
2795 return gen_rtx_FLOAT (mode, op0);
2797 case FIX_TRUNC_EXPR:
2798 if (unsignedp)
2799 return gen_rtx_UNSIGNED_FIX (mode, op0);
2800 else
2801 return gen_rtx_FIX (mode, op0);
2803 case POINTER_PLUS_EXPR:
2804 /* For the rare target where pointers are not the same size as
2805 size_t, we need to check for mis-matched modes and correct
2806 the addend. */
2807 if (op0 && op1
2808 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2809 && GET_MODE (op0) != GET_MODE (op1))
2811 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2812 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2813 else
2814 /* We always sign-extend, regardless of the signedness of
2815 the operand, because the operand is always unsigned
2816 here even if the original C expression is signed. */
2817 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2819 /* Fall through. */
2820 case PLUS_EXPR:
2821 return gen_rtx_PLUS (mode, op0, op1);
2823 case MINUS_EXPR:
2824 return gen_rtx_MINUS (mode, op0, op1);
2826 case MULT_EXPR:
2827 return gen_rtx_MULT (mode, op0, op1);
2829 case RDIV_EXPR:
2830 case TRUNC_DIV_EXPR:
2831 case EXACT_DIV_EXPR:
2832 if (unsignedp)
2833 return gen_rtx_UDIV (mode, op0, op1);
2834 else
2835 return gen_rtx_DIV (mode, op0, op1);
2837 case TRUNC_MOD_EXPR:
2838 if (unsignedp)
2839 return gen_rtx_UMOD (mode, op0, op1);
2840 else
2841 return gen_rtx_MOD (mode, op0, op1);
2843 case FLOOR_DIV_EXPR:
2844 if (unsignedp)
2845 return gen_rtx_UDIV (mode, op0, op1);
2846 else
2848 rtx div = gen_rtx_DIV (mode, op0, op1);
2849 rtx mod = gen_rtx_MOD (mode, op0, op1);
2850 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2851 return gen_rtx_PLUS (mode, div, adj);
2854 case FLOOR_MOD_EXPR:
2855 if (unsignedp)
2856 return gen_rtx_UMOD (mode, op0, op1);
2857 else
2859 rtx mod = gen_rtx_MOD (mode, op0, op1);
2860 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2861 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2862 return gen_rtx_PLUS (mode, mod, adj);
2865 case CEIL_DIV_EXPR:
2866 if (unsignedp)
2868 rtx div = gen_rtx_UDIV (mode, op0, op1);
2869 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2870 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2871 return gen_rtx_PLUS (mode, div, adj);
2873 else
2875 rtx div = gen_rtx_DIV (mode, op0, op1);
2876 rtx mod = gen_rtx_MOD (mode, op0, op1);
2877 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2878 return gen_rtx_PLUS (mode, div, adj);
2881 case CEIL_MOD_EXPR:
2882 if (unsignedp)
2884 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2885 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2886 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2887 return gen_rtx_PLUS (mode, mod, adj);
2889 else
2891 rtx mod = gen_rtx_MOD (mode, op0, op1);
2892 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2893 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2894 return gen_rtx_PLUS (mode, mod, adj);
2897 case ROUND_DIV_EXPR:
2898 if (unsignedp)
2900 rtx div = gen_rtx_UDIV (mode, op0, op1);
2901 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2902 rtx adj = round_udiv_adjust (mode, mod, op1);
2903 return gen_rtx_PLUS (mode, div, adj);
2905 else
2907 rtx div = gen_rtx_DIV (mode, op0, op1);
2908 rtx mod = gen_rtx_MOD (mode, op0, op1);
2909 rtx adj = round_sdiv_adjust (mode, mod, op1);
2910 return gen_rtx_PLUS (mode, div, adj);
2913 case ROUND_MOD_EXPR:
2914 if (unsignedp)
2916 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2917 rtx adj = round_udiv_adjust (mode, mod, op1);
2918 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2919 return gen_rtx_PLUS (mode, mod, adj);
2921 else
2923 rtx mod = gen_rtx_MOD (mode, op0, op1);
2924 rtx adj = round_sdiv_adjust (mode, mod, op1);
2925 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2926 return gen_rtx_PLUS (mode, mod, adj);
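/* All of the *_MOD_EXPR branches above rely on the same identity:
   writing op0 = q * op1 + r for the truncated quotient q, a rounding
   mode that yields q' = q + adj has the matching remainder
   r' = op0 - q' * op1 = r - adj * op1, which is exactly the
   PLUS (mod, NEG (MULT (adj, op1))) built in each branch.  */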
2929 case LSHIFT_EXPR:
2930 return gen_rtx_ASHIFT (mode, op0, op1);
2932 case RSHIFT_EXPR:
2933 if (unsignedp)
2934 return gen_rtx_LSHIFTRT (mode, op0, op1);
2935 else
2936 return gen_rtx_ASHIFTRT (mode, op0, op1);
2938 case LROTATE_EXPR:
2939 return gen_rtx_ROTATE (mode, op0, op1);
2941 case RROTATE_EXPR:
2942 return gen_rtx_ROTATERT (mode, op0, op1);
2944 case MIN_EXPR:
2945 if (unsignedp)
2946 return gen_rtx_UMIN (mode, op0, op1);
2947 else
2948 return gen_rtx_SMIN (mode, op0, op1);
2950 case MAX_EXPR:
2951 if (unsignedp)
2952 return gen_rtx_UMAX (mode, op0, op1);
2953 else
2954 return gen_rtx_SMAX (mode, op0, op1);
2956 case BIT_AND_EXPR:
2957 case TRUTH_AND_EXPR:
2958 return gen_rtx_AND (mode, op0, op1);
2960 case BIT_IOR_EXPR:
2961 case TRUTH_OR_EXPR:
2962 return gen_rtx_IOR (mode, op0, op1);
2964 case BIT_XOR_EXPR:
2965 case TRUTH_XOR_EXPR:
2966 return gen_rtx_XOR (mode, op0, op1);
2968 case TRUTH_ANDIF_EXPR:
2969 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2971 case TRUTH_ORIF_EXPR:
2972 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2974 case TRUTH_NOT_EXPR:
2975 return gen_rtx_EQ (mode, op0, const0_rtx);
2977 case LT_EXPR:
2978 if (unsignedp)
2979 return gen_rtx_LTU (mode, op0, op1);
2980 else
2981 return gen_rtx_LT (mode, op0, op1);
2983 case LE_EXPR:
2984 if (unsignedp)
2985 return gen_rtx_LEU (mode, op0, op1);
2986 else
2987 return gen_rtx_LE (mode, op0, op1);
2989 case GT_EXPR:
2990 if (unsignedp)
2991 return gen_rtx_GTU (mode, op0, op1);
2992 else
2993 return gen_rtx_GT (mode, op0, op1);
2995 case GE_EXPR:
2996 if (unsignedp)
2997 return gen_rtx_GEU (mode, op0, op1);
2998 else
2999 return gen_rtx_GE (mode, op0, op1);
3001 case EQ_EXPR:
3002 return gen_rtx_EQ (mode, op0, op1);
3004 case NE_EXPR:
3005 return gen_rtx_NE (mode, op0, op1);
3007 case UNORDERED_EXPR:
3008 return gen_rtx_UNORDERED (mode, op0, op1);
3010 case ORDERED_EXPR:
3011 return gen_rtx_ORDERED (mode, op0, op1);
3013 case UNLT_EXPR:
3014 return gen_rtx_UNLT (mode, op0, op1);
3016 case UNLE_EXPR:
3017 return gen_rtx_UNLE (mode, op0, op1);
3019 case UNGT_EXPR:
3020 return gen_rtx_UNGT (mode, op0, op1);
3022 case UNGE_EXPR:
3023 return gen_rtx_UNGE (mode, op0, op1);
3025 case UNEQ_EXPR:
3026 return gen_rtx_UNEQ (mode, op0, op1);
3028 case LTGT_EXPR:
3029 return gen_rtx_LTGT (mode, op0, op1);
3031 case COND_EXPR:
3032 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3034 case COMPLEX_EXPR:
3035 gcc_assert (COMPLEX_MODE_P (mode));
3036 if (GET_MODE (op0) == VOIDmode)
3037 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3038 if (GET_MODE (op1) == VOIDmode)
3039 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3040 return gen_rtx_CONCAT (mode, op0, op1);
3042 case CONJ_EXPR:
3043 if (GET_CODE (op0) == CONCAT)
3044 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3045 gen_rtx_NEG (GET_MODE_INNER (mode),
3046 XEXP (op0, 1)));
3047 else
3049 enum machine_mode imode = GET_MODE_INNER (mode);
3050 rtx re, im;
3052 if (MEM_P (op0))
3054 re = adjust_address_nv (op0, imode, 0);
3055 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3057 else
3059 enum machine_mode ifmode = int_mode_for_mode (mode);
3060 enum machine_mode ihmode = int_mode_for_mode (imode);
3061 rtx halfsize;
3062 if (ifmode == BLKmode || ihmode == BLKmode)
3063 return NULL;
3064 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3065 re = op0;
3066 if (mode != ifmode)
3067 re = gen_rtx_SUBREG (ifmode, re, 0);
3068 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3069 if (imode != ihmode)
3070 re = gen_rtx_SUBREG (imode, re, 0);
3071 im = copy_rtx (op0);
3072 if (mode != ifmode)
3073 im = gen_rtx_SUBREG (ifmode, im, 0);
3074 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3075 if (imode != ihmode)
3076 im = gen_rtx_SUBREG (imode, im, 0);
3078 im = gen_rtx_NEG (imode, im);
3079 return gen_rtx_CONCAT (mode, re, im);
3082 case ADDR_EXPR:
3083 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3084 if (!op0 || !MEM_P (op0))
3086 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3087 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3088 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3089 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3090 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3092 if (handled_component_p (TREE_OPERAND (exp, 0)))
3094 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3095 tree decl
3096 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3097 &bitoffset, &bitsize, &maxsize);
3098 if ((TREE_CODE (decl) == VAR_DECL
3099 || TREE_CODE (decl) == PARM_DECL
3100 || TREE_CODE (decl) == RESULT_DECL)
3101 && !TREE_ADDRESSABLE (decl)
3102 && (bitoffset % BITS_PER_UNIT) == 0
3103 && bitsize > 0
3104 && bitsize == maxsize)
3105 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3106 bitoffset / BITS_PER_UNIT);
3109 return NULL;
3112 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3113 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3115 return op0;
3117 case VECTOR_CST:
3118 exp = build_constructor_from_list (TREE_TYPE (exp),
3119 TREE_VECTOR_CST_ELTS (exp));
3120 /* Fall through. */
3122 case CONSTRUCTOR:
3123 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3125 unsigned i;
3126 tree val;
3128 op0 = gen_rtx_CONCATN
3129 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3131 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3133 op1 = expand_debug_expr (val);
3134 if (!op1)
3135 return NULL;
3136 XVECEXP (op0, 0, i) = op1;
3139 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3141 op1 = expand_debug_expr
3142 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3144 if (!op1)
3145 return NULL;
3147 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3148 XVECEXP (op0, 0, i) = op1;
3151 return op0;
3153 else
3154 goto flag_unsupported;
3156 case CALL_EXPR:
3157 /* ??? Maybe handle some builtins? */
3158 return NULL;
3160 case SSA_NAME:
3162 gimple g = get_gimple_for_ssa_name (exp);
3163 if (g)
3165 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3166 if (!op0)
3167 return NULL;
3169 else
3171 int part = var_to_partition (SA.map, exp);
3173 if (part == NO_PARTITION)
3175 /* If this is a reference to an incoming value of a parameter
3176 that is never used in the code, or where the incoming
3177 value itself is never used in the code, use the PARM_DECL's
3178 DECL_RTL if set. */
3179 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3180 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3182 rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
3183 if (incoming
3184 && GET_MODE (incoming) != BLKmode
3185 && (REG_P (incoming)
3186 || (MEM_P (incoming) && REG_P (XEXP (incoming, 0)))))
3188 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3189 ENTRY_VALUE_EXP (op0) = incoming;
3190 goto adjust_mode;
3192 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3193 if (!op0)
3194 return NULL;
3195 goto adjust_mode;
3197 return NULL;
3200 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3202 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3204 goto adjust_mode;
3207 case ERROR_MARK:
3208 return NULL;
3210 /* Vector stuff. For most of the codes we don't have rtl codes. */
3211 case REALIGN_LOAD_EXPR:
3212 case REDUC_MAX_EXPR:
3213 case REDUC_MIN_EXPR:
3214 case REDUC_PLUS_EXPR:
3215 case VEC_COND_EXPR:
3216 case VEC_EXTRACT_EVEN_EXPR:
3217 case VEC_EXTRACT_ODD_EXPR:
3218 case VEC_INTERLEAVE_HIGH_EXPR:
3219 case VEC_INTERLEAVE_LOW_EXPR:
3220 case VEC_LSHIFT_EXPR:
3221 case VEC_PACK_FIX_TRUNC_EXPR:
3222 case VEC_PACK_SAT_EXPR:
3223 case VEC_PACK_TRUNC_EXPR:
3224 case VEC_RSHIFT_EXPR:
3225 case VEC_UNPACK_FLOAT_HI_EXPR:
3226 case VEC_UNPACK_FLOAT_LO_EXPR:
3227 case VEC_UNPACK_HI_EXPR:
3228 case VEC_UNPACK_LO_EXPR:
3229 case VEC_WIDEN_MULT_HI_EXPR:
3230 case VEC_WIDEN_MULT_LO_EXPR:
3231 return NULL;
3233 /* Misc codes. */
3234 case ADDR_SPACE_CONVERT_EXPR:
3235 case FIXED_CONVERT_EXPR:
3236 case OBJ_TYPE_REF:
3237 case WITH_SIZE_EXPR:
3238 return NULL;
3240 case DOT_PROD_EXPR:
3241 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3242 && SCALAR_INT_MODE_P (mode))
3244 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3245 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3246 else
3247 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3248 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3249 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3250 else
3251 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3252 op0 = gen_rtx_MULT (mode, op0, op1);
3253 return gen_rtx_PLUS (mode, op0, op2);
3255 return NULL;
3257 case WIDEN_MULT_EXPR:
3258 case WIDEN_MULT_PLUS_EXPR:
3259 case WIDEN_MULT_MINUS_EXPR:
3260 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3261 && SCALAR_INT_MODE_P (mode))
3263 enum machine_mode inner_mode = GET_MODE (op0);
3264 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3265 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3266 else
3267 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3268 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3269 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3270 else
3271 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3272 op0 = gen_rtx_MULT (mode, op0, op1);
3273 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3274 return op0;
3275 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3276 return gen_rtx_PLUS (mode, op0, op2);
3277 else
3278 return gen_rtx_MINUS (mode, op2, op0);
3280 return NULL;
3282 case WIDEN_SUM_EXPR:
3283 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3284 && SCALAR_INT_MODE_P (mode))
3286 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3287 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3288 else
3289 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3290 return gen_rtx_PLUS (mode, op0, op1);
3292 return NULL;
3294 case FMA_EXPR:
3295 return gen_rtx_FMA (mode, op0, op1, op2);
3297 default:
3298 flag_unsupported:
3299 #ifdef ENABLE_CHECKING
3300 debug_tree (exp);
3301 gcc_unreachable ();
3302 #else
3303 return NULL;
3304 #endif
3308 /* Expand the _LOCs in debug insns. We run this after expanding all
3309 regular insns, so that any variables referenced in the function
3310 will have their DECL_RTLs set. */
3312 static void
3313 expand_debug_locations (void)
3315 rtx insn;
3316 rtx last = get_last_insn ();
3317 int save_strict_alias = flag_strict_aliasing;
3319 /* New alias sets created while setting up memory attributes cause
3320 -fcompare-debug failures, even though they don't bring about any
3321 codegen changes. */
3322 flag_strict_aliasing = 0;
3324 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3325 if (DEBUG_INSN_P (insn))
3327 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3328 rtx val;
3329 enum machine_mode mode;
3331 if (value == NULL_TREE)
3332 val = NULL_RTX;
3333 else
3335 val = expand_debug_expr (value);
3336 gcc_assert (last == get_last_insn ());
3339 if (!val)
3340 val = gen_rtx_UNKNOWN_VAR_LOC ();
3341 else
3343 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3345 gcc_assert (mode == GET_MODE (val)
3346 || (GET_MODE (val) == VOIDmode
3347 && (CONST_INT_P (val)
3348 || GET_CODE (val) == CONST_FIXED
3349 || GET_CODE (val) == CONST_DOUBLE
3350 || GET_CODE (val) == LABEL_REF)));
3353 INSN_VAR_LOCATION_LOC (insn) = val;
3356 flag_strict_aliasing = save_strict_alias;
3359 /* Expand basic block BB from GIMPLE trees to RTL. */
3361 static basic_block
3362 expand_gimple_basic_block (basic_block bb)
3364 gimple_stmt_iterator gsi;
3365 gimple_seq stmts;
3366 gimple stmt = NULL;
3367 rtx note, last;
3368 edge e;
3369 edge_iterator ei;
3370 void **elt;
3372 if (dump_file)
3373 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3374 bb->index);
3376 /* Note that since we are now transitioning from GIMPLE to RTL, we
3377 cannot use the gsi_*_bb() routines because they expect the basic
3378 block to be in GIMPLE, instead of RTL. Therefore, we need to
3379 access the BB sequence directly. */
3380 stmts = bb_seq (bb);
3381 bb->il.gimple = NULL;
3382 rtl_profile_for_bb (bb);
3383 init_rtl_bb_info (bb);
3384 bb->flags |= BB_RTL;
3386 /* Remove the RETURN_EXPR if we may fall through to the exit
3387 instead. */
3388 gsi = gsi_last (stmts);
3389 if (!gsi_end_p (gsi)
3390 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3392 gimple ret_stmt = gsi_stmt (gsi);
3394 gcc_assert (single_succ_p (bb));
3395 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3397 if (bb->next_bb == EXIT_BLOCK_PTR
3398 && !gimple_return_retval (ret_stmt))
3400 gsi_remove (&gsi, false);
3401 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3405 gsi = gsi_start (stmts);
3406 if (!gsi_end_p (gsi))
3408 stmt = gsi_stmt (gsi);
3409 if (gimple_code (stmt) != GIMPLE_LABEL)
3410 stmt = NULL;
3413 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3415 if (stmt || elt)
3417 last = get_last_insn ();
3419 if (stmt)
3421 expand_gimple_stmt (stmt);
3422 gsi_next (&gsi);
3425 if (elt)
3426 emit_label ((rtx) *elt);
3428 /* Java emits line number notes at the top of labels.
3429 ??? Make this go away once line number notes are obsoleted. */
3430 BB_HEAD (bb) = NEXT_INSN (last);
3431 if (NOTE_P (BB_HEAD (bb)))
3432 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3433 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3435 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3437 else
3438 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3440 NOTE_BASIC_BLOCK (note) = bb;
3442 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3444 basic_block new_bb;
3446 stmt = gsi_stmt (gsi);
3448 /* If this statement is a non-debug one, and we generate debug
3449 insns, then this one might be the last real use of a TERed
3450 SSA_NAME, but where there are still some debug uses further
3451 down. Expanding the current SSA name in such further debug
3452 uses by their RHS might lead to wrong debug info, as coalescing
3453 might make the operands of such RHS be placed into the same
3454 pseudo as something else. Like so:
3455 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3456 use(a_1);
3457 a_2 = ...
3458 #DEBUG ... => a_1
3459 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3460 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3461 the write to a_2 would actually have clobbered the place which
3462 formerly held a_0.
3464 So, instead of that, we recognize the situation, and generate
3465 debug temporaries at the last real use of TERed SSA names:
3466 a_1 = a_0 + 1;
3467 #DEBUG #D1 => a_1
3468 use(a_1);
3469 a_2 = ...
3470 #DEBUG ... => #D1
3472 if (MAY_HAVE_DEBUG_INSNS
3473 && SA.values
3474 && !is_gimple_debug (stmt))
3476 ssa_op_iter iter;
3477 tree op;
3478 gimple def;
3480 location_t sloc = get_curr_insn_source_location ();
3481 tree sblock = get_curr_insn_block ();
3483 /* Look for SSA names that have their last use here (TERed
3484 names always have only one real use). */
3485 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3486 if ((def = get_gimple_for_ssa_name (op)))
3488 imm_use_iterator imm_iter;
3489 use_operand_p use_p;
3490 bool have_debug_uses = false;
3492 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3494 if (gimple_debug_bind_p (USE_STMT (use_p)))
3496 have_debug_uses = true;
3497 break;
3501 if (have_debug_uses)
3503 /* OP is a TERed SSA name, with DEF being its defining
3504 statement, and where OP is used in further debug
3505 instructions. Generate a debug temporary, and
3506 replace all uses of OP in debug insns with that
3507 temporary. */
3508 gimple debugstmt;
3509 tree value = gimple_assign_rhs_to_tree (def);
3510 tree vexpr = make_node (DEBUG_EXPR_DECL);
3511 rtx val;
3512 enum machine_mode mode;
3514 set_curr_insn_source_location (gimple_location (def));
3515 set_curr_insn_block (gimple_block (def));
3517 DECL_ARTIFICIAL (vexpr) = 1;
3518 TREE_TYPE (vexpr) = TREE_TYPE (value);
3519 if (DECL_P (value))
3520 mode = DECL_MODE (value);
3521 else
3522 mode = TYPE_MODE (TREE_TYPE (value));
3523 DECL_MODE (vexpr) = mode;
3525 val = gen_rtx_VAR_LOCATION
3526 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3528 val = emit_debug_insn (val);
3530 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3532 if (!gimple_debug_bind_p (debugstmt))
3533 continue;
3535 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3536 SET_USE (use_p, vexpr);
3538 update_stmt (debugstmt);
3542 set_curr_insn_source_location (sloc);
3543 set_curr_insn_block (sblock);
3546 currently_expanding_gimple_stmt = stmt;
3548 /* Expand this statement, then evaluate the resulting RTL and
3549 fixup the CFG accordingly. */
3550 if (gimple_code (stmt) == GIMPLE_COND)
3552 new_bb = expand_gimple_cond (bb, stmt);
3553 if (new_bb)
3554 return new_bb;
3556 else if (gimple_debug_bind_p (stmt))
3558 location_t sloc = get_curr_insn_source_location ();
3559 tree sblock = get_curr_insn_block ();
3560 gimple_stmt_iterator nsi = gsi;
3562 for (;;)
3564 tree var = gimple_debug_bind_get_var (stmt);
3565 tree value;
3566 rtx val;
3567 enum machine_mode mode;
3569 if (gimple_debug_bind_has_value_p (stmt))
3570 value = gimple_debug_bind_get_value (stmt);
3571 else
3572 value = NULL_TREE;
3574 last = get_last_insn ();
3576 set_curr_insn_source_location (gimple_location (stmt));
3577 set_curr_insn_block (gimple_block (stmt));
3579 if (DECL_P (var))
3580 mode = DECL_MODE (var);
3581 else
3582 mode = TYPE_MODE (TREE_TYPE (var));
3584 val = gen_rtx_VAR_LOCATION
3585 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3587 val = emit_debug_insn (val);
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 /* We can't dump the insn with a TREE where an RTX
3592 is expected. */
3593 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3594 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3595 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3598 /* In order not to generate too many debug temporaries,
3599 we delink all uses of debug statements we already expanded.
3600 Therefore debug statements between definition and real
3601 use of TERed SSA names will continue to use the SSA name,
3602 and not be replaced with debug temps. */
3603 delink_stmt_imm_use (stmt);
3605 gsi = nsi;
3606 gsi_next (&nsi);
3607 if (gsi_end_p (nsi))
3608 break;
3609 stmt = gsi_stmt (nsi);
3610 if (!gimple_debug_bind_p (stmt))
3611 break;
3614 set_curr_insn_source_location (sloc);
3615 set_curr_insn_block (sblock);
3617 else
3619 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3621 bool can_fallthru;
3622 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3623 if (new_bb)
3625 if (can_fallthru)
3626 bb = new_bb;
3627 else
3628 return new_bb;
3631 else
3633 def_operand_p def_p;
3634 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3636 if (def_p != NULL)
3638 /* Ignore this stmt if it is in the list of
3639 replaceable expressions. */
3640 if (SA.values
3641 && bitmap_bit_p (SA.values,
3642 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3643 continue;
3645 last = expand_gimple_stmt (stmt);
3646 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3651 currently_expanding_gimple_stmt = NULL;
3653 /* Expand implicit goto and convert goto_locus. */
3654 FOR_EACH_EDGE (e, ei, bb->succs)
3656 if (e->goto_locus && e->goto_block)
3658 set_curr_insn_source_location (e->goto_locus);
3659 set_curr_insn_block (e->goto_block);
3660 e->goto_locus = curr_insn_locator ();
3662 e->goto_block = NULL;
3663 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3665 emit_jump (label_rtx_for_bb (e->dest));
3666 e->flags &= ~EDGE_FALLTHRU;
3670 /* Expanded RTL can create a jump in the last instruction of the block.
3671 This might later be assumed to be a jump to the successor and break edge insertion.
3672 We need to insert a dummy move to prevent this. PR41440. */
3673 if (single_succ_p (bb)
3674 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3675 && (last = get_last_insn ())
3676 && JUMP_P (last))
3678 rtx dummy = gen_reg_rtx (SImode);
3679 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3682 do_pending_stack_adjust ();
3684 /* Find the block tail. The last insn in the block is the insn
3685 before a barrier and/or table jump insn. */
3686 last = get_last_insn ();
3687 if (BARRIER_P (last))
3688 last = PREV_INSN (last);
3689 if (JUMP_TABLE_DATA_P (last))
3690 last = PREV_INSN (PREV_INSN (last));
3691 BB_END (bb) = last;
3693 update_bb_for_insn (bb);
3695 return bb;
3699 /* Create a basic block for initialization code. */
3701 static basic_block
3702 construct_init_block (void)
3704 basic_block init_block, first_block;
3705 edge e = NULL;
3706 int flags;
3708 /* Multiple entry points not supported yet. */
3709 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3710 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3711 init_rtl_bb_info (EXIT_BLOCK_PTR);
3712 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3713 EXIT_BLOCK_PTR->flags |= BB_RTL;
3715 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3717 /* When the entry edge points to the first basic block, we don't need a jump;
3718 otherwise we have to jump to the proper target. */
3719 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3721 tree label = gimple_block_label (e->dest);
3723 emit_jump (label_rtx (label));
3724 flags = 0;
3726 else
3727 flags = EDGE_FALLTHRU;
3729 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3730 get_last_insn (),
3731 ENTRY_BLOCK_PTR);
3732 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3733 init_block->count = ENTRY_BLOCK_PTR->count;
3734 if (e)
3736 first_block = e->dest;
3737 redirect_edge_succ (e, init_block);
3738 e = make_edge (init_block, first_block, flags);
3740 else
3741 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3742 e->probability = REG_BR_PROB_BASE;
3743 e->count = ENTRY_BLOCK_PTR->count;
3745 update_bb_for_insn (init_block);
3746 return init_block;
3749 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3750 found in the block tree. */
3752 static void
3753 set_block_levels (tree block, int level)
3755 while (block)
3757 BLOCK_NUMBER (block) = level;
3758 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3759 block = BLOCK_CHAIN (block);
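/* For example, set_block_levels (DECL_INITIAL (fndecl), 0) on a scope
   tree with one nested scope inside another numbers the outermost
   block 0, its subblock 1 and the innermost block 2, while sibling
   blocks reached through BLOCK_CHAIN all receive the same level.  */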
3763 /* Create a block containing landing pads and similar stuff. */
3765 static void
3766 construct_exit_block (void)
3768 rtx head = get_last_insn ();
3769 rtx end;
3770 basic_block exit_block;
3771 edge e, e2;
3772 unsigned ix;
3773 edge_iterator ei;
3774 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3776 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3778 /* Make sure the locus is set to the end of the function, so that
3779 epilogue line numbers and warnings are set properly. */
3780 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3781 input_location = cfun->function_end_locus;
3783 /* The following insns belong to the top scope. */
3784 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3786 /* Generate rtl for function exit. */
3787 expand_function_end ();
3789 end = get_last_insn ();
3790 if (head == end)
3791 return;
3792 /* While emitting the function end we could have moved the end of the last basic block. */
3794 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3795 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3796 head = NEXT_INSN (head);
3797 exit_block = create_basic_block (NEXT_INSN (head), end,
3798 EXIT_BLOCK_PTR->prev_bb);
3799 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3800 exit_block->count = EXIT_BLOCK_PTR->count;
3802 ix = 0;
3803 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3805 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3806 if (!(e->flags & EDGE_ABNORMAL))
3807 redirect_edge_succ (e, exit_block);
3808 else
3809 ix++;
3812 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3813 e->probability = REG_BR_PROB_BASE;
3814 e->count = EXIT_BLOCK_PTR->count;
3815 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3816 if (e2 != e)
3818 e->count -= e2->count;
3819 exit_block->count -= e2->count;
3820 exit_block->frequency -= EDGE_FREQUENCY (e2);
3822 if (e->count < 0)
3823 e->count = 0;
3824 if (exit_block->count < 0)
3825 exit_block->count = 0;
3826 if (exit_block->frequency < 0)
3827 exit_block->frequency = 0;
3828 update_bb_for_insn (exit_block);
3831 /* Helper function for discover_nonconstant_array_refs.
3832 Look for ARRAY_REF nodes with non-constant indexes and mark them
3833 addressable. */
3835 static tree
3836 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3837 void *data ATTRIBUTE_UNUSED)
3839 tree t = *tp;
3841 if (IS_TYPE_OR_DECL_P (t))
3842 *walk_subtrees = 0;
3843 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3845 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3846 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3847 && (!TREE_OPERAND (t, 2)
3848 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3849 || (TREE_CODE (t) == COMPONENT_REF
3850 && (!TREE_OPERAND (t,2)
3851 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3852 || TREE_CODE (t) == BIT_FIELD_REF
3853 || TREE_CODE (t) == REALPART_EXPR
3854 || TREE_CODE (t) == IMAGPART_EXPR
3855 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3856 || CONVERT_EXPR_P (t))
3857 t = TREE_OPERAND (t, 0);
3859 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3861 t = get_base_address (t);
3862 if (t && DECL_P (t)
3863 && DECL_MODE (t) != BLKmode)
3864 TREE_ADDRESSABLE (t) = 1;
3867 *walk_subtrees = 0;
3870 return NULL_TREE;
3873 /* RTL expansion is not able to compile array references with variable
3874 offsets for arrays stored in a single register. Discover such
3875 expressions and mark variables as addressable to avoid this
3876 scenario. */
3878 static void
3879 discover_nonconstant_array_refs (void)
3881 basic_block bb;
3882 gimple_stmt_iterator gsi;
3884 FOR_EACH_BB (bb)
3885 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3887 gimple stmt = gsi_stmt (gsi);
3888 if (!is_gimple_debug (stmt))
3889 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
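/* A hypothetical input this would catch (assuming a target where the
   generic vector below gets a single-register vector mode, so that V
   is not BLKmode):

     typedef int v4si __attribute__ ((vector_size (16)));

     int
     pick (v4si v, int i)
     {
       return v[i];
     }

   The variable index I makes the ARRAY_REF non-constant, so V is
   marked TREE_ADDRESSABLE and forced into memory, where the variable
   offset can be applied.  */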
3893 /* This function sets crtl->args.internal_arg_pointer to a virtual
3894 register if DRAP is needed. The local register allocator will replace
3895 virtual_incoming_args_rtx with the virtual register. */
static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set the incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may use
     the minimum incoming stack alignment to check whether it is OK to
     perform sibcall optimization, since sibcall optimization will only
     align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     the stack.  We check PREFERRED_STACK_BOUNDARY if there may be
     non-call exceptions, since the callgraph doesn't collect the
     incoming stack alignment in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);
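  /* Worked example with assumed numbers (illustrative only): if
     INCOMING_STACK_BOUNDARY is 128 bits but a 32-byte-aligned local
     pushes stack_alignment_estimated to 256 bits, then 128 < 256 below
     and the prologue must realign the stack, possibly via DRAP.  */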
  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up the REG_EQUIV notes if DRAP
	 is needed.  */
      fixup_tail_calls ();
    }
}
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do the conversion per basic block and preserve/update the tree
   CFG.  This implies we have to do some magic as the CFG can
   simultaneously consist of basic blocks containing RTL and GIMPLE
   trees.  This can confuse the CFG hooks, so be careful not to
   manipulate the CFG during the expansion.  */
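/* As a sketch of what expansion does (illustrative and target-dependent,
   not part of this file), a GIMPLE assignment such as

       a_2 = b_1 + 4;

   becomes an RTL insn along the lines of

       (set (reg:SI 60 [ a ]) (plus:SI (reg:SI 59 [ b ]) (const_int 4)))

   while the basic block structure of the CFG is preserved.  */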
static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx var_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
					    sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
	set_curr_insn_source_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure the first insn is a note, even if we don't want line
     numbers.  This guarantees that the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
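  /* E.g., assuming the default --param ssp-buffer-size=8 (illustrative
     numbers), a function whose largest local array is `char buf[4]'
     gets no guard, and the second warning above fires.  */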
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 to select one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }
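  /* For example (illustrative): in `int f (int x) { return x; }', the
     partition holding the SSA name of the PARM_DECL `x' typically has
     no pseudo assigned by out-of-SSA, so the loop above copies DECL_RTL
     of `x', i.e. the RTL in which the argument arrived.  */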
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile-aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      /* Avoid putting insns before parm_birth_insn.  */
	      if (e->src == ENTRY_BLOCK_PTR
		  && single_succ_p (ENTRY_BLOCK_PTR)
		  && parm_birth_insn)
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
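  /* Expansion may have introduced new jumps and labels inside what used
     to be a single basic block, so rescan every block (all bits set in
     BLOCKS) and let find_many_sub_basic_blocks split them up.  */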
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};