/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;
/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy with the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
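  /* Round the running offset to the requested alignment.  ALIGN is a
     power of two, so masking with -align clears the low bits: when the
     frame grows downward we subtract SIZE first and round the result
     down, otherwise we round the old offset up before carving out SIZE.
     frame_phase is peeled off and re-added so the rounding happens
     relative to the true alignment of virtual_stack_vars_rtx.  */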
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
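  /* Conflicts are recorded symmetrically by add_stack_var_conflict, so
     testing A's bitmap for Y is sufficient.  */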
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, we must nevertheless add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures containing unions.  Else RTL alias analysis
   comes along and due to type based aliasing rules decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;
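      /* stack_vars_sorted is ordered by non-decreasing size, so scanning
         SJ downward from SI visits the largest non-conflicting object
         that still fits first, as the algorithm above requires.  */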
      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
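      /* OFFSET & -OFFSET isolates the lowest set bit, i.e. the largest
         power of two dividing OFFSET, which is the alignment the slot
         actually received from alloc_stack_frame_space.  */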
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);
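  /* For pointers, record the alignment of the pointed-to type so that
     later RTL passes can rely on it when forming addresses.  */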
  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
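          /* PARAM_SSP_BUFFER_SIZE is the --param ssp-buffer-size value
             (8 by default); character arrays at least that large are the
             primary target of -fstack-protector.  */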
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;
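  /* flag_stack_protect == 2 corresponds to -fstack-protector-all, which
     segregates any array at all, not just large character arrays.  */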
  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree, accounting for the sizes of used variables without
   actually expanding them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
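  /* STARTING_FRAME_OFFSET need not be a multiple of the preferred stack
     boundary; frame_phase records the residue so alloc_stack_frame_space
     can round allocations relative to the real alignment of
     virtual_stack_vars_rtx.  */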
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }
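  /* Neither edge falls through: emit the conditional jump to the true
     destination, an unconditional jump to the false destination, and
     split off a new basic block to hold the latter.  */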
  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;
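  /* A CALL_EXPR carries three fixed operands (the operand count slot,
     the function, and the static chain) ahead of the arguments, hence
     the '+ 3'.  */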
  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }

      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
1944 /* Expand one gimple statement STMT and return the last RTL instruction
1945 before any of the newly generated ones.
1947 In addition to generating the necessary RTL instructions this also
1948 sets REG_EH_REGION notes if necessary and sets the current source
1949 location for diagnostics. */
1951 static rtx
1952 expand_gimple_stmt (gimple stmt)
1954 int lp_nr = 0;
1955 rtx last = NULL;
1956 location_t saved_location = input_location;
1958 last = get_last_insn ();
1960 /* If this is an expression of some kind and it has an associated line
1961 number, then emit the line number before expanding the expression.
1963 We need to save and restore the file and line information so that
1964 errors discovered during expansion are emitted with the right
1965 information. It would be better if the diagnostic routines
1966 used the file/line information embedded in the tree nodes rather
1967 than globals. */
1968 gcc_assert (cfun);
1970 if (gimple_has_location (stmt))
1972 input_location = gimple_location (stmt);
1973 set_curr_insn_source_location (input_location);
1975 /* Record where the insns produced belong. */
1976 set_curr_insn_block (gimple_block (stmt));
1979 expand_gimple_stmt_1 (stmt);
1980 /* Free any temporaries used to evaluate this statement. */
1981 free_temp_slots ();
1983 input_location = saved_location;
1985 /* Mark all insns that may trap. */
1986 lp_nr = lookup_stmt_eh_lp (stmt);
1987 if (lp_nr)
1989 rtx insn;
1990 for (insn = next_real_insn (last); insn;
1991 insn = next_real_insn (insn))
1993 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1994 /* If we want exceptions for non-call insns, any
1995 may_trap_p instruction may throw. */
1996 && GET_CODE (PATTERN (insn)) != CLOBBER
1997 && GET_CODE (PATTERN (insn)) != USE
1998 && insn_could_throw_p (insn))
1999 make_reg_eh_region_note (insn, 0, lp_nr);
2003 return last;
2006 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2007 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2008 generated a tail call (something that might be denied by the ABI
2009 rules governing the call; see calls.c).
2011 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2012 can still reach the rest of BB. The case here is __builtin_sqrt,
2013 where the NaN result goes through the external function (with a
2014 tailcall) and the normal result happens via a sqrt instruction. */
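/* An illustrative sketch (not actual target output) of that
   __builtin_sqrt shape:

	r = sqrt-insn (x)
	if (r ordered) goto join
	sibcall sqrt (x)	;; NaN path tail-calls the library
      join:
	...uses of r...

   Here the sibcall is conditional, so control can still fall through
   to JOIN and *CAN_FALLTHRU must be set.  */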
2016 static basic_block
2017 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2019 rtx last2, last;
2020 edge e;
2021 edge_iterator ei;
2022 int probability;
2023 gcov_type count;
2025 last2 = last = expand_gimple_stmt (stmt);
2027 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2028 if (CALL_P (last) && SIBLING_CALL_P (last))
2029 goto found;
2031 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2033 *can_fallthru = true;
2034 return NULL;
2036 found:
2037 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2038 Any instructions emitted here are about to be deleted. */
2039 do_pending_stack_adjust ();
2041 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2042 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2043 EH or abnormal edges, we shouldn't have created a tail call in
2044 the first place. So it seems to me we should just be removing
2045 all edges here, or redirecting the existing fallthru edge to
2046 the exit block. */
2048 probability = 0;
2049 count = 0;
2051 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2053 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2055 if (e->dest != EXIT_BLOCK_PTR)
2057 e->dest->count -= e->count;
2058 e->dest->frequency -= EDGE_FREQUENCY (e);
2059 if (e->dest->count < 0)
2060 e->dest->count = 0;
2061 if (e->dest->frequency < 0)
2062 e->dest->frequency = 0;
2064 count += e->count;
2065 probability += e->probability;
2066 remove_edge (e);
2068 else
2069 ei_next (&ei);
2072 /* This is somewhat ugly: the call_expr expander often emits instructions
2073 after the sibcall (to perform the function return). These confuse the
2074 find_many_sub_basic_blocks code, so we need to get rid of them. */
2075 last = NEXT_INSN (last);
2076 gcc_assert (BARRIER_P (last));
2078 *can_fallthru = false;
2079 while (NEXT_INSN (last))
2081 /* For instance, the sqrt builtin expander expands an if with a
2082 sibcall in the then-arm and a label for the else-arm. */
2083 if (LABEL_P (NEXT_INSN (last)))
2085 *can_fallthru = true;
2086 break;
2088 delete_insn (NEXT_INSN (last));
2091 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2092 e->probability += probability;
2093 e->count += count;
2094 BB_END (bb) = last;
2095 update_bb_for_insn (bb);
2097 if (NEXT_INSN (last))
2099 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2101 last = BB_END (bb);
2102 if (BARRIER_P (last))
2103 BB_END (bb) = PREV_INSN (last);
2106 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2108 return bb;
2111 /* Return the difference between the floor and the truncated result of
2112 a signed division by OP1 with remainder MOD. */
2113 static rtx
2114 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2116 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2117 return gen_rtx_IF_THEN_ELSE
2118 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2119 gen_rtx_IF_THEN_ELSE
2120 (mode, gen_rtx_LT (BImode,
2121 gen_rtx_DIV (mode, op1, mod),
2122 const0_rtx),
2123 constm1_rtx, const0_rtx),
2124 const0_rtx);
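/* A worked example: with OP0 = -7 and OP1 = 2, truncation gives -3
   with MOD = -1, while the floor result is -4.  MOD is nonzero and
   carries the sign of OP0, so OP1 / MOD = 2 / -1 < 0 exactly when the
   operand signs differ, and the adjustment is -1 as required.  */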
2127 /* Return the difference between the ceil and the truncated result of
2128 a signed division by OP1 with remainder MOD. */
2129 static rtx
2130 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2132 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2133 return gen_rtx_IF_THEN_ELSE
2134 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2135 gen_rtx_IF_THEN_ELSE
2136 (mode, gen_rtx_GT (BImode,
2137 gen_rtx_DIV (mode, op1, mod),
2138 const0_rtx),
2139 const1_rtx, const0_rtx),
2140 const0_rtx);
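/* A worked example: with OP0 = 7 and OP1 = 2, truncation gives 3 with
   MOD = 1, while the ceiling result is 4.  OP1 / MOD = 2 / 1 > 0 since
   the operand signs agree, so the adjustment is +1.  */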
2143 /* Return the difference between the ceil and the truncated result of
2144 an unsigned division by OP1 with remainder MOD. */
2145 static rtx
2146 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2148 /* (mod != 0 ? 1 : 0) */
2149 return gen_rtx_IF_THEN_ELSE
2150 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2151 const1_rtx, const0_rtx);
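/* A worked example: 7 divided by 2 truncates to 3 with MOD = 1; any
   nonzero remainder means the ceiling result is one larger, here 4.  */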
2154 /* Return the difference between the rounded and the truncated result
2155 of a signed division by OP1 with remainder MOD. Halfway cases are
2156 rounded away from zero, rather than to the nearest even number. */
2157 static rtx
2158 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2160 /* (abs (mod) >= abs (op1) - abs (mod)
2161 ? (op1 / mod > 0 ? 1 : -1)
2162 : 0) */
2163 return gen_rtx_IF_THEN_ELSE
2164 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2165 gen_rtx_MINUS (mode,
2166 gen_rtx_ABS (mode, op1),
2167 gen_rtx_ABS (mode, mod))),
2168 gen_rtx_IF_THEN_ELSE
2169 (mode, gen_rtx_GT (BImode,
2170 gen_rtx_DIV (mode, op1, mod),
2171 const0_rtx),
2172 const1_rtx, constm1_rtx),
2173 const0_rtx);
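/* A worked example: with OP0 = 7 and OP1 = 4, truncation gives 1 with
   MOD = 3.  Since abs (3) >= abs (4) - abs (3), the remainder is at
   least half the divisor, and OP1 / MOD > 0, so the adjustment is +1
   and the rounded result is 2.  The halfway case 6 / 4 (MOD = 2,
   abs (2) >= 4 - 2) likewise rounds away from zero to 2.  */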
2176 /* Return the difference between the rounded and the truncated result
2177 of an unsigned division by OP1 with remainder MOD. Halfway cases
2178 are rounded away from zero, rather than to the nearest even
2179 number. */
2180 static rtx
2181 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2183 /* (mod >= op1 - mod ? 1 : 0) */
2184 return gen_rtx_IF_THEN_ELSE
2185 (mode, gen_rtx_GE (BImode, mod,
2186 gen_rtx_MINUS (mode, op1, mod)),
2187 const1_rtx, const0_rtx);
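/* A worked example: 7 divided by 4 truncates to 1 with MOD = 3; since
   3 >= 4 - 3 the remainder is at least half the divisor, so the
   adjustment is +1, giving the rounded result 2.  */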
2190 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2191 any rtl. */
2193 static rtx
2194 convert_debug_memory_address (enum machine_mode mode, rtx x)
2196 enum machine_mode xmode = GET_MODE (x);
2198 #ifndef POINTERS_EXTEND_UNSIGNED
2199 gcc_assert (mode == Pmode);
2200 gcc_assert (xmode == mode || xmode == VOIDmode);
2201 #else
2202 gcc_assert (mode == Pmode || mode == ptr_mode);
2204 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2205 return x;
2207 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2208 x = simplify_gen_subreg (mode, x, xmode,
2209 subreg_lowpart_offset
2210 (mode, xmode));
2211 else if (POINTERS_EXTEND_UNSIGNED > 0)
2212 x = gen_rtx_ZERO_EXTEND (mode, x);
2213 else if (!POINTERS_EXTEND_UNSIGNED)
2214 x = gen_rtx_SIGN_EXTEND (mode, x);
2215 else
2216 gcc_unreachable ();
2217 #endif /* POINTERS_EXTEND_UNSIGNED */
2219 return x;
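/* For example, on a hypothetical target where Pmode is DImode,
   ptr_mode is SImode and POINTERS_EXTEND_UNSIGNED is 1, widening an
   SImode address X yields (zero_extend:DI X), while narrowing a
   DImode address takes the lowpart subreg -- in both cases building
   rtl without emitting any instructions.  */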
2222 /* Return an RTX equivalent to the value of the tree expression
2223 EXP. */
2225 static rtx
2226 expand_debug_expr (tree exp)
2228 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2229 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2230 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2231 addr_space_t as;
2232 enum machine_mode address_mode;
2234 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2236 case tcc_expression:
2237 switch (TREE_CODE (exp))
2239 case COND_EXPR:
2240 case DOT_PROD_EXPR:
2241 goto ternary;
2243 case TRUTH_ANDIF_EXPR:
2244 case TRUTH_ORIF_EXPR:
2245 case TRUTH_AND_EXPR:
2246 case TRUTH_OR_EXPR:
2247 case TRUTH_XOR_EXPR:
2248 goto binary;
2250 case TRUTH_NOT_EXPR:
2251 goto unary;
2253 default:
2254 break;
2256 break;
2258 ternary:
2259 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2260 if (!op2)
2261 return NULL_RTX;
2262 /* Fall through. */
2264 binary:
2265 case tcc_binary:
2266 case tcc_comparison:
2267 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2268 if (!op1)
2269 return NULL_RTX;
2270 /* Fall through. */
2272 unary:
2273 case tcc_unary:
2274 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2275 if (!op0)
2276 return NULL_RTX;
2277 break;
2279 case tcc_type:
2280 case tcc_statement:
2281 gcc_unreachable ();
2283 case tcc_constant:
2284 case tcc_exceptional:
2285 case tcc_declaration:
2286 case tcc_reference:
2287 case tcc_vl_exp:
2288 break;
2291 switch (TREE_CODE (exp))
2293 case STRING_CST:
2294 if (!lookup_constant_def (exp))
2296 if (strlen (TREE_STRING_POINTER (exp)) + 1
2297 != (size_t) TREE_STRING_LENGTH (exp))
2298 return NULL_RTX;
2299 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2300 op0 = gen_rtx_MEM (BLKmode, op0);
2301 set_mem_attributes (op0, exp, 0);
2302 return op0;
2304 /* Fall through... */
2306 case INTEGER_CST:
2307 case REAL_CST:
2308 case FIXED_CST:
2309 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2310 return op0;
2312 case COMPLEX_CST:
2313 gcc_assert (COMPLEX_MODE_P (mode));
2314 op0 = expand_debug_expr (TREE_REALPART (exp));
2315 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2316 return gen_rtx_CONCAT (mode, op0, op1);
2318 case DEBUG_EXPR_DECL:
2319 op0 = DECL_RTL_IF_SET (exp);
2321 if (op0)
2322 return op0;
2324 op0 = gen_rtx_DEBUG_EXPR (mode);
2325 DEBUG_EXPR_TREE_DECL (op0) = exp;
2326 SET_DECL_RTL (exp, op0);
2328 return op0;
2330 case VAR_DECL:
2331 case PARM_DECL:
2332 case FUNCTION_DECL:
2333 case LABEL_DECL:
2334 case CONST_DECL:
2335 case RESULT_DECL:
2336 op0 = DECL_RTL_IF_SET (exp);
2338 /* This decl was probably optimized away. */
2339 if (!op0)
2341 if (TREE_CODE (exp) != VAR_DECL
2342 || DECL_EXTERNAL (exp)
2343 || !TREE_STATIC (exp)
2344 || !DECL_NAME (exp)
2345 || DECL_HARD_REGISTER (exp)
2346 || mode == VOIDmode)
2347 return NULL;
2349 op0 = make_decl_rtl_for_debug (exp);
2350 if (!MEM_P (op0)
2351 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2352 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2353 return NULL;
2355 else
2356 op0 = copy_rtx (op0);
2358 if (GET_MODE (op0) == BLKmode
2359 /* If op0 is not BLKmode but MODE is, adjust_mode
2360 below would ICE. While it is likely a FE bug,
2361 try to be robust here. See PR43166. */
2362 || mode == BLKmode
2363 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2365 gcc_assert (MEM_P (op0));
2366 op0 = adjust_address_nv (op0, mode, 0);
2367 return op0;
2370 /* Fall through. */
2372 adjust_mode:
2373 case PAREN_EXPR:
2374 case NOP_EXPR:
2375 case CONVERT_EXPR:
2377 enum machine_mode inner_mode = GET_MODE (op0);
2379 if (mode == inner_mode)
2380 return op0;
2382 if (inner_mode == VOIDmode)
2384 if (TREE_CODE (exp) == SSA_NAME)
2385 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2386 else
2387 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2388 if (mode == inner_mode)
2389 return op0;
2392 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2394 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2395 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2396 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2397 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2398 else
2399 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2401 else if (FLOAT_MODE_P (mode))
2403 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2404 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2405 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2406 else
2407 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2409 else if (FLOAT_MODE_P (inner_mode))
2411 if (unsignedp)
2412 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2413 else
2414 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2416 else if (CONSTANT_P (op0)
2417 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2418 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2419 subreg_lowpart_offset (mode,
2420 inner_mode));
2421 else if (unsignedp)
2422 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2423 else
2424 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2426 return op0;
2429 case INDIRECT_REF:
2430 case ALIGN_INDIRECT_REF:
2431 case MISALIGNED_INDIRECT_REF:
2432 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2433 if (!op0)
2434 return NULL;
2436 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2438 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2439 address_mode = targetm.addr_space.address_mode (as);
2441 else
2443 as = ADDR_SPACE_GENERIC;
2444 address_mode = Pmode;
2447 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2449 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2450 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2453 op0 = gen_rtx_MEM (mode, op0);
2455 set_mem_attributes (op0, exp, 0);
2456 set_mem_addr_space (op0, as);
2458 return op0;
2460 case TARGET_MEM_REF:
2461 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2462 return NULL;
2464 op0 = expand_debug_expr
2465 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2466 if (!op0)
2467 return NULL;
2469 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2471 op0 = gen_rtx_MEM (mode, op0);
2473 set_mem_attributes (op0, exp, 0);
2474 set_mem_addr_space (op0, as);
2476 return op0;
2478 case ARRAY_REF:
2479 case ARRAY_RANGE_REF:
2480 case COMPONENT_REF:
2481 case BIT_FIELD_REF:
2482 case REALPART_EXPR:
2483 case IMAGPART_EXPR:
2484 case VIEW_CONVERT_EXPR:
2486 enum machine_mode mode1;
2487 HOST_WIDE_INT bitsize, bitpos;
2488 tree offset;
2489 int volatilep = 0;
2490 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2491 &mode1, &unsignedp, &volatilep, false);
2492 rtx orig_op0;
2494 if (bitsize == 0)
2495 return NULL;
2497 orig_op0 = op0 = expand_debug_expr (tem);
2499 if (!op0)
2500 return NULL;
2502 if (offset)
2504 enum machine_mode addrmode, offmode;
2506 if (!MEM_P (op0))
2507 return NULL;
2509 op0 = XEXP (op0, 0);
2510 addrmode = GET_MODE (op0);
2511 if (addrmode == VOIDmode)
2512 addrmode = Pmode;
2514 op1 = expand_debug_expr (offset);
2515 if (!op1)
2516 return NULL;
2518 offmode = GET_MODE (op1);
2519 if (offmode == VOIDmode)
2520 offmode = TYPE_MODE (TREE_TYPE (offset));
2522 if (addrmode != offmode)
2523 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2524 subreg_lowpart_offset (addrmode,
2525 offmode));
2527 /* Don't use offset_address here; we don't need a
2528 recognizable address, and we don't want to generate
2529 code. */
2530 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2533 if (MEM_P (op0))
2535 if (mode1 == VOIDmode)
2536 /* Bitfield. */
2537 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2538 if (bitpos >= BITS_PER_UNIT)
2540 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2541 bitpos %= BITS_PER_UNIT;
2543 else if (bitpos < 0)
2545 HOST_WIDE_INT units
2546 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2547 op0 = adjust_address_nv (op0, mode1, units);
2548 bitpos += units * BITS_PER_UNIT;
2550 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2551 op0 = adjust_address_nv (op0, mode, 0);
2552 else if (GET_MODE (op0) != mode1)
2553 op0 = adjust_address_nv (op0, mode1, 0);
2554 else
2555 op0 = copy_rtx (op0);
2556 if (op0 == orig_op0)
2557 op0 = shallow_copy_rtx (op0);
2558 set_mem_attributes (op0, exp, 0);
2561 if (bitpos == 0 && mode == GET_MODE (op0))
2562 return op0;
2564 if (bitpos < 0)
2565 return NULL;
2567 if (GET_MODE (op0) == BLKmode)
2568 return NULL;
2570 if ((bitpos % BITS_PER_UNIT) == 0
2571 && bitsize == GET_MODE_BITSIZE (mode1))
2573 enum machine_mode opmode = GET_MODE (op0);
2575 if (opmode == VOIDmode)
2576 opmode = mode1;
2578 /* This condition may hold if we're expanding the address
2579 right past the end of an array that turned out not to
2580 be addressable (i.e., the address was only computed in
2581 debug stmts). The gen_subreg below would rightfully
2582 crash, and the address doesn't really exist, so just
2583 drop it. */
2584 if (bitpos >= GET_MODE_BITSIZE (opmode))
2585 return NULL;
2587 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2588 return simplify_gen_subreg (mode, op0, opmode,
2589 bitpos / BITS_PER_UNIT);
2592 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2593 && TYPE_UNSIGNED (TREE_TYPE (exp))
2594 ? SIGN_EXTRACT
2595 : ZERO_EXTRACT, mode,
2596 GET_MODE (op0) != VOIDmode
2597 ? GET_MODE (op0) : mode1,
2598 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2601 case ABS_EXPR:
2602 return gen_rtx_ABS (mode, op0);
2604 case NEGATE_EXPR:
2605 return gen_rtx_NEG (mode, op0);
2607 case BIT_NOT_EXPR:
2608 return gen_rtx_NOT (mode, op0);
2610 case FLOAT_EXPR:
2611 if (unsignedp)
2612 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2613 else
2614 return gen_rtx_FLOAT (mode, op0);
2616 case FIX_TRUNC_EXPR:
2617 if (unsignedp)
2618 return gen_rtx_UNSIGNED_FIX (mode, op0);
2619 else
2620 return gen_rtx_FIX (mode, op0);
2622 case POINTER_PLUS_EXPR:
2623 /* For the rare target where pointers are not the same size as
2624 size_t, we need to check for mismatched modes and correct
2625 the addend. */
2626 if (op0 && op1
2627 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2628 && GET_MODE (op0) != GET_MODE (op1))
2630 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2631 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2632 else
2633 /* We always sign-extend, regardless of the signedness of
2634 the operand, because the operand is always unsigned
2635 here even if the original C expression is signed. */
2636 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2638 /* Fall through. */
2639 case PLUS_EXPR:
2640 return gen_rtx_PLUS (mode, op0, op1);
2642 case MINUS_EXPR:
2643 return gen_rtx_MINUS (mode, op0, op1);
2645 case MULT_EXPR:
2646 return gen_rtx_MULT (mode, op0, op1);
2648 case RDIV_EXPR:
2649 case TRUNC_DIV_EXPR:
2650 case EXACT_DIV_EXPR:
2651 if (unsignedp)
2652 return gen_rtx_UDIV (mode, op0, op1);
2653 else
2654 return gen_rtx_DIV (mode, op0, op1);
2656 case TRUNC_MOD_EXPR:
2657 if (unsignedp)
2658 return gen_rtx_UMOD (mode, op0, op1);
2659 else
2660 return gen_rtx_MOD (mode, op0, op1);
2662 case FLOOR_DIV_EXPR:
2663 if (unsignedp)
2664 return gen_rtx_UDIV (mode, op0, op1);
2665 else
2667 rtx div = gen_rtx_DIV (mode, op0, op1);
2668 rtx mod = gen_rtx_MOD (mode, op0, op1);
2669 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2670 return gen_rtx_PLUS (mode, div, adj);
2673 case FLOOR_MOD_EXPR:
2674 if (unsignedp)
2675 return gen_rtx_UMOD (mode, op0, op1);
2676 else
2678 rtx mod = gen_rtx_MOD (mode, op0, op1);
2679 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2680 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2681 return gen_rtx_PLUS (mode, mod, adj);
2684 case CEIL_DIV_EXPR:
2685 if (unsignedp)
2687 rtx div = gen_rtx_UDIV (mode, op0, op1);
2688 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2689 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2690 return gen_rtx_PLUS (mode, div, adj);
2692 else
2694 rtx div = gen_rtx_DIV (mode, op0, op1);
2695 rtx mod = gen_rtx_MOD (mode, op0, op1);
2696 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2697 return gen_rtx_PLUS (mode, div, adj);
2700 case CEIL_MOD_EXPR:
2701 if (unsignedp)
2703 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2704 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2705 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2706 return gen_rtx_PLUS (mode, mod, adj);
2708 else
2710 rtx mod = gen_rtx_MOD (mode, op0, op1);
2711 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2712 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2713 return gen_rtx_PLUS (mode, mod, adj);
2716 case ROUND_DIV_EXPR:
2717 if (unsignedp)
2719 rtx div = gen_rtx_UDIV (mode, op0, op1);
2720 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2721 rtx adj = round_udiv_adjust (mode, mod, op1);
2722 return gen_rtx_PLUS (mode, div, adj);
2724 else
2726 rtx div = gen_rtx_DIV (mode, op0, op1);
2727 rtx mod = gen_rtx_MOD (mode, op0, op1);
2728 rtx adj = round_sdiv_adjust (mode, mod, op1);
2729 return gen_rtx_PLUS (mode, div, adj);
2732 case ROUND_MOD_EXPR:
2733 if (unsignedp)
2735 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2736 rtx adj = round_udiv_adjust (mode, mod, op1);
2737 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2738 return gen_rtx_PLUS (mode, mod, adj);
2740 else
2742 rtx mod = gen_rtx_MOD (mode, op0, op1);
2743 rtx adj = round_sdiv_adjust (mode, mod, op1);
2744 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2745 return gen_rtx_PLUS (mode, mod, adj);
2748 case LSHIFT_EXPR:
2749 return gen_rtx_ASHIFT (mode, op0, op1);
2751 case RSHIFT_EXPR:
2752 if (unsignedp)
2753 return gen_rtx_LSHIFTRT (mode, op0, op1);
2754 else
2755 return gen_rtx_ASHIFTRT (mode, op0, op1);
2757 case LROTATE_EXPR:
2758 return gen_rtx_ROTATE (mode, op0, op1);
2760 case RROTATE_EXPR:
2761 return gen_rtx_ROTATERT (mode, op0, op1);
2763 case MIN_EXPR:
2764 if (unsignedp)
2765 return gen_rtx_UMIN (mode, op0, op1);
2766 else
2767 return gen_rtx_SMIN (mode, op0, op1);
2769 case MAX_EXPR:
2770 if (unsignedp)
2771 return gen_rtx_UMAX (mode, op0, op1);
2772 else
2773 return gen_rtx_SMAX (mode, op0, op1);
2775 case BIT_AND_EXPR:
2776 case TRUTH_AND_EXPR:
2777 return gen_rtx_AND (mode, op0, op1);
2779 case BIT_IOR_EXPR:
2780 case TRUTH_OR_EXPR:
2781 return gen_rtx_IOR (mode, op0, op1);
2783 case BIT_XOR_EXPR:
2784 case TRUTH_XOR_EXPR:
2785 return gen_rtx_XOR (mode, op0, op1);
2787 case TRUTH_ANDIF_EXPR:
2788 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2790 case TRUTH_ORIF_EXPR:
2791 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2793 case TRUTH_NOT_EXPR:
2794 return gen_rtx_EQ (mode, op0, const0_rtx);
2796 case LT_EXPR:
2797 if (unsignedp)
2798 return gen_rtx_LTU (mode, op0, op1);
2799 else
2800 return gen_rtx_LT (mode, op0, op1);
2802 case LE_EXPR:
2803 if (unsignedp)
2804 return gen_rtx_LEU (mode, op0, op1);
2805 else
2806 return gen_rtx_LE (mode, op0, op1);
2808 case GT_EXPR:
2809 if (unsignedp)
2810 return gen_rtx_GTU (mode, op0, op1);
2811 else
2812 return gen_rtx_GT (mode, op0, op1);
2814 case GE_EXPR:
2815 if (unsignedp)
2816 return gen_rtx_GEU (mode, op0, op1);
2817 else
2818 return gen_rtx_GE (mode, op0, op1);
2820 case EQ_EXPR:
2821 return gen_rtx_EQ (mode, op0, op1);
2823 case NE_EXPR:
2824 return gen_rtx_NE (mode, op0, op1);
2826 case UNORDERED_EXPR:
2827 return gen_rtx_UNORDERED (mode, op0, op1);
2829 case ORDERED_EXPR:
2830 return gen_rtx_ORDERED (mode, op0, op1);
2832 case UNLT_EXPR:
2833 return gen_rtx_UNLT (mode, op0, op1);
2835 case UNLE_EXPR:
2836 return gen_rtx_UNLE (mode, op0, op1);
2838 case UNGT_EXPR:
2839 return gen_rtx_UNGT (mode, op0, op1);
2841 case UNGE_EXPR:
2842 return gen_rtx_UNGE (mode, op0, op1);
2844 case UNEQ_EXPR:
2845 return gen_rtx_UNEQ (mode, op0, op1);
2847 case LTGT_EXPR:
2848 return gen_rtx_LTGT (mode, op0, op1);
2850 case COND_EXPR:
2851 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2853 case COMPLEX_EXPR:
2854 gcc_assert (COMPLEX_MODE_P (mode));
2855 if (GET_MODE (op0) == VOIDmode)
2856 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2857 if (GET_MODE (op1) == VOIDmode)
2858 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2859 return gen_rtx_CONCAT (mode, op0, op1);
2861 case CONJ_EXPR:
2862 if (GET_CODE (op0) == CONCAT)
2863 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2864 gen_rtx_NEG (GET_MODE_INNER (mode),
2865 XEXP (op0, 1)));
2866 else
2868 enum machine_mode imode = GET_MODE_INNER (mode);
2869 rtx re, im;
2871 if (MEM_P (op0))
2873 re = adjust_address_nv (op0, imode, 0);
2874 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2876 else
2878 enum machine_mode ifmode = int_mode_for_mode (mode);
2879 enum machine_mode ihmode = int_mode_for_mode (imode);
2880 rtx halfsize;
2881 if (ifmode == BLKmode || ihmode == BLKmode)
2882 return NULL;
2883 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2884 re = op0;
2885 if (mode != ifmode)
2886 re = gen_rtx_SUBREG (ifmode, re, 0);
2887 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2888 if (imode != ihmode)
2889 re = gen_rtx_SUBREG (imode, re, 0);
2890 im = copy_rtx (op0);
2891 if (mode != ifmode)
2892 im = gen_rtx_SUBREG (ifmode, im, 0);
2893 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2894 if (imode != ihmode)
2895 im = gen_rtx_SUBREG (imode, im, 0);
2897 im = gen_rtx_NEG (imode, im);
2898 return gen_rtx_CONCAT (mode, re, im);
2901 case ADDR_EXPR:
2902 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2903 if (!op0 || !MEM_P (op0))
2904 return NULL;
2906 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2908 return op0;
2910 case VECTOR_CST:
2911 exp = build_constructor_from_list (TREE_TYPE (exp),
2912 TREE_VECTOR_CST_ELTS (exp));
2913 /* Fall through. */
2915 case CONSTRUCTOR:
2916 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2918 unsigned i;
2919 tree val;
2921 op0 = gen_rtx_CONCATN
2922 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2924 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2926 op1 = expand_debug_expr (val);
2927 if (!op1)
2928 return NULL;
2929 XVECEXP (op0, 0, i) = op1;
2932 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2934 op1 = expand_debug_expr
2935 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2937 if (!op1)
2938 return NULL;
2940 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2941 XVECEXP (op0, 0, i) = op1;
2944 return op0;
2946 else
2947 goto flag_unsupported;
2949 case CALL_EXPR:
2950 /* ??? Maybe handle some builtins? */
2951 return NULL;
2953 case SSA_NAME:
2955 gimple g = get_gimple_for_ssa_name (exp);
2956 if (g)
2958 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2959 if (!op0)
2960 return NULL;
2962 else
2964 int part = var_to_partition (SA.map, exp);
2966 if (part == NO_PARTITION)
2967 return NULL;
2969 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2971 op0 = SA.partition_to_pseudo[part];
2973 goto adjust_mode;
2976 case ERROR_MARK:
2977 return NULL;
2979 /* Vector stuff. For most of these tree codes we don't have rtl codes. */
2980 case REALIGN_LOAD_EXPR:
2981 case REDUC_MAX_EXPR:
2982 case REDUC_MIN_EXPR:
2983 case REDUC_PLUS_EXPR:
2984 case VEC_COND_EXPR:
2985 case VEC_EXTRACT_EVEN_EXPR:
2986 case VEC_EXTRACT_ODD_EXPR:
2987 case VEC_INTERLEAVE_HIGH_EXPR:
2988 case VEC_INTERLEAVE_LOW_EXPR:
2989 case VEC_LSHIFT_EXPR:
2990 case VEC_PACK_FIX_TRUNC_EXPR:
2991 case VEC_PACK_SAT_EXPR:
2992 case VEC_PACK_TRUNC_EXPR:
2993 case VEC_RSHIFT_EXPR:
2994 case VEC_UNPACK_FLOAT_HI_EXPR:
2995 case VEC_UNPACK_FLOAT_LO_EXPR:
2996 case VEC_UNPACK_HI_EXPR:
2997 case VEC_UNPACK_LO_EXPR:
2998 case VEC_WIDEN_MULT_HI_EXPR:
2999 case VEC_WIDEN_MULT_LO_EXPR:
3000 return NULL;
3002 /* Misc codes. */
3003 case ADDR_SPACE_CONVERT_EXPR:
3004 case FIXED_CONVERT_EXPR:
3005 case OBJ_TYPE_REF:
3006 case WITH_SIZE_EXPR:
3007 return NULL;
3009 case DOT_PROD_EXPR:
3010 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3011 && SCALAR_INT_MODE_P (mode))
3013 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3014 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3015 else
3016 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3017 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3018 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3019 else
3020 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3021 op0 = gen_rtx_MULT (mode, op0, op1);
3022 return gen_rtx_PLUS (mode, op0, op2);
3024 return NULL;
3026 case WIDEN_MULT_EXPR:
3027 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3028 && SCALAR_INT_MODE_P (mode))
3030 enum machine_mode inner_mode = GET_MODE (op0);
3031 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3032 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3033 else
3034 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3035 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3036 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3037 else
3038 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3039 return gen_rtx_MULT (mode, op0, op1);
3041 return NULL;
3043 case WIDEN_SUM_EXPR:
3044 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3045 && SCALAR_INT_MODE_P (mode))
3047 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3048 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3049 else
3050 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3051 return gen_rtx_PLUS (mode, op0, op1);
3053 return NULL;
3055 default:
3056 flag_unsupported:
3057 #ifdef ENABLE_CHECKING
3058 debug_tree (exp);
3059 gcc_unreachable ();
3060 #else
3061 return NULL;
3062 #endif
3066 /* Expand the _LOCs in debug insns. We run this after expanding all
3067 regular insns, so that any variables referenced in the function
3068 will have their DECL_RTLs set. */
3070 static void
3071 expand_debug_locations (void)
3073 rtx insn;
3074 rtx last = get_last_insn ();
3075 int save_strict_alias = flag_strict_aliasing;
3077 /* New alias sets while setting up memory attributes cause
3078 -fcompare-debug failures, even though they don't bring about any
3079 codegen changes. */
3080 flag_strict_aliasing = 0;
3082 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3083 if (DEBUG_INSN_P (insn))
3085 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3086 rtx val;
3087 enum machine_mode mode;
3089 if (value == NULL_TREE)
3090 val = NULL_RTX;
3091 else
3093 val = expand_debug_expr (value);
3094 gcc_assert (last == get_last_insn ());
3097 if (!val)
3098 val = gen_rtx_UNKNOWN_VAR_LOC ();
3099 else
3101 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3103 gcc_assert (mode == GET_MODE (val)
3104 || (GET_MODE (val) == VOIDmode
3105 && (CONST_INT_P (val)
3106 || GET_CODE (val) == CONST_FIXED
3107 || GET_CODE (val) == CONST_DOUBLE
3108 || GET_CODE (val) == LABEL_REF)));
3111 INSN_VAR_LOCATION_LOC (insn) = val;
3114 flag_strict_aliasing = save_strict_alias;
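/* In the RTL dumps a location expanded this way appears roughly as
   (debug_insn ... (var_location x (reg:SI 60))), or with
   (clobber (const_int 0)) -- gen_rtx_UNKNOWN_VAR_LOC above -- as the
   location when the value could not be expanded.  */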
3117 /* Expand basic block BB from GIMPLE trees to RTL. */
3119 static basic_block
3120 expand_gimple_basic_block (basic_block bb)
3122 gimple_stmt_iterator gsi;
3123 gimple_seq stmts;
3124 gimple stmt = NULL;
3125 rtx note, last;
3126 edge e;
3127 edge_iterator ei;
3128 void **elt;
3130 if (dump_file)
3131 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3132 bb->index);
3134 /* Note that since we are now transitioning from GIMPLE to RTL, we
3135 cannot use the gsi_*_bb() routines because they expect the basic
3136 block to be in GIMPLE, instead of RTL. Therefore, we need to
3137 access the BB sequence directly. */
3138 stmts = bb_seq (bb);
3139 bb->il.gimple = NULL;
3140 rtl_profile_for_bb (bb);
3141 init_rtl_bb_info (bb);
3142 bb->flags |= BB_RTL;
3144 /* Remove the RETURN_EXPR if we may fall through to the exit
3145 instead. */
3146 gsi = gsi_last (stmts);
3147 if (!gsi_end_p (gsi)
3148 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3150 gimple ret_stmt = gsi_stmt (gsi);
3152 gcc_assert (single_succ_p (bb));
3153 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3155 if (bb->next_bb == EXIT_BLOCK_PTR
3156 && !gimple_return_retval (ret_stmt))
3158 gsi_remove (&gsi, false);
3159 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3163 gsi = gsi_start (stmts);
3164 if (!gsi_end_p (gsi))
3166 stmt = gsi_stmt (gsi);
3167 if (gimple_code (stmt) != GIMPLE_LABEL)
3168 stmt = NULL;
3171 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3173 if (stmt || elt)
3175 last = get_last_insn ();
3177 if (stmt)
3179 expand_gimple_stmt (stmt);
3180 gsi_next (&gsi);
3183 if (elt)
3184 emit_label ((rtx) *elt);
3186 /* Java emits line number notes at the top of labels.
3187 ??? Make this go away once line number notes are obsoleted. */
3188 BB_HEAD (bb) = NEXT_INSN (last);
3189 if (NOTE_P (BB_HEAD (bb)))
3190 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3191 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3193 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3195 else
3196 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3198 NOTE_BASIC_BLOCK (note) = bb;
3200 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3202 basic_block new_bb;
3204 stmt = gsi_stmt (gsi);
3206 /* If this statement is a non-debug one, and we generate debug
3207 insns, then this one might be the last real use of a TERed
3208 SSA_NAME, but where there are still some debug uses further
3209 down. Expanding the current SSA name in such further debug
3210 uses by their RHS might lead to wrong debug info, as coalescing
3211 might make the operands of such RHS be placed into the same
3212 pseudo as something else. Like so:
3213 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3214 use(a_1);
3215 a_2 = ...
3216 #DEBUG ... => a_1
3217 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3218 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3219 the write to a_2 would actually have clobbered the place which
3220 formerly held a_0.
3222 So, instead of that, we recognize the situation, and generate
3223 debug temporaries at the last real use of TERed SSA names:
3224 a_1 = a_0 + 1;
3225 #DEBUG #D1 => a_1
3226 use(a_1);
3227 a_2 = ...
3228 #DEBUG ... => #D1
3230 if (MAY_HAVE_DEBUG_INSNS
3231 && SA.values
3232 && !is_gimple_debug (stmt))
3234 ssa_op_iter iter;
3235 tree op;
3236 gimple def;
3238 location_t sloc = get_curr_insn_source_location ();
3239 tree sblock = get_curr_insn_block ();
3241 /* Look for SSA names that have their last use here (TERed
3242 names always have only one real use). */
3243 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3244 if ((def = get_gimple_for_ssa_name (op)))
3246 imm_use_iterator imm_iter;
3247 use_operand_p use_p;
3248 bool have_debug_uses = false;
3250 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3252 if (gimple_debug_bind_p (USE_STMT (use_p)))
3254 have_debug_uses = true;
3255 break;
3259 if (have_debug_uses)
3261 /* OP is a TERed SSA name, with DEF its defining
3262 statement, and where OP is used in further debug
3263 instructions. Generate a debug temporary, and
3264 replace all uses of OP in debug insns with that
3265 temporary. */
3266 gimple debugstmt;
3267 tree value = gimple_assign_rhs_to_tree (def);
3268 tree vexpr = make_node (DEBUG_EXPR_DECL);
3269 rtx val;
3270 enum machine_mode mode;
3272 set_curr_insn_source_location (gimple_location (def));
3273 set_curr_insn_block (gimple_block (def));
3275 DECL_ARTIFICIAL (vexpr) = 1;
3276 TREE_TYPE (vexpr) = TREE_TYPE (value);
3277 if (DECL_P (value))
3278 mode = DECL_MODE (value);
3279 else
3280 mode = TYPE_MODE (TREE_TYPE (value));
3281 DECL_MODE (vexpr) = mode;
3283 val = gen_rtx_VAR_LOCATION
3284 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3286 val = emit_debug_insn (val);
3288 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3290 if (!gimple_debug_bind_p (debugstmt))
3291 continue;
3293 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3294 SET_USE (use_p, vexpr);
3296 update_stmt (debugstmt);
3300 set_curr_insn_source_location (sloc);
3301 set_curr_insn_block (sblock);
3304 currently_expanding_gimple_stmt = stmt;
3306 /* Expand this statement, then evaluate the resulting RTL and
3307 fixup the CFG accordingly. */
3308 if (gimple_code (stmt) == GIMPLE_COND)
3310 new_bb = expand_gimple_cond (bb, stmt);
3311 if (new_bb)
3312 return new_bb;
3314 else if (gimple_debug_bind_p (stmt))
3316 location_t sloc = get_curr_insn_source_location ();
3317 tree sblock = get_curr_insn_block ();
3318 gimple_stmt_iterator nsi = gsi;
3320 for (;;)
3322 tree var = gimple_debug_bind_get_var (stmt);
3323 tree value;
3324 rtx val;
3325 enum machine_mode mode;
3327 if (gimple_debug_bind_has_value_p (stmt))
3328 value = gimple_debug_bind_get_value (stmt);
3329 else
3330 value = NULL_TREE;
3332 last = get_last_insn ();
3334 set_curr_insn_source_location (gimple_location (stmt));
3335 set_curr_insn_block (gimple_block (stmt));
3337 if (DECL_P (var))
3338 mode = DECL_MODE (var);
3339 else
3340 mode = TYPE_MODE (TREE_TYPE (var));
3342 val = gen_rtx_VAR_LOCATION
3343 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3345 val = emit_debug_insn (val);
3347 if (dump_file && (dump_flags & TDF_DETAILS))
3349 /* We can't dump the insn with a TREE where an RTX
3350 is expected. */
3351 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3352 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3353 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3356 /* In order not to generate too many debug temporaries,
3357 we delink all uses of debug statements we already expanded.
3358 Therefore debug statements between definition and real
3359 use of TERed SSA names will continue to use the SSA name,
3360 and not be replaced with debug temps. */
3361 delink_stmt_imm_use (stmt);
3363 gsi = nsi;
3364 gsi_next (&nsi);
3365 if (gsi_end_p (nsi))
3366 break;
3367 stmt = gsi_stmt (nsi);
3368 if (!gimple_debug_bind_p (stmt))
3369 break;
3372 set_curr_insn_source_location (sloc);
3373 set_curr_insn_block (sblock);
3375 else
3377 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3379 bool can_fallthru;
3380 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3381 if (new_bb)
3383 if (can_fallthru)
3384 bb = new_bb;
3385 else
3386 return new_bb;
3389 else
3391 def_operand_p def_p;
3392 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3394 if (def_p != NULL)
3396 /* Ignore this stmt if it is in the list of
3397 replaceable expressions. */
3398 if (SA.values
3399 && bitmap_bit_p (SA.values,
3400 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3401 continue;
3403 last = expand_gimple_stmt (stmt);
3404 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3409 currently_expanding_gimple_stmt = NULL;
3411 /* Expand implicit goto and convert goto_locus. */
3412 FOR_EACH_EDGE (e, ei, bb->succs)
3414 if (e->goto_locus && e->goto_block)
3416 set_curr_insn_source_location (e->goto_locus);
3417 set_curr_insn_block (e->goto_block);
3418 e->goto_locus = curr_insn_locator ();
3420 e->goto_block = NULL;
3421 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3423 emit_jump (label_rtx_for_bb (e->dest));
3424 e->flags &= ~EDGE_FALLTHRU;
3428 /* Expanded RTL can create a jump in the last instruction of a block.
3429 Such a jump might later be assumed to be a jump to the successor,
3430 breaking edge insertion. Insert a dummy move to prevent this. PR41440. */
3431 if (single_succ_p (bb)
3432 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3433 && (last = get_last_insn ())
3434 && JUMP_P (last))
3436 rtx dummy = gen_reg_rtx (SImode);
3437 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3440 do_pending_stack_adjust ();
3442 /* Find the block tail. The last insn in the block is the insn
3443 before a barrier and/or table jump insn. */
3444 last = get_last_insn ();
3445 if (BARRIER_P (last))
3446 last = PREV_INSN (last);
3447 if (JUMP_TABLE_DATA_P (last))
3448 last = PREV_INSN (PREV_INSN (last));
3449 BB_END (bb) = last;
3451 update_bb_for_insn (bb);
3453 return bb;
3457 /* Create a basic block for initialization code. */
3459 static basic_block
3460 construct_init_block (void)
3462 basic_block init_block, first_block;
3463 edge e = NULL;
3464 int flags;
3466 /* Multiple entry points not supported yet. */
3467 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3468 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3469 init_rtl_bb_info (EXIT_BLOCK_PTR);
3470 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3471 EXIT_BLOCK_PTR->flags |= BB_RTL;
3473 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3475 /* When the entry edge points to the first basic block, we don't need
3476 a jump; otherwise we have to jump to the proper target. */
3477 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3479 tree label = gimple_block_label (e->dest);
3481 emit_jump (label_rtx (label));
3482 flags = 0;
3484 else
3485 flags = EDGE_FALLTHRU;
3487 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3488 get_last_insn (),
3489 ENTRY_BLOCK_PTR);
3490 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3491 init_block->count = ENTRY_BLOCK_PTR->count;
3492 if (e)
3494 first_block = e->dest;
3495 redirect_edge_succ (e, init_block);
3496 e = make_edge (init_block, first_block, flags);
3498 else
3499 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3500 e->probability = REG_BR_PROB_BASE;
3501 e->count = ENTRY_BLOCK_PTR->count;
3503 update_bb_for_insn (init_block);
3504 return init_block;
3507 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3508 found in the block tree. */
3510 static void
3511 set_block_levels (tree block, int level)
3513 while (block)
3515 BLOCK_NUMBER (block) = level;
3516 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3517 block = BLOCK_CHAIN (block);
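/* For example, a block tree B0 { B1 { B2, B3 { B4 } } } is numbered
   B0 = 0, B1 = 1, B2 = B3 = 2 and B4 = 3: siblings share a level and
   children sit one level deeper.  */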
3521 /* Create a block containing landing pads and similar stuff. */
3523 static void
3524 construct_exit_block (void)
3526 rtx head = get_last_insn ();
3527 rtx end;
3528 basic_block exit_block;
3529 edge e, e2;
3530 unsigned ix;
3531 edge_iterator ei;
3532 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3534 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3536 /* Make sure the locus is set to the end of the function, so that
3537 epilogue line numbers and warnings are set properly. */
3538 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3539 input_location = cfun->function_end_locus;
3541 /* The following insns belong to the top scope. */
3542 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3544 /* Generate rtl for function exit. */
3545 expand_function_end ();
3547 end = get_last_insn ();
3548 if (head == end)
3549 return;
3550 /* While emitting the function end we could move the end of the last basic block. */
3552 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3553 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3554 head = NEXT_INSN (head);
3555 exit_block = create_basic_block (NEXT_INSN (head), end,
3556 EXIT_BLOCK_PTR->prev_bb);
3557 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3558 exit_block->count = EXIT_BLOCK_PTR->count;
3560 ix = 0;
3561 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3563 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3564 if (!(e->flags & EDGE_ABNORMAL))
3565 redirect_edge_succ (e, exit_block);
3566 else
3567 ix++;
3570 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3571 e->probability = REG_BR_PROB_BASE;
3572 e->count = EXIT_BLOCK_PTR->count;
3573 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3574 if (e2 != e)
3576 e->count -= e2->count;
3577 exit_block->count -= e2->count;
3578 exit_block->frequency -= EDGE_FREQUENCY (e2);
3580 if (e->count < 0)
3581 e->count = 0;
3582 if (exit_block->count < 0)
3583 exit_block->count = 0;
3584 if (exit_block->frequency < 0)
3585 exit_block->frequency = 0;
3586 update_bb_for_insn (exit_block);
3589 /* Helper function for discover_nonconstant_array_refs.
3590 Look for ARRAY_REF nodes with non-constant indexes and mark them
3591 addressable. */
3593 static tree
3594 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3595 void *data ATTRIBUTE_UNUSED)
3597 tree t = *tp;
3599 if (IS_TYPE_OR_DECL_P (t))
3600 *walk_subtrees = 0;
3601 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3603 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3604 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3605 && (!TREE_OPERAND (t, 2)
3606 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3607 || (TREE_CODE (t) == COMPONENT_REF
3608 && (!TREE_OPERAND (t,2)
3609 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3610 || TREE_CODE (t) == BIT_FIELD_REF
3611 || TREE_CODE (t) == REALPART_EXPR
3612 || TREE_CODE (t) == IMAGPART_EXPR
3613 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3614 || CONVERT_EXPR_P (t))
3615 t = TREE_OPERAND (t, 0);
3617 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3619 t = get_base_address (t);
3620 if (t && DECL_P (t)
3621 && DECL_MODE (t) != BLKmode)
3622 TREE_ADDRESSABLE (t) = 1;
3625 *walk_subtrees = 0;
3628 return NULL_TREE;
3631 /* RTL expansion is not able to compile array references with variable
3632 offsets for arrays stored in a single register. Discover such
3633 expressions and mark variables as addressable to avoid this
3634 scenario. */
3636 static void
3637 discover_nonconstant_array_refs (void)
3639 basic_block bb;
3640 gimple_stmt_iterator gsi;
3642 FOR_EACH_BB (bb)
3643 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3645 gimple stmt = gsi_stmt (gsi);
3646 if (!is_gimple_debug (stmt))
3647 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
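/* For example, given

     int v[2];
     ... = v[i];

   where V might otherwise be promoted to a single register, the
   variable index I forces V to become TREE_ADDRESSABLE so that it is
   given a stack slot the indexed address computation can refer to.  */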
3651 /* This function sets crtl->args.internal_arg_pointer to a virtual
3652 register if DRAP is needed. Local register allocator will replace
3653 virtual_incoming_args_rtx with the virtual register. */
3655 static void
3656 expand_stack_alignment (void)
3658 rtx drap_rtx;
3659 unsigned int preferred_stack_boundary;
3661 if (! SUPPORTS_STACK_ALIGNMENT)
3662 return;
3664 if (cfun->calls_alloca
3665 || cfun->has_nonlocal_label
3666 || crtl->has_nonlocal_goto)
3667 crtl->need_drap = true;
3669 /* Call update_stack_boundary here again to update incoming stack
3670 boundary. It may set incoming stack alignment to a different
3671 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3672 use the minimum incoming stack alignment to check if it is OK
3673 to perform sibcall optimization since sibcall optimization will
3674 only align the outgoing stack to incoming stack boundary. */
3675 if (targetm.calls.update_stack_boundary)
3676 targetm.calls.update_stack_boundary ();
3678 /* The incoming stack frame has to be aligned at least at
3679 parm_stack_boundary. */
3680 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3682 /* Update crtl->stack_alignment_estimated and use it later to align
3683 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3684 exceptions since callgraph doesn't collect incoming stack alignment
3685 in this case. */
3686 if (cfun->can_throw_non_call_exceptions
3687 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3688 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3689 else
3690 preferred_stack_boundary = crtl->preferred_stack_boundary;
3691 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3692 crtl->stack_alignment_estimated = preferred_stack_boundary;
3693 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3694 crtl->stack_alignment_needed = preferred_stack_boundary;
3696 gcc_assert (crtl->stack_alignment_needed
3697 <= crtl->stack_alignment_estimated);
3699 crtl->stack_realign_needed
3700 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3701 crtl->stack_realign_tried = crtl->stack_realign_needed;
3703 crtl->stack_realign_processed = true;
3705 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3706 alignment. */
3707 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3708 drap_rtx = targetm.calls.get_drap_rtx ();
3710 /* stack_realign_drap and drap_rtx must match. */
3711 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3713 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3714 if (NULL != drap_rtx)
3716 crtl->args.internal_arg_pointer = drap_rtx;
3718 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3719 needed. */
3720 fixup_tail_calls ();
3724 /* Translate the intermediate representation contained in the CFG
3725 from GIMPLE trees to RTL.
3727 We do conversion per basic block and preserve/update the tree CFG.
3728 This implies we have to do some magic as the CFG can simultaneously
3729 consist of basic blocks containing RTL and GIMPLE trees. This can
3730 confuse the CFG hooks, so be careful to not manipulate CFG during
3731 the expansion. */
3733 static unsigned int
3734 gimple_expand_cfg (void)
3736 basic_block bb, init_block;
3737 sbitmap blocks;
3738 edge_iterator ei;
3739 edge e;
3740 unsigned i;
3742 rewrite_out_of_ssa (&SA);
3743 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3744 sizeof (rtx));
3746 /* Some backends want to know that we are expanding to RTL. */
3747 currently_expanding_to_rtl = 1;
3749 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3751 insn_locators_alloc ();
3752 if (!DECL_IS_BUILTIN (current_function_decl))
3754 /* Eventually, all FEs should explicitly set function_start_locus. */
3755 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3756 set_curr_insn_source_location
3757 (DECL_SOURCE_LOCATION (current_function_decl));
3758 else
3759 set_curr_insn_source_location (cfun->function_start_locus);
3761 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3762 prologue_locator = curr_insn_locator ();
3764 /* Make sure the first insn is a note, even if we don't want linenums.
3765 This makes sure the first insn will never be deleted.
3766 Also, final expects a note to appear there. */
3767 emit_note (NOTE_INSN_DELETED);
3769 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3770 discover_nonconstant_array_refs ();
3772 targetm.expand_to_rtl_hook ();
3773 crtl->stack_alignment_needed = STACK_BOUNDARY;
3774 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3775 crtl->stack_alignment_estimated = 0;
3776 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3777 cfun->cfg->max_jumptable_ents = 0;
3780 /* Expand the variables recorded during gimple lowering. */
3781 expand_used_vars ();
3783 /* Honor stack protection warnings. */
3784 if (warn_stack_protect)
3786 if (cfun->calls_alloca)
3787 warning (OPT_Wstack_protector,
3788 "stack protector not protecting local variables: "
3789 "variable length buffer");
3790 if (has_short_buffer && !crtl->stack_protect_guard)
3791 warning (OPT_Wstack_protector,
3792 "stack protector not protecting function: "
3793 "all local arrays are less than %d bytes long",
3794 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3797 /* Set up parameters and prepare for return, for the function. */
3798 expand_function_start (current_function_decl);
3800 /* Now that we also have the parameter RTXs, copy them over to our
3801 partitions. */
3802 for (i = 0; i < SA.map->num_partitions; i++)
3804 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3806 if (TREE_CODE (var) != VAR_DECL
3807 && !SA.partition_to_pseudo[i])
3808 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3809 gcc_assert (SA.partition_to_pseudo[i]);
3811 /* If this decl was marked as living in multiple places, reset
3812 this now to NULL. */
3813 if (DECL_RTL_IF_SET (var) == pc_rtx)
3814 SET_DECL_RTL (var, NULL);
3816 /* Some RTL parts really want to look at DECL_RTL(x) when x
3817 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3818 SET_DECL_RTL here to make this available, but that would mean
3819 selecting one of the potentially many RTLs for one DECL. Instead
3820 of doing that we simply reset the MEM_EXPR of the RTL in question,
3821 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3822 if (!DECL_RTL_SET_P (var))
3824 if (MEM_P (SA.partition_to_pseudo[i]))
3825 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3829 /* If this function is `main', emit a call to `__main'
3830 to run global initializers, etc. */
3831 if (DECL_NAME (current_function_decl)
3832 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3833 && DECL_FILE_SCOPE_P (current_function_decl))
3834 expand_main_function ();
3836 /* Initialize the stack_protect_guard field. This must happen after the
3837 call to __main (if any) so that the external decl is initialized. */
3838 if (crtl->stack_protect_guard)
3839 stack_protect_prologue ();
3841 expand_phi_nodes (&SA);
3843 /* Register rtl specific functions for cfg. */
3844 rtl_register_cfg_hooks ();
3846 init_block = construct_init_block ();
3848 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3849 remaining edges later. */
3850 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3851 e->flags &= ~EDGE_EXECUTABLE;
3853 lab_rtx_for_bb = pointer_map_create ();
3854 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3855 bb = expand_gimple_basic_block (bb);
3857 if (MAY_HAVE_DEBUG_INSNS)
3858 expand_debug_locations ();
3860 execute_free_datastructures ();
3861 finish_out_of_ssa (&SA);
3863 /* We are no longer in SSA form. */
3864 cfun->gimple_df->in_ssa_p = false;
3866 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3867 conservatively to true until they are all profile aware. */
3868 pointer_map_destroy (lab_rtx_for_bb);
3869 free_histograms ();
3871 construct_exit_block ();
3872 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3873 insn_locators_finalize ();
3875 /* Zap the tree EH table. */
3876 set_eh_throw_stmt_table (cfun, NULL);
3878 rebuild_jump_labels (get_insns ());
3880 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3882 edge e;
3883 edge_iterator ei;
3884 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3886 if (e->insns.r)
3887 commit_one_edge_insertion (e);
3888 else
3889 ei_next (&ei);
3893 /* We're done expanding trees to RTL. */
3894 currently_expanding_to_rtl = 0;
3896 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3898 edge e;
3899 edge_iterator ei;
3900 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3902 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3903 e->flags &= ~EDGE_EXECUTABLE;
3905 /* At the moment not all abnormal edges match the RTL
3906 representation. It is safe to remove them here as
3907 find_many_sub_basic_blocks will rediscover them.
3908 In the future we should get this fixed properly. */
3909 if ((e->flags & EDGE_ABNORMAL)
3910 && !(e->flags & EDGE_SIBCALL))
3911 remove_edge (e);
3912 else
3913 ei_next (&ei);
3917 blocks = sbitmap_alloc (last_basic_block);
3918 sbitmap_ones (blocks);
3919 find_many_sub_basic_blocks (blocks);
3920 sbitmap_free (blocks);
3921 purge_all_dead_edges ();
3923 compact_blocks ();
3925 expand_stack_alignment ();
3927 #ifdef ENABLE_CHECKING
3928 verify_flow_info ();
3929 #endif
3931 /* There's no need to defer outputting this function any more; we
3932 know we want to output it. */
3933 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3935 /* Now that we're done expanding trees to RTL, we shouldn't have any
3936 more CONCATs anywhere. */
3937 generating_concat_p = 0;
3939 if (dump_file)
3941 fprintf (dump_file,
3942 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3943 /* And the pass manager will dump RTL for us. */
3946 /* If we're emitting a nested function, make sure its parent gets
3947 emitted as well. Doing otherwise confuses debug info. */
3949 tree parent;
3950 for (parent = DECL_CONTEXT (current_function_decl);
3951 parent != NULL_TREE;
3952 parent = get_containing_scope (parent))
3953 if (TREE_CODE (parent) == FUNCTION_DECL)
3954 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3957 /* We are now committed to emitting code for this function. Do any
3958 preparation, such as emitting abstract debug info for the inline
3959 before it gets mangled by optimization. */
3960 if (cgraph_function_possibly_inlined_p (current_function_decl))
3961 (*debug_hooks->outlining_inline_function) (current_function_decl);
3963 TREE_ASM_WRITTEN (current_function_decl) = 1;
3965 /* After expanding, the return labels are no longer needed. */
3966 return_label = NULL;
3967 naked_return_label = NULL;
3968 /* Tag the blocks with a depth number so that change_scope can find
3969 the common parent easily. */
3970 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3971 default_rtl_profile ();
3972 return 0;
3975 struct rtl_opt_pass pass_expand =
3978 RTL_PASS,
3979 "expand", /* name */
3980 NULL, /* gate */
3981 gimple_expand_cfg, /* execute */
3982 NULL, /* sub */
3983 NULL, /* next */
3984 0, /* static_pass_number */
3985 TV_EXPAND, /* tv_id */
3986 PROP_ssa | PROP_gimple_leh | PROP_cfg
3987 | PROP_gimple_lcx, /* properties_required */
3988 PROP_rtl, /* properties_provided */
3989 PROP_ssa | PROP_trees, /* properties_destroyed */
3990 TODO_verify_ssa | TODO_verify_flow
3991 | TODO_verify_stmts, /* todo_flags_start */
3992 TODO_dump_func
3993 | TODO_ggc_collect /* todo_flags_finish */