/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
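
/* Illustrative: applied to an SSA_NAME such as i_3, SSAVAR yields the
   underlying VAR_DECL i; applied to any other tree it yields the tree
   itself.  */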
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};
#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
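
/* Illustrative: if STARTING_FRAME_OFFSET were 8 on a target with a
   16-byte PREFERRED_STACK_BOUNDARY, expand_used_vars below would
   compute off = 8 and frame_phase = 8, satisfying the invariant
   above.  */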
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
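
/* Worked example (illustrative): with FRAME_GROWS_DOWNWARD, frame_phase
   of 0, frame_offset of -20, size 8 and align 8, the code above computes
   new_frame_offset = (-20 - 8) & -8 = -32, so the new slot occupies
   bytes [-32, -24) from virtual_stack_vars_rtx and frame_offset is left
   at -32.  */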
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
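
/* Illustrative: both "union { short s; int i; }" itself and a struct
   or array with such a union buried anywhere inside yield true, via
   the recursive field walk above; a struct of only scalars yields
   false.  */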
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
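
/* Illustrative: for stack vars with sizes { a: 32, b: 8, c_1: 8 },
   where c_1 is an SSA name, qsort with this comparator produces the
   order { c_1, b, a }: sizes ascend, and of the two equal-sized
   entries the SSA name sorts first.  */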
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
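
/* Worked example (illustrative): for a 16-byte object A and a
   non-conflicting 8-byte object B, the loop below merges B into A's
   partition at offset 0, so both share A's 16-byte footprint and the
   frame needs 16 bytes for the pair instead of 24.  */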
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);
          offset += jsize;
          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
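
/* Illustrative: "offset & -offset" above isolates the lowest set bit,
   i.e. the largest power of two dividing the phase-adjusted offset.
   An offset of 24 gives 24 & -24 == 8 bytes, hence a known 64-bit
   alignment relative to the stack boundary.  */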
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
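
/* Illustrative: at -O0 a block-scope "char buf[64]" is deferred for
   packing, while an "int" or other sub-32-byte object is allocated
   immediately; with -fstack-protector enabled every stack variable is
   deferred regardless of size or scope.  */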
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
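
/* Classification examples (illustrative), assuming the default
   --param ssp-buffer-size=8: "char buf[16]" yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[4]" yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "int a[4]" yields just
   SPCT_HAS_ARRAY, and a struct or union wrapping any of these also
   sets SPCT_HAS_AGGREGATE.  */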
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
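
/* Illustrative: under -fstack-protector-all (flag_stack_protect == 2) a
   bare "char buf[4]" is phase 1 and "int a[4]" is phase 2; under plain
   -fstack-protector only decls containing a large character array get
   phase 1 and everything else stays phase 0 (unprotected).  */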
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and account for the stack space used by the variables that would be
   expanded there, without actually expanding them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
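
/* Illustrative: with a 16-byte PREFERRED_STACK_BOUNDARY and
   FRAME_GROWS_DOWNWARD, a final frame_offset of -20 is rounded by
   "frame_offset &= -align" above down to -32, keeping the frame size
   a multiple of the boundary.  */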
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
1943 /* Expand one gimple statement STMT and return the last RTL instruction
1944 before any of the newly generated ones.
1946 In addition to generating the necessary RTL instructions this also
1947 sets REG_EH_REGION notes if necessary and sets the current source
1948 location for diagnostics. */
1950 static rtx
1951 expand_gimple_stmt (gimple stmt)
1953 int lp_nr = 0;
1954 rtx last = NULL;
1955 location_t saved_location = input_location;
1957 last = get_last_insn ();
1959 /* If this is an expression of some kind and it has an associated line
1960 number, then emit the line number before expanding the expression.
1962 We need to save and restore the file and line information so that
1963 errors discovered during expansion are emitted with the right
1964 information. It would be better if the diagnostic routines
1965 used the file/line information embedded in the tree nodes rather
1966 than globals. */
1967 gcc_assert (cfun);
1969 if (gimple_has_location (stmt))
1971 input_location = gimple_location (stmt);
1972 set_curr_insn_source_location (input_location);
1974 /* Record where the insns produced belong. */
1975 set_curr_insn_block (gimple_block (stmt));
1978 expand_gimple_stmt_1 (stmt);
1979 /* Free any temporaries used to evaluate this statement. */
1980 free_temp_slots ();
1982 input_location = saved_location;
1984 /* Mark all insns that may trap. */
1985 lp_nr = lookup_stmt_eh_lp (stmt);
1986 if (lp_nr)
1988 rtx insn;
1989 for (insn = next_real_insn (last); insn;
1990 insn = next_real_insn (insn))
1992 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1993 /* If we want exceptions for non-call insns, any
1994 may_trap_p instruction may throw. */
1995 && GET_CODE (PATTERN (insn)) != CLOBBER
1996 && GET_CODE (PATTERN (insn)) != USE
1997 && insn_could_throw_p (insn))
1998 make_reg_eh_region_note (insn, 0, lp_nr);
2002 return last;
2005 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2006 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2007 generated a tail call (something that might be denied by the ABI
2008 rules governing the call; see calls.c).
2010 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2011 can still reach the rest of BB. The case here is __builtin_sqrt,
2012 where the NaN result goes through the external function (with a
2013 tailcall) and the normal result happens via a sqrt instruction. */
2015 static basic_block
2016 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2018 rtx last2, last;
2019 edge e;
2020 edge_iterator ei;
2021 int probability;
2022 gcov_type count;
2024 last2 = last = expand_gimple_stmt (stmt);
2026 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2027 if (CALL_P (last) && SIBLING_CALL_P (last))
2028 goto found;
2030 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2032 *can_fallthru = true;
2033 return NULL;
2035 found:
2036 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2037 Any instructions emitted here are about to be deleted. */
2038 do_pending_stack_adjust ();
2040 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2041 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2042 EH or abnormal edges, we shouldn't have created a tail call in
2043 the first place. So it seems to me we should just be removing
2044 all edges here, or redirecting the existing fallthru edge to
2045 the exit block. */
2047 probability = 0;
2048 count = 0;
2050 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2052 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2054 if (e->dest != EXIT_BLOCK_PTR)
2056 e->dest->count -= e->count;
2057 e->dest->frequency -= EDGE_FREQUENCY (e);
2058 if (e->dest->count < 0)
2059 e->dest->count = 0;
2060 if (e->dest->frequency < 0)
2061 e->dest->frequency = 0;
2063 count += e->count;
2064 probability += e->probability;
2065 remove_edge (e);
2067 else
2068 ei_next (&ei);
2071 /* This is somewhat ugly: the call_expr expander often emits instructions
2072 after the sibcall (to perform the function return). These confuse the
2073 find_many_sub_basic_blocks code, so we need to get rid of them. */
2074 last = NEXT_INSN (last);
2075 gcc_assert (BARRIER_P (last));
2077 *can_fallthru = false;
2078 while (NEXT_INSN (last))
2080 /* For instance, the sqrt builtin expander may expand an if with a
2081 sibcall in the then-branch and a label for the else-branch. */
2082 if (LABEL_P (NEXT_INSN (last)))
2084 *can_fallthru = true;
2085 break;
2087 delete_insn (NEXT_INSN (last));
2090 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2091 e->probability += probability;
2092 e->count += count;
2093 BB_END (bb) = last;
2094 update_bb_for_insn (bb);
2096 if (NEXT_INSN (last))
2098 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2100 last = BB_END (bb);
2101 if (BARRIER_P (last))
2102 BB_END (bb) = PREV_INSN (last);
2105 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2107 return bb;
2110 /* Return the difference between the floor and the truncated result of
2111 a signed division by OP1 with remainder MOD. */
2112 static rtx
2113 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2115 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2116 return gen_rtx_IF_THEN_ELSE
2117 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2118 gen_rtx_IF_THEN_ELSE
2119 (mode, gen_rtx_LT (BImode,
2120 gen_rtx_DIV (mode, op1, mod),
2121 const0_rtx),
2122 constm1_rtx, const0_rtx),
2123 const0_rtx);
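/* Worked example (editorial, not in the original source): for a dividend
   of -7 and divisor OP1 = 2, truncating division yields -3 with MOD = -1.
   MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and the
   floor result is -3 + -1 = -4.  */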
2126 /* Return the difference between the ceil and the truncated result of
2127 a signed division by OP1 with remainder MOD. */
2128 static rtx
2129 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2131 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2132 return gen_rtx_IF_THEN_ELSE
2133 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2134 gen_rtx_IF_THEN_ELSE
2135 (mode, gen_rtx_GT (BImode,
2136 gen_rtx_DIV (mode, op1, mod),
2137 const0_rtx),
2138 const1_rtx, const0_rtx),
2139 const0_rtx);
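/* Worked example (editorial): for a dividend of 7 and divisor OP1 = 2,
   truncation yields 3 with MOD = 1.  OP1 / MOD = 2 > 0, so the
   adjustment is +1 and the ceiling result is 3 + 1 = 4.  */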
2142 /* Return the difference between the ceil and the truncated result of
2143 an unsigned division by OP1 with remainder MOD. */
2144 static rtx
2145 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2147 /* (mod != 0 ? 1 : 0) */
2148 return gen_rtx_IF_THEN_ELSE
2149 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2150 const1_rtx, const0_rtx);
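/* Worked example (editorial): 7u / 2u truncates to 3 with MOD = 1;
   any nonzero remainder gives an adjustment of +1, so the ceiling
   result is 4.  */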
2153 /* Return the difference between the rounded and the truncated result
2154 of a signed division by OP1 with remainder MOD. Halfway cases are
2155 rounded away from zero, rather than to the nearest even number. */
2156 static rtx
2157 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2159 /* (abs (mod) >= abs (op1) - abs (mod)
2160 ? (op1 / mod > 0 ? 1 : -1)
2161 : 0) */
2162 return gen_rtx_IF_THEN_ELSE
2163 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2164 gen_rtx_MINUS (mode,
2165 gen_rtx_ABS (mode, op1),
2166 gen_rtx_ABS (mode, mod))),
2167 gen_rtx_IF_THEN_ELSE
2168 (mode, gen_rtx_GT (BImode,
2169 gen_rtx_DIV (mode, op1, mod),
2170 const0_rtx),
2171 const1_rtx, constm1_rtx),
2172 const0_rtx);
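/* Worked example (editorial): for a dividend of 7 and divisor OP1 = 2,
   MOD = 1 and abs (OP1) - abs (MOD) = 1, so the first test holds;
   OP1 / MOD = 2 > 0, so the adjustment is +1 and 7 / 2 rounds to 4,
   i.e. the halfway case 3.5 is rounded away from zero.  */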
2175 /* Return the difference between the rounded and the truncated result
2176 of an unsigned division by OP1 with remainder MOD. Halfway cases
2177 are rounded away from zero, rather than to the nearest even
2178 number. */
2179 static rtx
2180 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2182 /* (mod >= op1 - mod ? 1 : 0) */
2183 return gen_rtx_IF_THEN_ELSE
2184 (mode, gen_rtx_GE (BImode, mod,
2185 gen_rtx_MINUS (mode, op1, mod)),
2186 const1_rtx, const0_rtx);
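/* Worked example (editorial): 7u / 2u gives MOD = 1 and OP1 - MOD = 1,
   so MOD >= OP1 - MOD holds, the adjustment is +1, and the rounded
   result is 3 + 1 = 4.  */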
2189 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2190 any rtl. */
2192 static rtx
2193 convert_debug_memory_address (enum machine_mode mode, rtx x)
2195 enum machine_mode xmode = GET_MODE (x);
2197 #ifndef POINTERS_EXTEND_UNSIGNED
2198 gcc_assert (mode == Pmode);
2199 gcc_assert (xmode == mode || xmode == VOIDmode);
2200 #else
2201 gcc_assert (mode == Pmode || mode == ptr_mode);
2203 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2204 return x;
2206 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2207 x = simplify_gen_subreg (mode, x, xmode,
2208 subreg_lowpart_offset
2209 (mode, xmode));
2210 else if (POINTERS_EXTEND_UNSIGNED > 0)
2211 x = gen_rtx_ZERO_EXTEND (mode, x);
2212 else if (!POINTERS_EXTEND_UNSIGNED)
2213 x = gen_rtx_SIGN_EXTEND (mode, x);
2214 else
2215 gcc_unreachable ();
2216 #endif /* POINTERS_EXTEND_UNSIGNED */
2218 return x;
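/* Illustrative sketch (editorial): on a target where Pmode is wider than
   ptr_mode and POINTERS_EXTEND_UNSIGNED is positive, converting a
   ptr_mode address to Pmode wraps it in ZERO_EXTEND; the narrowing
   direction instead takes the low part via simplify_gen_subreg.  Either
   way no insns are emitted, as required for debug expressions.  */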
2221 /* Return an RTX equivalent to the value of the tree expression
2222 EXP. */
2224 static rtx
2225 expand_debug_expr (tree exp)
2227 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2228 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2229 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2230 addr_space_t as;
2231 enum machine_mode address_mode;
2233 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2235 case tcc_expression:
2236 switch (TREE_CODE (exp))
2238 case COND_EXPR:
2239 case DOT_PROD_EXPR:
2240 goto ternary;
2242 case TRUTH_ANDIF_EXPR:
2243 case TRUTH_ORIF_EXPR:
2244 case TRUTH_AND_EXPR:
2245 case TRUTH_OR_EXPR:
2246 case TRUTH_XOR_EXPR:
2247 goto binary;
2249 case TRUTH_NOT_EXPR:
2250 goto unary;
2252 default:
2253 break;
2255 break;
2257 ternary:
2258 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2259 if (!op2)
2260 return NULL_RTX;
2261 /* Fall through. */
2263 binary:
2264 case tcc_binary:
2265 case tcc_comparison:
2266 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2267 if (!op1)
2268 return NULL_RTX;
2269 /* Fall through. */
2271 unary:
2272 case tcc_unary:
2273 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2274 if (!op0)
2275 return NULL_RTX;
2276 break;
2278 case tcc_type:
2279 case tcc_statement:
2280 gcc_unreachable ();
2282 case tcc_constant:
2283 case tcc_exceptional:
2284 case tcc_declaration:
2285 case tcc_reference:
2286 case tcc_vl_exp:
2287 break;
2290 switch (TREE_CODE (exp))
2292 case STRING_CST:
2293 if (!lookup_constant_def (exp))
2295 if (strlen (TREE_STRING_POINTER (exp)) + 1
2296 != (size_t) TREE_STRING_LENGTH (exp))
2297 return NULL_RTX;
2298 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2299 op0 = gen_rtx_MEM (BLKmode, op0);
2300 set_mem_attributes (op0, exp, 0);
2301 return op0;
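/* Editorial note: the strlen check above rejects strings with embedded
   nul bytes; e.g. "ab\0cd" has TREE_STRING_LENGTH 6 but
   strlen () + 1 == 3, so expansion bails out instead of emitting a
   CONST_STRING, which carries no explicit length.  */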
2303 /* Fall through... */
2305 case INTEGER_CST:
2306 case REAL_CST:
2307 case FIXED_CST:
2308 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2309 return op0;
2311 case COMPLEX_CST:
2312 gcc_assert (COMPLEX_MODE_P (mode));
2313 op0 = expand_debug_expr (TREE_REALPART (exp));
2314 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2315 return gen_rtx_CONCAT (mode, op0, op1);
2317 case DEBUG_EXPR_DECL:
2318 op0 = DECL_RTL_IF_SET (exp);
2320 if (op0)
2321 return op0;
2323 op0 = gen_rtx_DEBUG_EXPR (mode);
2324 DEBUG_EXPR_TREE_DECL (op0) = exp;
2325 SET_DECL_RTL (exp, op0);
2327 return op0;
2329 case VAR_DECL:
2330 case PARM_DECL:
2331 case FUNCTION_DECL:
2332 case LABEL_DECL:
2333 case CONST_DECL:
2334 case RESULT_DECL:
2335 op0 = DECL_RTL_IF_SET (exp);
2337 /* This decl was probably optimized away. */
2338 if (!op0)
2340 if (TREE_CODE (exp) != VAR_DECL
2341 || DECL_EXTERNAL (exp)
2342 || !TREE_STATIC (exp)
2343 || !DECL_NAME (exp)
2344 || DECL_HARD_REGISTER (exp)
2345 || mode == VOIDmode)
2346 return NULL;
2348 op0 = make_decl_rtl_for_debug (exp);
2349 if (!MEM_P (op0)
2350 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2351 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2352 return NULL;
2354 else
2355 op0 = copy_rtx (op0);
2357 if (GET_MODE (op0) == BLKmode
2358 /* If op0 is not BLKmode but MODE is, adjust_mode
2359 below would ICE. While it is likely a FE bug,
2360 try to be robust here. See PR43166. */
2361 || mode == BLKmode
2362 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2364 gcc_assert (MEM_P (op0));
2365 op0 = adjust_address_nv (op0, mode, 0);
2366 return op0;
2369 /* Fall through. */
2371 adjust_mode:
2372 case PAREN_EXPR:
2373 case NOP_EXPR:
2374 case CONVERT_EXPR:
2376 enum machine_mode inner_mode = GET_MODE (op0);
2378 if (mode == inner_mode)
2379 return op0;
2381 if (inner_mode == VOIDmode)
2383 if (TREE_CODE (exp) == SSA_NAME)
2384 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2385 else
2386 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2387 if (mode == inner_mode)
2388 return op0;
2391 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2393 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2394 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2395 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2396 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2397 else
2398 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2400 else if (FLOAT_MODE_P (mode))
2402 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2403 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2404 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2405 else
2406 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2408 else if (FLOAT_MODE_P (inner_mode))
2410 if (unsignedp)
2411 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2412 else
2413 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2415 else if (CONSTANT_P (op0)
2416 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2417 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2418 subreg_lowpart_offset (mode,
2419 inner_mode));
2420 else if (unsignedp)
2421 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2422 else
2423 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2425 return op0;
2428 case INDIRECT_REF:
2429 case ALIGN_INDIRECT_REF:
2430 case MISALIGNED_INDIRECT_REF:
2431 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2432 if (!op0)
2433 return NULL;
2435 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2437 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2438 address_mode = targetm.addr_space.address_mode (as);
2440 else
2442 as = ADDR_SPACE_GENERIC;
2443 address_mode = Pmode;
2446 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2448 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2449 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2452 op0 = gen_rtx_MEM (mode, op0);
2454 set_mem_attributes (op0, exp, 0);
2455 set_mem_addr_space (op0, as);
2457 return op0;
2459 case TARGET_MEM_REF:
2460 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2461 return NULL;
2463 op0 = expand_debug_expr
2464 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2465 if (!op0)
2466 return NULL;
2468 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2470 op0 = gen_rtx_MEM (mode, op0);
2472 set_mem_attributes (op0, exp, 0);
2473 set_mem_addr_space (op0, as);
2475 return op0;
2477 case ARRAY_REF:
2478 case ARRAY_RANGE_REF:
2479 case COMPONENT_REF:
2480 case BIT_FIELD_REF:
2481 case REALPART_EXPR:
2482 case IMAGPART_EXPR:
2483 case VIEW_CONVERT_EXPR:
2485 enum machine_mode mode1;
2486 HOST_WIDE_INT bitsize, bitpos;
2487 tree offset;
2488 int volatilep = 0;
2489 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2490 &mode1, &unsignedp, &volatilep, false);
2491 rtx orig_op0;
2493 if (bitsize == 0)
2494 return NULL;
2496 orig_op0 = op0 = expand_debug_expr (tem);
2498 if (!op0)
2499 return NULL;
2501 if (offset)
2503 enum machine_mode addrmode, offmode;
2505 gcc_assert (MEM_P (op0));
2507 op0 = XEXP (op0, 0);
2508 addrmode = GET_MODE (op0);
2509 if (addrmode == VOIDmode)
2510 addrmode = Pmode;
2512 op1 = expand_debug_expr (offset);
2513 if (!op1)
2514 return NULL;
2516 offmode = GET_MODE (op1);
2517 if (offmode == VOIDmode)
2518 offmode = TYPE_MODE (TREE_TYPE (offset));
2520 if (addrmode != offmode)
2521 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2522 subreg_lowpart_offset (addrmode,
2523 offmode));
2525 /* Don't use offset_address here; we don't need a
2526 recognizable address, and we don't want to generate
2527 code. */
2528 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2531 if (MEM_P (op0))
2533 if (mode1 == VOIDmode)
2534 /* Bitfield. */
2535 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2536 if (bitpos >= BITS_PER_UNIT)
2538 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2539 bitpos %= BITS_PER_UNIT;
2541 else if (bitpos < 0)
2543 HOST_WIDE_INT units
2544 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2545 op0 = adjust_address_nv (op0, mode1, units);
2546 bitpos += units * BITS_PER_UNIT;
2548 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2549 op0 = adjust_address_nv (op0, mode, 0);
2550 else if (GET_MODE (op0) != mode1)
2551 op0 = adjust_address_nv (op0, mode1, 0);
2552 else
2553 op0 = copy_rtx (op0);
2554 if (op0 == orig_op0)
2555 op0 = shallow_copy_rtx (op0);
2556 set_mem_attributes (op0, exp, 0);
2559 if (bitpos == 0 && mode == GET_MODE (op0))
2560 return op0;
2562 if (bitpos < 0)
2563 return NULL;
2565 if ((bitpos % BITS_PER_UNIT) == 0
2566 && bitsize == GET_MODE_BITSIZE (mode1))
2568 enum machine_mode opmode = GET_MODE (op0);
2570 gcc_assert (opmode != BLKmode);
2572 if (opmode == VOIDmode)
2573 opmode = mode1;
2575 /* This condition may hold if we're expanding the address
2576 right past the end of an array that turned out not to
2577 be addressable (i.e., the address was only computed in
2578 debug stmts). The gen_subreg below would rightfully
2579 crash, and the address doesn't really exist, so just
2580 drop it. */
2581 if (bitpos >= GET_MODE_BITSIZE (opmode))
2582 return NULL;
2584 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2585 return simplify_gen_subreg (mode, op0, opmode,
2586 bitpos / BITS_PER_UNIT);
2589 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2590 && TYPE_UNSIGNED (TREE_TYPE (exp))
2591 ? SIGN_EXTRACT
2592 : ZERO_EXTRACT, mode,
2593 GET_MODE (op0) != VOIDmode
2594 ? GET_MODE (op0) : mode1,
2595 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2598 case ABS_EXPR:
2599 return gen_rtx_ABS (mode, op0);
2601 case NEGATE_EXPR:
2602 return gen_rtx_NEG (mode, op0);
2604 case BIT_NOT_EXPR:
2605 return gen_rtx_NOT (mode, op0);
2607 case FLOAT_EXPR:
2608 if (unsignedp)
2609 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2610 else
2611 return gen_rtx_FLOAT (mode, op0);
2613 case FIX_TRUNC_EXPR:
2614 if (unsignedp)
2615 return gen_rtx_UNSIGNED_FIX (mode, op0);
2616 else
2617 return gen_rtx_FIX (mode, op0);
2619 case POINTER_PLUS_EXPR:
2620 case PLUS_EXPR:
2621 return gen_rtx_PLUS (mode, op0, op1);
2623 case MINUS_EXPR:
2624 return gen_rtx_MINUS (mode, op0, op1);
2626 case MULT_EXPR:
2627 return gen_rtx_MULT (mode, op0, op1);
2629 case RDIV_EXPR:
2630 case TRUNC_DIV_EXPR:
2631 case EXACT_DIV_EXPR:
2632 if (unsignedp)
2633 return gen_rtx_UDIV (mode, op0, op1);
2634 else
2635 return gen_rtx_DIV (mode, op0, op1);
2637 case TRUNC_MOD_EXPR:
2638 if (unsignedp)
2639 return gen_rtx_UMOD (mode, op0, op1);
2640 else
2641 return gen_rtx_MOD (mode, op0, op1);
2643 case FLOOR_DIV_EXPR:
2644 if (unsignedp)
2645 return gen_rtx_UDIV (mode, op0, op1);
2646 else
2648 rtx div = gen_rtx_DIV (mode, op0, op1);
2649 rtx mod = gen_rtx_MOD (mode, op0, op1);
2650 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2651 return gen_rtx_PLUS (mode, div, adj);
2654 case FLOOR_MOD_EXPR:
2655 if (unsignedp)
2656 return gen_rtx_UMOD (mode, op0, op1);
2657 else
2659 rtx mod = gen_rtx_MOD (mode, op0, op1);
2660 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2661 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2662 return gen_rtx_PLUS (mode, mod, adj);
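/* Editorial note: the floor modulus is derived from the truncated one
   as MOD - ADJ * OP1; e.g. -7 floor-mod 2 is -1 - (-1 * 2) = 1,
   matching the sign of the divisor as floor semantics require.  */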
2665 case CEIL_DIV_EXPR:
2666 if (unsignedp)
2668 rtx div = gen_rtx_UDIV (mode, op0, op1);
2669 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2670 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2671 return gen_rtx_PLUS (mode, div, adj);
2673 else
2675 rtx div = gen_rtx_DIV (mode, op0, op1);
2676 rtx mod = gen_rtx_MOD (mode, op0, op1);
2677 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2678 return gen_rtx_PLUS (mode, div, adj);
2681 case CEIL_MOD_EXPR:
2682 if (unsignedp)
2684 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2685 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2686 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2687 return gen_rtx_PLUS (mode, mod, adj);
2689 else
2691 rtx mod = gen_rtx_MOD (mode, op0, op1);
2692 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2693 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2694 return gen_rtx_PLUS (mode, mod, adj);
2697 case ROUND_DIV_EXPR:
2698 if (unsignedp)
2700 rtx div = gen_rtx_UDIV (mode, op0, op1);
2701 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2702 rtx adj = round_udiv_adjust (mode, mod, op1);
2703 return gen_rtx_PLUS (mode, div, adj);
2705 else
2707 rtx div = gen_rtx_DIV (mode, op0, op1);
2708 rtx mod = gen_rtx_MOD (mode, op0, op1);
2709 rtx adj = round_sdiv_adjust (mode, mod, op1);
2710 return gen_rtx_PLUS (mode, div, adj);
2713 case ROUND_MOD_EXPR:
2714 if (unsignedp)
2716 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2717 rtx adj = round_udiv_adjust (mode, mod, op1);
2718 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2719 return gen_rtx_PLUS (mode, mod, adj);
2721 else
2723 rtx mod = gen_rtx_MOD (mode, op0, op1);
2724 rtx adj = round_sdiv_adjust (mode, mod, op1);
2725 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2726 return gen_rtx_PLUS (mode, mod, adj);
2729 case LSHIFT_EXPR:
2730 return gen_rtx_ASHIFT (mode, op0, op1);
2732 case RSHIFT_EXPR:
2733 if (unsignedp)
2734 return gen_rtx_LSHIFTRT (mode, op0, op1);
2735 else
2736 return gen_rtx_ASHIFTRT (mode, op0, op1);
2738 case LROTATE_EXPR:
2739 return gen_rtx_ROTATE (mode, op0, op1);
2741 case RROTATE_EXPR:
2742 return gen_rtx_ROTATERT (mode, op0, op1);
2744 case MIN_EXPR:
2745 if (unsignedp)
2746 return gen_rtx_UMIN (mode, op0, op1);
2747 else
2748 return gen_rtx_SMIN (mode, op0, op1);
2750 case MAX_EXPR:
2751 if (unsignedp)
2752 return gen_rtx_UMAX (mode, op0, op1);
2753 else
2754 return gen_rtx_SMAX (mode, op0, op1);
2756 case BIT_AND_EXPR:
2757 case TRUTH_AND_EXPR:
2758 return gen_rtx_AND (mode, op0, op1);
2760 case BIT_IOR_EXPR:
2761 case TRUTH_OR_EXPR:
2762 return gen_rtx_IOR (mode, op0, op1);
2764 case BIT_XOR_EXPR:
2765 case TRUTH_XOR_EXPR:
2766 return gen_rtx_XOR (mode, op0, op1);
2768 case TRUTH_ANDIF_EXPR:
2769 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2771 case TRUTH_ORIF_EXPR:
2772 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2774 case TRUTH_NOT_EXPR:
2775 return gen_rtx_EQ (mode, op0, const0_rtx);
2777 case LT_EXPR:
2778 if (unsignedp)
2779 return gen_rtx_LTU (mode, op0, op1);
2780 else
2781 return gen_rtx_LT (mode, op0, op1);
2783 case LE_EXPR:
2784 if (unsignedp)
2785 return gen_rtx_LEU (mode, op0, op1);
2786 else
2787 return gen_rtx_LE (mode, op0, op1);
2789 case GT_EXPR:
2790 if (unsignedp)
2791 return gen_rtx_GTU (mode, op0, op1);
2792 else
2793 return gen_rtx_GT (mode, op0, op1);
2795 case GE_EXPR:
2796 if (unsignedp)
2797 return gen_rtx_GEU (mode, op0, op1);
2798 else
2799 return gen_rtx_GE (mode, op0, op1);
2801 case EQ_EXPR:
2802 return gen_rtx_EQ (mode, op0, op1);
2804 case NE_EXPR:
2805 return gen_rtx_NE (mode, op0, op1);
2807 case UNORDERED_EXPR:
2808 return gen_rtx_UNORDERED (mode, op0, op1);
2810 case ORDERED_EXPR:
2811 return gen_rtx_ORDERED (mode, op0, op1);
2813 case UNLT_EXPR:
2814 return gen_rtx_UNLT (mode, op0, op1);
2816 case UNLE_EXPR:
2817 return gen_rtx_UNLE (mode, op0, op1);
2819 case UNGT_EXPR:
2820 return gen_rtx_UNGT (mode, op0, op1);
2822 case UNGE_EXPR:
2823 return gen_rtx_UNGE (mode, op0, op1);
2825 case UNEQ_EXPR:
2826 return gen_rtx_UNEQ (mode, op0, op1);
2828 case LTGT_EXPR:
2829 return gen_rtx_LTGT (mode, op0, op1);
2831 case COND_EXPR:
2832 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2834 case COMPLEX_EXPR:
2835 gcc_assert (COMPLEX_MODE_P (mode));
2836 if (GET_MODE (op0) == VOIDmode)
2837 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2838 if (GET_MODE (op1) == VOIDmode)
2839 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2840 return gen_rtx_CONCAT (mode, op0, op1);
2842 case CONJ_EXPR:
2843 if (GET_CODE (op0) == CONCAT)
2844 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2845 gen_rtx_NEG (GET_MODE_INNER (mode),
2846 XEXP (op0, 1)));
2847 else
2849 enum machine_mode imode = GET_MODE_INNER (mode);
2850 rtx re, im;
2852 if (MEM_P (op0))
2854 re = adjust_address_nv (op0, imode, 0);
2855 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2857 else
2859 enum machine_mode ifmode = int_mode_for_mode (mode);
2860 enum machine_mode ihmode = int_mode_for_mode (imode);
2861 rtx halfsize;
2862 if (ifmode == BLKmode || ihmode == BLKmode)
2863 return NULL;
2864 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2865 re = op0;
2866 if (mode != ifmode)
2867 re = gen_rtx_SUBREG (ifmode, re, 0);
2868 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2869 if (imode != ihmode)
2870 re = gen_rtx_SUBREG (imode, re, 0);
2871 im = copy_rtx (op0);
2872 if (mode != ifmode)
2873 im = gen_rtx_SUBREG (ifmode, im, 0);
2874 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2875 if (imode != ihmode)
2876 im = gen_rtx_SUBREG (imode, im, 0);
2878 im = gen_rtx_NEG (imode, im);
2879 return gen_rtx_CONCAT (mode, re, im);
2882 case ADDR_EXPR:
2883 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2884 if (!op0 || !MEM_P (op0))
2885 return NULL;
2887 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2889 return op0;
2891 case VECTOR_CST:
2892 exp = build_constructor_from_list (TREE_TYPE (exp),
2893 TREE_VECTOR_CST_ELTS (exp));
2894 /* Fall through. */
2896 case CONSTRUCTOR:
2897 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2899 unsigned i;
2900 tree val;
2902 op0 = gen_rtx_CONCATN
2903 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2905 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2907 op1 = expand_debug_expr (val);
2908 if (!op1)
2909 return NULL;
2910 XVECEXP (op0, 0, i) = op1;
2913 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2915 op1 = expand_debug_expr
2916 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2918 if (!op1)
2919 return NULL;
2921 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2922 XVECEXP (op0, 0, i) = op1;
2925 return op0;
2927 else
2928 goto flag_unsupported;
2930 case CALL_EXPR:
2931 /* ??? Maybe handle some builtins? */
2932 return NULL;
2934 case SSA_NAME:
2936 gimple g = get_gimple_for_ssa_name (exp);
2937 if (g)
2939 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2940 if (!op0)
2941 return NULL;
2943 else
2945 int part = var_to_partition (SA.map, exp);
2947 if (part == NO_PARTITION)
2948 return NULL;
2950 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2952 op0 = SA.partition_to_pseudo[part];
2954 goto adjust_mode;
2957 case ERROR_MARK:
2958 return NULL;
2960 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
2961 case REALIGN_LOAD_EXPR:
2962 case REDUC_MAX_EXPR:
2963 case REDUC_MIN_EXPR:
2964 case REDUC_PLUS_EXPR:
2965 case VEC_COND_EXPR:
2966 case VEC_EXTRACT_EVEN_EXPR:
2967 case VEC_EXTRACT_ODD_EXPR:
2968 case VEC_INTERLEAVE_HIGH_EXPR:
2969 case VEC_INTERLEAVE_LOW_EXPR:
2970 case VEC_LSHIFT_EXPR:
2971 case VEC_PACK_FIX_TRUNC_EXPR:
2972 case VEC_PACK_SAT_EXPR:
2973 case VEC_PACK_TRUNC_EXPR:
2974 case VEC_RSHIFT_EXPR:
2975 case VEC_UNPACK_FLOAT_HI_EXPR:
2976 case VEC_UNPACK_FLOAT_LO_EXPR:
2977 case VEC_UNPACK_HI_EXPR:
2978 case VEC_UNPACK_LO_EXPR:
2979 case VEC_WIDEN_MULT_HI_EXPR:
2980 case VEC_WIDEN_MULT_LO_EXPR:
2981 return NULL;
2983 /* Misc codes. */
2984 case ADDR_SPACE_CONVERT_EXPR:
2985 case FIXED_CONVERT_EXPR:
2986 case OBJ_TYPE_REF:
2987 case WITH_SIZE_EXPR:
2988 return NULL;
2990 case DOT_PROD_EXPR:
2991 if (SCALAR_INT_MODE_P (GET_MODE (op0))
2992 && SCALAR_INT_MODE_P (mode))
2994 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2995 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2996 else
2997 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2998 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
2999 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3000 else
3001 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3002 op0 = gen_rtx_MULT (mode, op0, op1);
3003 return gen_rtx_PLUS (mode, op0, op2);
3005 return NULL;
3007 case WIDEN_MULT_EXPR:
3008 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3009 && SCALAR_INT_MODE_P (mode))
3011 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3012 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3013 else
3014 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3015 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3016 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3017 else
3018 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3019 return gen_rtx_MULT (mode, op0, op1);
3021 return NULL;
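/* Editorial note: for debug purposes a widening multiply is modelled
   by extending both operands to the wide mode and multiplying there;
   the value is the same even though real code generation would use a
   widening multiply instruction.  */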
3023 case WIDEN_SUM_EXPR:
3024 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3025 && SCALAR_INT_MODE_P (mode))
3027 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3028 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3029 else
3030 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3031 return gen_rtx_PLUS (mode, op0, op1);
3033 return NULL;
3035 default:
3036 flag_unsupported:
3037 #ifdef ENABLE_CHECKING
3038 debug_tree (exp);
3039 gcc_unreachable ();
3040 #else
3041 return NULL;
3042 #endif
3046 /* Expand the _LOCs in debug insns. We run this after expanding all
3047 regular insns, so that any variables referenced in the function
3048 will have their DECL_RTLs set. */
3050 static void
3051 expand_debug_locations (void)
3053 rtx insn;
3054 rtx last = get_last_insn ();
3055 int save_strict_alias = flag_strict_aliasing;
3057 /* New alias sets while setting up memory attributes cause
3058 -fcompare-debug failures, even though they don't bring about any
3059 codegen changes. */
3060 flag_strict_aliasing = 0;
3062 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3063 if (DEBUG_INSN_P (insn))
3065 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3066 rtx val;
3067 enum machine_mode mode;
3069 if (value == NULL_TREE)
3070 val = NULL_RTX;
3071 else
3073 val = expand_debug_expr (value);
3074 gcc_assert (last == get_last_insn ());
3077 if (!val)
3078 val = gen_rtx_UNKNOWN_VAR_LOC ();
3079 else
3081 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3083 gcc_assert (mode == GET_MODE (val)
3084 || (GET_MODE (val) == VOIDmode
3085 && (CONST_INT_P (val)
3086 || GET_CODE (val) == CONST_FIXED
3087 || GET_CODE (val) == CONST_DOUBLE
3088 || GET_CODE (val) == LABEL_REF)));
3091 INSN_VAR_LOCATION_LOC (insn) = val;
3094 flag_strict_aliasing = save_strict_alias;
3097 /* Expand basic block BB from GIMPLE trees to RTL. */
3099 static basic_block
3100 expand_gimple_basic_block (basic_block bb)
3102 gimple_stmt_iterator gsi;
3103 gimple_seq stmts;
3104 gimple stmt = NULL;
3105 rtx note, last;
3106 edge e;
3107 edge_iterator ei;
3108 void **elt;
3110 if (dump_file)
3111 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3112 bb->index);
3114 /* Note that since we are now transitioning from GIMPLE to RTL, we
3115 cannot use the gsi_*_bb() routines because they expect the basic
3116 block to be in GIMPLE, instead of RTL. Therefore, we need to
3117 access the BB sequence directly. */
3118 stmts = bb_seq (bb);
3119 bb->il.gimple = NULL;
3120 rtl_profile_for_bb (bb);
3121 init_rtl_bb_info (bb);
3122 bb->flags |= BB_RTL;
3124 /* Remove the RETURN_EXPR if we may fall through to the exit
3125 instead. */
3126 gsi = gsi_last (stmts);
3127 if (!gsi_end_p (gsi)
3128 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3130 gimple ret_stmt = gsi_stmt (gsi);
3132 gcc_assert (single_succ_p (bb));
3133 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3135 if (bb->next_bb == EXIT_BLOCK_PTR
3136 && !gimple_return_retval (ret_stmt))
3138 gsi_remove (&gsi, false);
3139 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3143 gsi = gsi_start (stmts);
3144 if (!gsi_end_p (gsi))
3146 stmt = gsi_stmt (gsi);
3147 if (gimple_code (stmt) != GIMPLE_LABEL)
3148 stmt = NULL;
3151 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3153 if (stmt || elt)
3155 last = get_last_insn ();
3157 if (stmt)
3159 expand_gimple_stmt (stmt);
3160 gsi_next (&gsi);
3163 if (elt)
3164 emit_label ((rtx) *elt);
3166 /* Java emits line number notes at the top of labels.
3167 ??? Make this go away once line number notes are obsoleted. */
3168 BB_HEAD (bb) = NEXT_INSN (last);
3169 if (NOTE_P (BB_HEAD (bb)))
3170 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3171 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3173 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3175 else
3176 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3178 NOTE_BASIC_BLOCK (note) = bb;
3180 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3182 basic_block new_bb;
3184 stmt = gsi_stmt (gsi);
3186 /* If this statement is a non-debug one, and we generate debug
3187 insns, then this one might be the last real use of a TERed
3188 SSA_NAME, but where there are still some debug uses further
3189 down. Expanding the current SSA name in such further debug
3190 uses by their RHS might lead to wrong debug info, as coalescing
3191 might make the operands of such RHS be placed into the same
3192 pseudo as something else. Like so:
3193 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3194 use(a_1);
3195 a_2 = ...
3196 #DEBUG ... => a_1
3197 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3198 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3199 the write to a_2 would actually have clobbered the place which
3200 formerly held a_0.
3202 So, instead of that, we recognize the situation, and generate
3203 debug temporaries at the last real use of TERed SSA names:
3204 a_1 = a_0 + 1;
3205 #DEBUG #D1 => a_1
3206 use(a_1);
3207 a_2 = ...
3208 #DEBUG ... => #D1
3210 if (MAY_HAVE_DEBUG_INSNS
3211 && SA.values
3212 && !is_gimple_debug (stmt))
3214 ssa_op_iter iter;
3215 tree op;
3216 gimple def;
3218 location_t sloc = get_curr_insn_source_location ();
3219 tree sblock = get_curr_insn_block ();
3221 /* Look for SSA names that have their last use here (TERed
3222 names always have only one real use). */
3223 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3224 if ((def = get_gimple_for_ssa_name (op)))
3226 imm_use_iterator imm_iter;
3227 use_operand_p use_p;
3228 bool have_debug_uses = false;
3230 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3232 if (gimple_debug_bind_p (USE_STMT (use_p)))
3234 have_debug_uses = true;
3235 break;
3239 if (have_debug_uses)
3241 /* OP is a TERed SSA name, DEF is its defining
3242 statement, and OP is used in further debug
3243 instructions. Generate a debug temporary, and
3244 replace all uses of OP in debug insns with that
3245 temporary. */
3246 gimple debugstmt;
3247 tree value = gimple_assign_rhs_to_tree (def);
3248 tree vexpr = make_node (DEBUG_EXPR_DECL);
3249 rtx val;
3250 enum machine_mode mode;
3252 set_curr_insn_source_location (gimple_location (def));
3253 set_curr_insn_block (gimple_block (def));
3255 DECL_ARTIFICIAL (vexpr) = 1;
3256 TREE_TYPE (vexpr) = TREE_TYPE (value);
3257 if (DECL_P (value))
3258 mode = DECL_MODE (value);
3259 else
3260 mode = TYPE_MODE (TREE_TYPE (value));
3261 DECL_MODE (vexpr) = mode;
3263 val = gen_rtx_VAR_LOCATION
3264 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3266 val = emit_debug_insn (val);
3268 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3270 if (!gimple_debug_bind_p (debugstmt))
3271 continue;
3273 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3274 SET_USE (use_p, vexpr);
3276 update_stmt (debugstmt);
3280 set_curr_insn_source_location (sloc);
3281 set_curr_insn_block (sblock);
3284 currently_expanding_gimple_stmt = stmt;
3286 /* Expand this statement, then evaluate the resulting RTL and
3287 fixup the CFG accordingly. */
3288 if (gimple_code (stmt) == GIMPLE_COND)
3290 new_bb = expand_gimple_cond (bb, stmt);
3291 if (new_bb)
3292 return new_bb;
3294 else if (gimple_debug_bind_p (stmt))
3296 location_t sloc = get_curr_insn_source_location ();
3297 tree sblock = get_curr_insn_block ();
3298 gimple_stmt_iterator nsi = gsi;
3300 for (;;)
3302 tree var = gimple_debug_bind_get_var (stmt);
3303 tree value;
3304 rtx val;
3305 enum machine_mode mode;
3307 if (gimple_debug_bind_has_value_p (stmt))
3308 value = gimple_debug_bind_get_value (stmt);
3309 else
3310 value = NULL_TREE;
3312 last = get_last_insn ();
3314 set_curr_insn_source_location (gimple_location (stmt));
3315 set_curr_insn_block (gimple_block (stmt));
3317 if (DECL_P (var))
3318 mode = DECL_MODE (var);
3319 else
3320 mode = TYPE_MODE (TREE_TYPE (var));
3322 val = gen_rtx_VAR_LOCATION
3323 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3325 val = emit_debug_insn (val);
3327 if (dump_file && (dump_flags & TDF_DETAILS))
3329 /* We can't dump the insn with a TREE where an RTX
3330 is expected. */
3331 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3332 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3333 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3336 /* In order not to generate too many debug temporaries,
3337 we delink all uses of debug statements we already expanded.
3338 Therefore debug statements between definition and real
3339 use of TERed SSA names will continue to use the SSA name,
3340 and not be replaced with debug temps. */
3341 delink_stmt_imm_use (stmt);
3343 gsi = nsi;
3344 gsi_next (&nsi);
3345 if (gsi_end_p (nsi))
3346 break;
3347 stmt = gsi_stmt (nsi);
3348 if (!gimple_debug_bind_p (stmt))
3349 break;
3352 set_curr_insn_source_location (sloc);
3353 set_curr_insn_block (sblock);
3355 else
3357 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3359 bool can_fallthru;
3360 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3361 if (new_bb)
3363 if (can_fallthru)
3364 bb = new_bb;
3365 else
3366 return new_bb;
3369 else
3371 def_operand_p def_p;
3372 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3374 if (def_p != NULL)
3376 /* Ignore this stmt if it is in the list of
3377 replaceable expressions. */
3378 if (SA.values
3379 && bitmap_bit_p (SA.values,
3380 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3381 continue;
3383 last = expand_gimple_stmt (stmt);
3384 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3389 currently_expanding_gimple_stmt = NULL;
3391 /* Expand implicit goto and convert goto_locus. */
3392 FOR_EACH_EDGE (e, ei, bb->succs)
3394 if (e->goto_locus && e->goto_block)
3396 set_curr_insn_source_location (e->goto_locus);
3397 set_curr_insn_block (e->goto_block);
3398 e->goto_locus = curr_insn_locator ();
3400 e->goto_block = NULL;
3401 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3403 emit_jump (label_rtx_for_bb (e->dest));
3404 e->flags &= ~EDGE_FALLTHRU;
3408 /* Expanded RTL can create a jump as the last instruction of a block.
3409 Later this might be assumed to be a jump to the successor and break edge insertion.
3410 We need to insert a dummy move to prevent this. PR41440. */
3411 if (single_succ_p (bb)
3412 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3413 && (last = get_last_insn ())
3414 && JUMP_P (last))
3416 rtx dummy = gen_reg_rtx (SImode);
3417 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3420 do_pending_stack_adjust ();
3422 /* Find the block tail. The last insn in the block is the insn
3423 before a barrier and/or table jump insn. */
3424 last = get_last_insn ();
3425 if (BARRIER_P (last))
3426 last = PREV_INSN (last);
3427 if (JUMP_TABLE_DATA_P (last))
3428 last = PREV_INSN (PREV_INSN (last));
3429 BB_END (bb) = last;
3431 update_bb_for_insn (bb);
3433 return bb;
3437 /* Create a basic block for initialization code. */
3439 static basic_block
3440 construct_init_block (void)
3442 basic_block init_block, first_block;
3443 edge e = NULL;
3444 int flags;
3446 /* Multiple entry points not supported yet. */
3447 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3448 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3449 init_rtl_bb_info (EXIT_BLOCK_PTR);
3450 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3451 EXIT_BLOCK_PTR->flags |= BB_RTL;
3453 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3455 /* When the entry edge points to the first basic block, we don't need a
3456 jump; otherwise we have to jump to the proper target. */
3457 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3459 tree label = gimple_block_label (e->dest);
3461 emit_jump (label_rtx (label));
3462 flags = 0;
3464 else
3465 flags = EDGE_FALLTHRU;
3467 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3468 get_last_insn (),
3469 ENTRY_BLOCK_PTR);
3470 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3471 init_block->count = ENTRY_BLOCK_PTR->count;
3472 if (e)
3474 first_block = e->dest;
3475 redirect_edge_succ (e, init_block);
3476 e = make_edge (init_block, first_block, flags);
3478 else
3479 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3480 e->probability = REG_BR_PROB_BASE;
3481 e->count = ENTRY_BLOCK_PTR->count;
3483 update_bb_for_insn (init_block);
3484 return init_block;
3487 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3488 found in the block tree. */
3490 static void
3491 set_block_levels (tree block, int level)
3493 while (block)
3495 BLOCK_NUMBER (block) = level;
3496 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3497 block = BLOCK_CHAIN (block);
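/* Illustrative sketch (editorial): for a block tree
     DECL_INITIAL -> { B1 -> { B2 }, B3 }
   this assigns BLOCK_NUMBER 0 to DECL_INITIAL, 1 to B1 and B3, and 2
   to B2, walking sibling chains iteratively and subblocks
   recursively.  */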
3501 /* Create a block containing landing pads and similar stuff. */
3503 static void
3504 construct_exit_block (void)
3506 rtx head = get_last_insn ();
3507 rtx end;
3508 basic_block exit_block;
3509 edge e, e2;
3510 unsigned ix;
3511 edge_iterator ei;
3512 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3514 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3516 /* Make sure the locus is set to the end of the function, so that
3517 epilogue line numbers and warnings are set properly. */
3518 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3519 input_location = cfun->function_end_locus;
3521 /* The following insns belong to the top scope. */
3522 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3524 /* Generate rtl for function exit. */
3525 expand_function_end ();
3527 end = get_last_insn ();
3528 if (head == end)
3529 return;
3530 /* While emitting the function end we may have moved the end of the last basic block. */
3532 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3533 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3534 head = NEXT_INSN (head);
3535 exit_block = create_basic_block (NEXT_INSN (head), end,
3536 EXIT_BLOCK_PTR->prev_bb);
3537 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3538 exit_block->count = EXIT_BLOCK_PTR->count;
3540 ix = 0;
3541 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3543 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3544 if (!(e->flags & EDGE_ABNORMAL))
3545 redirect_edge_succ (e, exit_block);
3546 else
3547 ix++;
3550 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3551 e->probability = REG_BR_PROB_BASE;
3552 e->count = EXIT_BLOCK_PTR->count;
3553 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3554 if (e2 != e)
3556 e->count -= e2->count;
3557 exit_block->count -= e2->count;
3558 exit_block->frequency -= EDGE_FREQUENCY (e2);
3560 if (e->count < 0)
3561 e->count = 0;
3562 if (exit_block->count < 0)
3563 exit_block->count = 0;
3564 if (exit_block->frequency < 0)
3565 exit_block->frequency = 0;
3566 update_bb_for_insn (exit_block);
3569 /* Helper function for discover_nonconstant_array_refs.
3570 Look for ARRAY_REF nodes with non-constant indexes and mark the
3571 base array addressable. */
3573 static tree
3574 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3575 void *data ATTRIBUTE_UNUSED)
3577 tree t = *tp;
3579 if (IS_TYPE_OR_DECL_P (t))
3580 *walk_subtrees = 0;
3581 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3583 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3584 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3585 && (!TREE_OPERAND (t, 2)
3586 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3587 || (TREE_CODE (t) == COMPONENT_REF
3588 && (!TREE_OPERAND (t,2)
3589 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3590 || TREE_CODE (t) == BIT_FIELD_REF
3591 || TREE_CODE (t) == REALPART_EXPR
3592 || TREE_CODE (t) == IMAGPART_EXPR
3593 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3594 || CONVERT_EXPR_P (t))
3595 t = TREE_OPERAND (t, 0);
3597 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3599 t = get_base_address (t);
3600 if (t && DECL_P (t)
3601 && DECL_MODE (t) != BLKmode)
3602 TREE_ADDRESSABLE (t) = 1;
3605 *walk_subtrees = 0;
3608 return NULL_TREE;
3611 /* RTL expansion is not able to compile array references with variable
3612 offsets for arrays stored in a single register. Discover such
3613 expressions and mark variables as addressable to avoid this
3614 scenario. */
3616 static void
3617 discover_nonconstant_array_refs (void)
3619 basic_block bb;
3620 gimple_stmt_iterator gsi;
3622 FOR_EACH_BB (bb)
3623 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3625 gimple stmt = gsi_stmt (gsi);
3626 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
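/* Editorial note: only decls whose DECL_MODE is not BLKmode are marked
   by the walk above, i.e. aggregates small enough that the backend
   could otherwise keep them in one register; e.g. on a typical 32-bit
   target a four-byte struct holding a two-element short array indexed
   by a variable must be forced into memory this way.  */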
3630 /* This function sets crtl->args.internal_arg_pointer to a virtual
3631 register if DRAP is needed. The local register allocator will replace
3632 virtual_incoming_args_rtx with the virtual register. */
3634 static void
3635 expand_stack_alignment (void)
3637 rtx drap_rtx;
3638 unsigned int preferred_stack_boundary;
3640 if (! SUPPORTS_STACK_ALIGNMENT)
3641 return;
3643 if (cfun->calls_alloca
3644 || cfun->has_nonlocal_label
3645 || crtl->has_nonlocal_goto)
3646 crtl->need_drap = true;
3648 /* Call update_stack_boundary here again to update incoming stack
3649 boundary. It may set incoming stack alignment to a different
3650 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3651 use the minimum incoming stack alignment to check if it is OK
3652 to perform sibcall optimization since sibcall optimization will
3653 only align the outgoing stack to incoming stack boundary. */
3654 if (targetm.calls.update_stack_boundary)
3655 targetm.calls.update_stack_boundary ();
3657 /* The incoming stack frame has to be aligned at least at
3658 parm_stack_boundary. */
3659 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3661 /* Update crtl->stack_alignment_estimated and use it later to align
3662 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3663 exceptions since callgraph doesn't collect incoming stack alignment
3664 in this case. */
3665 if (flag_non_call_exceptions
3666 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3667 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3668 else
3669 preferred_stack_boundary = crtl->preferred_stack_boundary;
3670 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3671 crtl->stack_alignment_estimated = preferred_stack_boundary;
3672 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3673 crtl->stack_alignment_needed = preferred_stack_boundary;
3675 gcc_assert (crtl->stack_alignment_needed
3676 <= crtl->stack_alignment_estimated);
3678 crtl->stack_realign_needed
3679 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3680 crtl->stack_realign_tried = crtl->stack_realign_needed;
3682 crtl->stack_realign_processed = true;
3684 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3685 alignment. */
3686 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3687 drap_rtx = targetm.calls.get_drap_rtx ();
3689 /* stack_realign_drap and drap_rtx must match. */
3690 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3692 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3693 if (NULL != drap_rtx)
3695 crtl->args.internal_arg_pointer = drap_rtx;
3697 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3698 needed. */
3699 fixup_tail_calls ();
3703 /* Translate the intermediate representation contained in the CFG
3704 from GIMPLE trees to RTL.
3706 We do conversion per basic block and preserve/update the tree CFG.
3707 This implies we have to do some magic as the CFG can simultaneously
3708 consist of basic blocks containing RTL and GIMPLE trees. This can
3709 confuse the CFG hooks, so be careful not to manipulate the CFG during
3710 the expansion. */
3712 static unsigned int
3713 gimple_expand_cfg (void)
3715 basic_block bb, init_block;
3716 sbitmap blocks;
3717 edge_iterator ei;
3718 edge e;
3719 unsigned i;
3721 rewrite_out_of_ssa (&SA);
3722 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3723 sizeof (rtx));
3725 /* Some backends want to know that we are expanding to RTL. */
3726 currently_expanding_to_rtl = 1;
3728 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3730 insn_locators_alloc ();
3731 if (!DECL_IS_BUILTIN (current_function_decl))
3733 /* Eventually, all FEs should explicitly set function_start_locus. */
3734 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3735 set_curr_insn_source_location
3736 (DECL_SOURCE_LOCATION (current_function_decl));
3737 else
3738 set_curr_insn_source_location (cfun->function_start_locus);
3740 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3741 prologue_locator = curr_insn_locator ();
3743 /* Make sure first insn is a note even if we don't want linenums.
3744 This makes sure the first insn will never be deleted.
3745 Also, final expects a note to appear there. */
3746 emit_note (NOTE_INSN_DELETED);
3748 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3749 discover_nonconstant_array_refs ();
3751 targetm.expand_to_rtl_hook ();
3752 crtl->stack_alignment_needed = STACK_BOUNDARY;
3753 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3754 crtl->stack_alignment_estimated = 0;
3755 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3756 cfun->cfg->max_jumptable_ents = 0;
3759 /* Expand the variables recorded during gimple lowering. */
3760 expand_used_vars ();
3762 /* Honor stack protection warnings. */
3763 if (warn_stack_protect)
3765 if (cfun->calls_alloca)
3766 warning (OPT_Wstack_protector,
3767 "not protecting local variables: variable length buffer");
3768 if (has_short_buffer && !crtl->stack_protect_guard)
3769 warning (OPT_Wstack_protector,
3770 "not protecting function: no buffer at least %d bytes long",
3771 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3774 /* Set up parameters and prepare for return, for the function. */
3775 expand_function_start (current_function_decl);
3777 /* Now that we also have the parameter RTXs, copy them over to our
3778 partitions. */
3779 for (i = 0; i < SA.map->num_partitions; i++)
3781 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3783 if (TREE_CODE (var) != VAR_DECL
3784 && !SA.partition_to_pseudo[i])
3785 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3786 gcc_assert (SA.partition_to_pseudo[i]);
3788 /* If this decl was marked as living in multiple places, reset
3789 this now to NULL. */
3790 if (DECL_RTL_IF_SET (var) == pc_rtx)
3791 SET_DECL_RTL (var, NULL);
3793 /* Some RTL parts really want to look at DECL_RTL(x) when x
3794 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3795 SET_DECL_RTL here making this available, but that would mean
3796 to select one of the potentially many RTLs for one DECL. Instead
3797 of doing that we simply reset the MEM_EXPR of the RTL in question,
3798 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3799 if (!DECL_RTL_SET_P (var))
3801 if (MEM_P (SA.partition_to_pseudo[i]))
3802 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3806 /* If this function is `main', emit a call to `__main'
3807 to run global initializers, etc. */
3808 if (DECL_NAME (current_function_decl)
3809 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3810 && DECL_FILE_SCOPE_P (current_function_decl))
3811 expand_main_function ();
3813 /* Initialize the stack_protect_guard field. This must happen after the
3814 call to __main (if any) so that the external decl is initialized. */
3815 if (crtl->stack_protect_guard)
3816 stack_protect_prologue ();
3818 expand_phi_nodes (&SA);
3820 /* Register rtl specific functions for cfg. */
3821 rtl_register_cfg_hooks ();
3823 init_block = construct_init_block ();
3825 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3826 remaining edges later. */
3827 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3828 e->flags &= ~EDGE_EXECUTABLE;
3830 lab_rtx_for_bb = pointer_map_create ();
3831 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3832 bb = expand_gimple_basic_block (bb);
3834 if (MAY_HAVE_DEBUG_INSNS)
3835 expand_debug_locations ();
3837 execute_free_datastructures ();
3838 finish_out_of_ssa (&SA);
3840 /* We are no longer in SSA form. */
3841 cfun->gimple_df->in_ssa_p = false;
3843 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3844 conservatively to true until they are all profile aware. */
3845 pointer_map_destroy (lab_rtx_for_bb);
3846 free_histograms ();
3848 construct_exit_block ();
3849 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3850 insn_locators_finalize ();
3852 /* Zap the tree EH table. */
3853 set_eh_throw_stmt_table (cfun, NULL);
3855 rebuild_jump_labels (get_insns ());
3857 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3859 edge e;
3860 edge_iterator ei;
3861 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3863 if (e->insns.r)
3864 commit_one_edge_insertion (e);
3865 else
3866 ei_next (&ei);
3870 /* We're done expanding trees to RTL. */
3871 currently_expanding_to_rtl = 0;
3873 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3875 edge e;
3876 edge_iterator ei;
3877 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3879 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3880 e->flags &= ~EDGE_EXECUTABLE;
3882 /* At the moment not all abnormal edges match the RTL
3883 representation. It is safe to remove them here as
3884 find_many_sub_basic_blocks will rediscover them.
3885 In the future we should get this fixed properly. */
3886 if ((e->flags & EDGE_ABNORMAL)
3887 && !(e->flags & EDGE_SIBCALL))
3888 remove_edge (e);
3889 else
3890 ei_next (&ei);
3894 blocks = sbitmap_alloc (last_basic_block);
3895 sbitmap_ones (blocks);
3896 find_many_sub_basic_blocks (blocks);
3897 sbitmap_free (blocks);
3898 purge_all_dead_edges ();
3900 compact_blocks ();
3902 expand_stack_alignment ();
3904 #ifdef ENABLE_CHECKING
3905 verify_flow_info ();
3906 #endif
3908 /* There's no need to defer outputting this function any more; we
3909 know we want to output it. */
3910 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3912 /* Now that we're done expanding trees to RTL, we shouldn't have any
3913 more CONCATs anywhere. */
3914 generating_concat_p = 0;
3916 if (dump_file)
3918 fprintf (dump_file,
3919 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3920 /* And the pass manager will dump RTL for us. */
3923 /* If we're emitting a nested function, make sure its parent gets
3924 emitted as well. Doing otherwise confuses debug info. */
3926 tree parent;
3927 for (parent = DECL_CONTEXT (current_function_decl);
3928 parent != NULL_TREE;
3929 parent = get_containing_scope (parent))
3930 if (TREE_CODE (parent) == FUNCTION_DECL)
3931 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3934 /* We are now committed to emitting code for this function. Do any
3935 preparation, such as emitting abstract debug info for the inline
3936 before it gets mangled by optimization. */
3937 if (cgraph_function_possibly_inlined_p (current_function_decl))
3938 (*debug_hooks->outlining_inline_function) (current_function_decl);
3940 TREE_ASM_WRITTEN (current_function_decl) = 1;
3942 /* After expanding, the return labels are no longer needed. */
3943 return_label = NULL;
3944 naked_return_label = NULL;
3945 /* Tag the blocks with a depth number so that change_scope can find
3946 the common parent easily. */
3947 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3948 default_rtl_profile ();
3949 return 0;
3952 struct rtl_opt_pass pass_expand =
3955 RTL_PASS,
3956 "expand", /* name */
3957 NULL, /* gate */
3958 gimple_expand_cfg, /* execute */
3959 NULL, /* sub */
3960 NULL, /* next */
3961 0, /* static_pass_number */
3962 TV_EXPAND, /* tv_id */
3963 PROP_ssa | PROP_gimple_leh | PROP_cfg
3964 | PROP_gimple_lcx, /* properties_required */
3965 PROP_rtl, /* properties_provided */
3966 PROP_ssa | PROP_trees, /* properties_destroyed */
3967 TODO_verify_ssa | TODO_verify_flow
3968 | TODO_verify_stmts, /* todo_flags_start */
3969 TODO_dump_func
3970 | TODO_ggc_collect /* todo_flags_finish */