1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
53 into RTL. */
54 struct ssaexpand SA;
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 /* Return an expression tree corresponding to the RHS of GIMPLE
61 statement STMT. */
63 tree
64 gimple_assign_rhs_to_tree (gimple stmt)
66 tree t;
67 enum gimple_rhs_class grhs_class;
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 if (grhs_class == GIMPLE_TERNARY_RHS)
72 t = build3 (gimple_assign_rhs_code (stmt),
73 TREE_TYPE (gimple_assign_lhs (stmt)),
74 gimple_assign_rhs1 (stmt),
75 gimple_assign_rhs2 (stmt),
76 gimple_assign_rhs3 (stmt));
77 else if (grhs_class == GIMPLE_BINARY_RHS)
78 t = build2 (gimple_assign_rhs_code (stmt),
79 TREE_TYPE (gimple_assign_lhs (stmt)),
80 gimple_assign_rhs1 (stmt),
81 gimple_assign_rhs2 (stmt));
82 else if (grhs_class == GIMPLE_UNARY_RHS)
83 t = build1 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt));
86 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 t = gimple_assign_rhs1 (stmt);
89 /* Avoid modifying this tree in place below. */
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
91 && gimple_location (stmt) != EXPR_LOCATION (t))
92 || (gimple_block (stmt)
93 && currently_expanding_to_rtl
94 && EXPR_P (t)
95 && gimple_block (stmt) != TREE_BLOCK (t)))
96 t = copy_node (t);
98 else
99 gcc_unreachable ();
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
104 TREE_BLOCK (t) = gimple_block (stmt);
106 return t;
110 #ifndef STACK_ALIGNMENT_NEEDED
111 #define STACK_ALIGNMENT_NEEDED 1
112 #endif
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
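/* For illustration: applied to an SSA name such as i_7, SSAVAR yields the
   underlying VAR_DECL i; applied to a plain decl it is the identity.  */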
116 /* Associate declaration T with storage space X. If T is not an
117 SSA name this is exactly SET_DECL_RTL, otherwise make the
118 partition of T associated with X. */
119 static inline void
120 set_rtl (tree t, rtx x)
122 if (TREE_CODE (t) == SSA_NAME)
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
125 if (x && !MEM_P (x))
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
127 /* For the benefit of debug information at -O0 (where vartracking
128 doesn't run) record the place also in the base DECL if it's
129 a normal variable (not a parameter). */
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 tree var = SSA_NAME_VAR (t);
133 /* If we don't yet have something recorded, just record it now. */
134 if (!DECL_RTL_SET_P (var))
135 SET_DECL_RTL (var, x);
136 /* If we have it set already to "multiple places" don't
137 change this. */
138 else if (DECL_RTL (var) == pc_rtx)
140 /* If we have something recorded and it's not the same place
141 as we want to record now, we have multiple partitions for the
142 same base variable, with different places. We can't just
143 randomly choose one, so we have to say that we don't know.
144 This only happens with optimization, and there var-tracking
145 will figure out the right thing. */
146 else if (DECL_RTL (var) != x)
147 SET_DECL_RTL (var, pc_rtx);
150 else
151 SET_DECL_RTL (t, x);
154 /* This structure holds data relevant to one variable that will be
155 placed in a stack slot. */
156 struct stack_var
158 /* The Variable. */
159 tree decl;
161 /* The offset of the variable. During partitioning, this is the
162 offset relative to the partition. After partitioning, this
163 is relative to the stack frame. */
164 HOST_WIDE_INT offset;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
168 HOST_WIDE_INT size;
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment for this partition. */
172 unsigned int alignb;
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
178 size_t next;
180 /* The numbers of conflicting stack variables. */
181 bitmap conflicts;
184 #define EOC ((size_t)-1)
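/* Illustrative note: a partition is represented as a singly linked list
   threaded through the `next' fields and terminated by EOC, and every
   member's `representative' field names the head of that list.  */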
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
204 /* Used during expand_used_vars. Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Discover the byte alignment to use for DECL. Ignore alignment
209 we cannot honor given the expected alignment of the stack boundary. */
211 static unsigned int
212 get_decl_align_unit (tree decl)
214 unsigned int align;
216 align = LOCAL_DECL_ALIGNMENT (decl);
218 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
219 align = MAX_SUPPORTED_STACK_ALIGNMENT;
221 if (SUPPORTS_STACK_ALIGNMENT)
223 if (crtl->stack_alignment_estimated < align)
225 gcc_assert(!crtl->stack_realign_processed);
226 crtl->stack_alignment_estimated = align;
230 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
231 So here we only make sure stack_alignment_needed >= align. */
232 if (crtl->stack_alignment_needed < align)
233 crtl->stack_alignment_needed = align;
234 if (crtl->max_used_stack_slot_alignment < align)
235 crtl->max_used_stack_slot_alignment = align;
237 return align / BITS_PER_UNIT;
240 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
241 Return the frame offset. */
243 static HOST_WIDE_INT
244 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
246 HOST_WIDE_INT offset, new_frame_offset;
248 new_frame_offset = frame_offset;
249 if (FRAME_GROWS_DOWNWARD)
251 new_frame_offset -= size + frame_phase;
252 new_frame_offset &= -align;
253 new_frame_offset += frame_phase;
254 offset = new_frame_offset;
256 else
258 new_frame_offset -= frame_phase;
259 new_frame_offset += align - 1;
260 new_frame_offset &= -align;
261 new_frame_offset += frame_phase;
262 offset = new_frame_offset;
263 new_frame_offset += size;
265 frame_offset = new_frame_offset;
267 if (frame_offset_overflow (frame_offset, cfun->decl))
268 frame_offset = offset = 0;
270 return offset;
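/* A worked example of the arithmetic above, assuming a downward-growing
   frame and frame_phase == 0: with frame_offset == -16, a request for 12
   bytes at 8-byte alignment computes new_frame_offset = -28, rounds it
   down to -32, returns offset -32 and leaves frame_offset at -32.  */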
273 /* Accumulate DECL into STACK_VARS. */
275 static void
276 add_stack_var (tree decl)
278 if (stack_vars_num >= stack_vars_alloc)
280 if (stack_vars_alloc)
281 stack_vars_alloc = stack_vars_alloc * 3 / 2;
282 else
283 stack_vars_alloc = 32;
284 stack_vars
285 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
287 stack_vars[stack_vars_num].decl = decl;
288 stack_vars[stack_vars_num].offset = 0;
289 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
290 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
292 /* All variables are initially in their own partition. */
293 stack_vars[stack_vars_num].representative = stack_vars_num;
294 stack_vars[stack_vars_num].next = EOC;
296 /* All variables initially conflict with no other. */
297 stack_vars[stack_vars_num].conflicts = NULL;
299 /* Ensure that this decl doesn't get put onto the list twice. */
300 set_rtl (decl, pc_rtx);
302 stack_vars_num++;
305 /* Make the decls associated with indices X and Y conflict. */
307 static void
308 add_stack_var_conflict (size_t x, size_t y)
310 struct stack_var *a = &stack_vars[x];
311 struct stack_var *b = &stack_vars[y];
312 if (!a->conflicts)
313 a->conflicts = BITMAP_ALLOC (NULL);
314 if (!b->conflicts)
315 b->conflicts = BITMAP_ALLOC (NULL);
316 bitmap_set_bit (a->conflicts, y);
317 bitmap_set_bit (b->conflicts, x);
320 /* Check whether the decls associated with indices X and Y conflict. */
322 static bool
323 stack_var_conflict_p (size_t x, size_t y)
325 struct stack_var *a = &stack_vars[x];
326 struct stack_var *b = &stack_vars[y];
327 if (!a->conflicts || !b->conflicts)
328 return false;
329 return bitmap_bit_p (a->conflicts, y);
332 /* Returns true if TYPE is or contains a union type. */
334 static bool
335 aggregate_contains_union_type (tree type)
337 tree field;
339 if (TREE_CODE (type) == UNION_TYPE
340 || TREE_CODE (type) == QUAL_UNION_TYPE)
341 return true;
342 if (TREE_CODE (type) == ARRAY_TYPE)
343 return aggregate_contains_union_type (TREE_TYPE (type));
344 if (TREE_CODE (type) != RECORD_TYPE)
345 return false;
347 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
348 if (TREE_CODE (field) == FIELD_DECL)
349 if (aggregate_contains_union_type (TREE_TYPE (field)))
350 return true;
352 return false;
355 /* A subroutine of expand_used_vars. If two variables X and Y have alias
356 sets that do not conflict, then do add a conflict for these variables
357 in the interference graph. We also need to make sure to add conflicts
358 for union containing structures. Else RTL alias analysis comes along
359 and due to type based aliasing rules decides that for two overlapping
360 union temporaries { short s; int i; } accesses to the same mem through
361 different types may not alias and happily reorders stores across
362 life-time boundaries of the temporaries (See PR25654).
363 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
365 static void
366 add_alias_set_conflicts (void)
368 size_t i, j, n = stack_vars_num;
370 for (i = 0; i < n; ++i)
372 tree type_i = TREE_TYPE (stack_vars[i].decl);
373 bool aggr_i = AGGREGATE_TYPE_P (type_i);
374 bool contains_union;
376 contains_union = aggregate_contains_union_type (type_i);
377 for (j = 0; j < i; ++j)
379 tree type_j = TREE_TYPE (stack_vars[j].decl);
380 bool aggr_j = AGGREGATE_TYPE_P (type_j);
381 if (aggr_i != aggr_j
382 /* Either the objects conflict by means of type based
383 aliasing rules, or we need to add a conflict. */
384 || !objects_must_conflict_p (type_i, type_j)
385 /* In case the types do not conflict ensure that access
386 to elements will conflict. In case of unions we have
387 to be careful as type based aliasing rules may say
388 access to the same memory does not conflict. So play
389 safe and add a conflict in this case. */
390 || contains_union)
391 add_stack_var_conflict (i, j);
396 /* A subroutine of partition_stack_vars. A comparison function for qsort,
397 sorting an array of indices by the size and type of the object. */
399 static int
400 stack_var_size_cmp (const void *a, const void *b)
402 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
403 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
404 tree decla, declb;
405 unsigned int uida, uidb;
407 if (sa < sb)
408 return -1;
409 if (sa > sb)
410 return 1;
411 decla = stack_vars[*(const size_t *)a].decl;
412 declb = stack_vars[*(const size_t *)b].decl;
413 /* For stack variables of the same size use an id of the decls
414 to make the sort stable. Two SSA names are compared by their
415 version, SSA names come before non-SSA names, and two normal
416 decls are compared by their DECL_UID. */
417 if (TREE_CODE (decla) == SSA_NAME)
419 if (TREE_CODE (declb) == SSA_NAME)
420 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
421 else
422 return -1;
424 else if (TREE_CODE (declb) == SSA_NAME)
425 return 1;
426 else
427 uida = DECL_UID (decla), uidb = DECL_UID (declb);
428 if (uida < uidb)
429 return -1;
430 if (uida > uidb)
431 return 1;
432 return 0;
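/* Example of the ordering above: two 8-byte variables compare equal on
   size; an SSA name then sorts before a user VAR_DECL, and two VAR_DECLs
   fall back to DECL_UID order, keeping the qsort deterministic.  */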
436 /* If the points-to solution *PI points to variables that are in a partition
437 together with other variables add all partition members to the pointed-to
438 variables bitmap. */
440 static void
441 add_partitioned_vars_to_ptset (struct pt_solution *pt,
442 struct pointer_map_t *decls_to_partitions,
443 struct pointer_set_t *visited, bitmap temp)
445 bitmap_iterator bi;
446 unsigned i;
447 bitmap *part;
449 if (pt->anything
450 || pt->vars == NULL
451 /* The pointed-to vars bitmap is shared; it is enough to
452 visit it once. */
453 || pointer_set_insert(visited, pt->vars))
454 return;
456 bitmap_clear (temp);
458 /* By using a temporary bitmap to store all members of the partitions
459 we have to add, we make sure to visit each of the partitions only
460 once. */
461 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
462 if ((!temp
463 || !bitmap_bit_p (temp, i))
464 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
465 (void *)(size_t) i)))
466 bitmap_ior_into (temp, *part);
467 if (!bitmap_empty_p (temp))
468 bitmap_ior_into (pt->vars, temp);
471 /* Update points-to sets based on partition info, so we can use them on RTL.
472 The bitmaps representing stack partitions will be saved until expand,
473 where partitioned decls used as bases in memory expressions will be
474 rewritten. */
476 static void
477 update_alias_info_with_stack_vars (void)
479 struct pointer_map_t *decls_to_partitions = NULL;
480 size_t i, j;
481 tree var = NULL_TREE;
483 for (i = 0; i < stack_vars_num; i++)
485 bitmap part = NULL;
486 tree name;
487 struct ptr_info_def *pi;
489 /* Not interested in partitions with a single variable. */
490 if (stack_vars[i].representative != i
491 || stack_vars[i].next == EOC)
492 continue;
494 if (!decls_to_partitions)
496 decls_to_partitions = pointer_map_create ();
497 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
500 /* Create an SSA_NAME that points to the partition for use
501 as base during alias-oracle queries on RTL for bases that
502 have been partitioned. */
503 if (var == NULL_TREE)
504 var = create_tmp_var (ptr_type_node, NULL);
505 name = make_ssa_name (var, NULL);
507 /* Create bitmaps representing partitions. They will be used for
508 points-to sets later, so use GGC alloc. */
509 part = BITMAP_GGC_ALLOC ();
510 for (j = i; j != EOC; j = stack_vars[j].next)
512 tree decl = stack_vars[j].decl;
513 unsigned int uid = DECL_PT_UID (decl);
514 /* We should never end up partitioning SSA names (though they
515 may end up on the stack). Neither should we allocate stack
516 space to something that is unused and thus unreferenced. */
517 gcc_assert (DECL_P (decl)
518 && referenced_var_lookup (DECL_UID (decl)));
519 bitmap_set_bit (part, uid);
520 *((bitmap *) pointer_map_insert (decls_to_partitions,
521 (void *)(size_t) uid)) = part;
522 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
523 decl)) = name;
526 /* Make the SSA name point to all partition members. */
527 pi = get_ptr_info (name);
528 pt_solution_set (&pi->pt, part, false, false);
531 /* Make all points-to sets that contain one member of a partition
532 contain all members of the partition. */
533 if (decls_to_partitions)
535 unsigned i;
536 struct pointer_set_t *visited = pointer_set_create ();
537 bitmap temp = BITMAP_ALLOC (NULL);
539 for (i = 1; i < num_ssa_names; i++)
541 tree name = ssa_name (i);
542 struct ptr_info_def *pi;
544 if (name
545 && POINTER_TYPE_P (TREE_TYPE (name))
546 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
547 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
548 visited, temp);
551 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
552 decls_to_partitions, visited, temp);
554 pointer_set_destroy (visited);
555 pointer_map_destroy (decls_to_partitions);
556 BITMAP_FREE (temp);
560 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
561 partitioning algorithm. Partitions A and B are known to be non-conflicting.
562 Merge them into a single partition A.
564 At the same time, add OFFSET to all variables in partition B. At the end
565 of the partitioning process we'll have a nice block easy to lay out within
566 the stack frame. */
568 static void
569 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
571 size_t i, last;
572 struct stack_var *vb = &stack_vars[b];
573 bitmap_iterator bi;
574 unsigned u;
576 /* Update each element of partition B with the given offset,
577 and merge them into partition A. */
578 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
580 stack_vars[i].offset += offset;
581 stack_vars[i].representative = a;
583 stack_vars[last].next = stack_vars[a].next;
584 stack_vars[a].next = b;
586 /* Update the required alignment of partition A to account for B. */
587 if (stack_vars[a].alignb < stack_vars[b].alignb)
588 stack_vars[a].alignb = stack_vars[b].alignb;
590 /* Update the interference graph and merge the conflicts. */
591 if (vb->conflicts)
593 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
594 add_stack_var_conflict (a, stack_vars[u].representative);
595 BITMAP_FREE (vb->conflicts);
599 /* A subroutine of expand_used_vars. Binpack the variables into
600 partitions constrained by the interference graph. The overall
601 algorithm used is as follows:
603 Sort the objects by size.
604 For each object A {
605 S = size(A)
606 O = 0
607 loop {
608 Look for the largest non-conflicting object B with size <= S.
609 UNION (A, B)
610 offset(B) = O
611 O += size(B)
612 S -= size(B)
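/* To illustrate the scheme above: given a 32-byte object A and
   non-conflicting objects B (16 bytes) and C (8 bytes), B is placed at
   offset 0 and C at offset 16 within A's partition, so all three end up
   sharing a single 32-byte slot.  */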
617 static void
618 partition_stack_vars (void)
620 size_t si, sj, n = stack_vars_num;
622 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
623 for (si = 0; si < n; ++si)
624 stack_vars_sorted[si] = si;
626 if (n == 1)
627 return;
629 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
631 for (si = 0; si < n; ++si)
633 size_t i = stack_vars_sorted[si];
634 HOST_WIDE_INT isize = stack_vars[i].size;
635 HOST_WIDE_INT offset = 0;
637 for (sj = si; sj-- > 0; )
639 size_t j = stack_vars_sorted[sj];
640 HOST_WIDE_INT jsize = stack_vars[j].size;
641 unsigned int jalign = stack_vars[j].alignb;
643 /* Ignore objects that aren't partition representatives. */
644 if (stack_vars[j].representative != j)
645 continue;
647 /* Ignore objects too large for the remaining space. */
648 if (isize < jsize)
649 continue;
651 /* Ignore conflicting objects. */
652 if (stack_var_conflict_p (i, j))
653 continue;
655 /* Refine the remaining space check to include alignment. */
656 if (offset & (jalign - 1))
658 HOST_WIDE_INT toff = offset;
659 toff += jalign - 1;
660 toff &= -(HOST_WIDE_INT)jalign;
661 if (isize - (toff - offset) < jsize)
662 continue;
664 isize -= toff - offset;
665 offset = toff;
668 /* UNION the objects, placing J at OFFSET. */
669 union_stack_vars (i, j, offset);
671 isize -= jsize;
672 if (isize == 0)
673 break;
677 if (optimize)
678 update_alias_info_with_stack_vars ();
681 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
683 static void
684 dump_stack_var_partition (void)
686 size_t si, i, j, n = stack_vars_num;
688 for (si = 0; si < n; ++si)
690 i = stack_vars_sorted[si];
692 /* Skip variables that aren't partition representatives, for now. */
693 if (stack_vars[i].representative != i)
694 continue;
696 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
697 " align %u\n", (unsigned long) i, stack_vars[i].size,
698 stack_vars[i].alignb);
700 for (j = i; j != EOC; j = stack_vars[j].next)
702 fputc ('\t', dump_file);
703 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
704 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
705 stack_vars[j].offset);
710 /* Assign rtl to DECL at frame offset OFFSET. */
712 static void
713 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
715 /* Alignment is unsigned. */
716 unsigned HOST_WIDE_INT align;
717 rtx x;
719 /* If this fails, we've overflowed the stack frame. Error nicely? */
720 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
722 x = plus_constant (virtual_stack_vars_rtx, offset);
723 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
725 if (TREE_CODE (decl) != SSA_NAME)
727 /* Set alignment we actually gave this decl if it isn't an SSA name.
728 If it is, we generate stack slots only accidentally, so it isn't as
729 important; we'll simply use the alignment that is already set. */
730 offset -= frame_phase;
731 align = offset & -offset;
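/* The AND above isolates the lowest set bit, i.e. the largest power of
   two dividing the offset: e.g. an offset of 24 yields 8, so the slot
   is known to be (at least) 8-byte aligned.  */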
732 align *= BITS_PER_UNIT;
733 if (align == 0)
734 align = STACK_BOUNDARY;
735 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
736 align = MAX_SUPPORTED_STACK_ALIGNMENT;
738 DECL_ALIGN (decl) = align;
739 DECL_USER_ALIGN (decl) = 0;
742 set_mem_attributes (x, SSAVAR (decl), true);
743 set_rtl (decl, x);
746 /* A subroutine of expand_used_vars. Give each partition representative
747 a unique location within the stack frame. Update each partition member
748 with that location. */
750 static void
751 expand_stack_vars (bool (*pred) (tree))
753 size_t si, i, j, n = stack_vars_num;
755 for (si = 0; si < n; ++si)
757 HOST_WIDE_INT offset;
759 i = stack_vars_sorted[si];
761 /* Skip variables that aren't partition representatives, for now. */
762 if (stack_vars[i].representative != i)
763 continue;
765 /* Skip variables that have already had rtl assigned. See also
766 add_stack_var where we perpetrate this pc_rtx hack. */
767 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
768 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
769 : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
770 continue;
772 /* Check the predicate to see whether this variable should be
773 allocated in this pass. */
774 if (pred && !pred (stack_vars[i].decl))
775 continue;
777 offset = alloc_stack_frame_space (stack_vars[i].size,
778 stack_vars[i].alignb);
780 /* Create rtl for each variable based on their location within the
781 partition. */
782 for (j = i; j != EOC; j = stack_vars[j].next)
784 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
785 expand_one_stack_var_at (stack_vars[j].decl,
786 stack_vars[j].offset + offset);
791 /* Take into account all sizes of partitions and reset DECL_RTLs. */
792 static HOST_WIDE_INT
793 account_stack_vars (void)
795 size_t si, j, i, n = stack_vars_num;
796 HOST_WIDE_INT size = 0;
798 for (si = 0; si < n; ++si)
800 i = stack_vars_sorted[si];
802 /* Skip variables that aren't partition representatives, for now. */
803 if (stack_vars[i].representative != i)
804 continue;
806 size += stack_vars[i].size;
807 for (j = i; j != EOC; j = stack_vars[j].next)
808 set_rtl (stack_vars[j].decl, NULL);
810 return size;
813 /* A subroutine of expand_one_var. Called to immediately assign rtl
814 to a variable to be allocated in the stack frame. */
816 static void
817 expand_one_stack_var (tree var)
819 HOST_WIDE_INT size, offset, align;
821 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
822 align = get_decl_align_unit (SSAVAR (var));
823 offset = alloc_stack_frame_space (size, align);
825 expand_one_stack_var_at (var, offset);
828 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
829 that will reside in a hard register. */
831 static void
832 expand_one_hard_reg_var (tree var)
834 rest_of_decl_compilation (var, 0, 0);
837 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
838 that will reside in a pseudo register. */
840 static void
841 expand_one_register_var (tree var)
843 tree decl = SSAVAR (var);
844 tree type = TREE_TYPE (decl);
845 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
846 rtx x = gen_reg_rtx (reg_mode);
848 set_rtl (var, x);
850 /* Note if the object is a user variable. */
851 if (!DECL_ARTIFICIAL (decl))
852 mark_user_reg (x);
854 if (POINTER_TYPE_P (type))
855 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
858 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
859 has some associated error, e.g. its type is error-mark. We just need
860 to pick something that won't crash the rest of the compiler. */
862 static void
863 expand_one_error_var (tree var)
865 enum machine_mode mode = DECL_MODE (var);
866 rtx x;
868 if (mode == BLKmode)
869 x = gen_rtx_MEM (BLKmode, const0_rtx);
870 else if (mode == VOIDmode)
871 x = const0_rtx;
872 else
873 x = gen_reg_rtx (mode);
875 SET_DECL_RTL (var, x);
878 /* A subroutine of expand_one_var. VAR is a variable that will be
879 allocated to the local stack frame. Return true if we wish to
880 add VAR to STACK_VARS so that it will be coalesced with other
881 variables. Return false to allocate VAR immediately.
883 This function is used to reduce the number of variables considered
884 for coalescing, which reduces the size of the quadratic problem. */
886 static bool
887 defer_stack_allocation (tree var, bool toplevel)
889 /* If stack protection is enabled, *all* stack variables must be deferred,
890 so that we can re-order the strings to the top of the frame. */
891 if (flag_stack_protect)
892 return true;
894 /* Variables in the outermost scope automatically conflict with
895 every other variable. The only reason to want to defer them
896 at all is that, after sorting, we can more efficiently pack
897 small variables in the stack frame. Continue to defer at -O2. */
898 if (toplevel && optimize < 2)
899 return false;
901 /* Without optimization, *most* variables are allocated from the
902 stack, which makes the quadratic problem large exactly when we
903 want compilation to proceed as quickly as possible. On the
904 other hand, we don't want the function's stack frame size to
905 get completely out of hand. So we avoid adding scalars and
906 "small" aggregates to the list at all. */
907 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
908 return false;
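/* Everything else is deferred.  At -O0 that means, for example, that a
   plain int or a 16-byte struct got its slot immediately above, while a
   64-byte array still goes through the partitioning machinery.  */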
910 return true;
913 /* A subroutine of expand_used_vars. Expand one variable according to
914 its flavor. Variables to be placed on the stack are not actually
915 expanded yet, merely recorded.
916 When REALLY_EXPAND is false, only add stack variables to be allocated.
917 Return the stack usage this variable is supposed to take.
920 static HOST_WIDE_INT
921 expand_one_var (tree var, bool toplevel, bool really_expand)
923 tree origvar = var;
924 var = SSAVAR (var);
926 if (SUPPORTS_STACK_ALIGNMENT
927 && TREE_TYPE (var) != error_mark_node
928 && TREE_CODE (var) == VAR_DECL)
930 unsigned int align;
932 /* Because we don't know if VAR will be in register or on stack,
933 we conservatively assume it will be on stack even if VAR is
934 eventually put into register after RA pass. For non-automatic
935 variables, which won't be on stack, we collect alignment of
936 type and ignore user specified alignment. */
937 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
938 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
939 TYPE_MODE (TREE_TYPE (var)),
940 TYPE_ALIGN (TREE_TYPE (var)));
941 else
942 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
944 if (crtl->stack_alignment_estimated < align)
946 /* stack_alignment_estimated shouldn't change after stack
947 realign decision has been made. */
948 gcc_assert(!crtl->stack_realign_processed);
949 crtl->stack_alignment_estimated = align;
953 if (TREE_CODE (origvar) == SSA_NAME)
955 gcc_assert (TREE_CODE (var) != VAR_DECL
956 || (!DECL_EXTERNAL (var)
957 && !DECL_HAS_VALUE_EXPR_P (var)
958 && !TREE_STATIC (var)
959 && TREE_TYPE (var) != error_mark_node
960 && !DECL_HARD_REGISTER (var)
961 && really_expand));
963 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
965 else if (DECL_EXTERNAL (var))
967 else if (DECL_HAS_VALUE_EXPR_P (var))
969 else if (TREE_STATIC (var))
971 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
973 else if (TREE_TYPE (var) == error_mark_node)
975 if (really_expand)
976 expand_one_error_var (var);
978 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
980 if (really_expand)
981 expand_one_hard_reg_var (var);
983 else if (use_register_for_decl (var))
985 if (really_expand)
986 expand_one_register_var (origvar);
988 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
990 if (really_expand)
992 error ("size of variable %q+D is too large", var);
993 expand_one_error_var (var);
996 else if (defer_stack_allocation (var, toplevel))
997 add_stack_var (origvar);
998 else
1000 if (really_expand)
1001 expand_one_stack_var (origvar);
1002 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1004 return 0;
1007 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1008 expanding variables. Those variables that can be put into registers
1009 are allocated pseudos; those that can't are put on the stack.
1011 TOPLEVEL is true if this is the outermost BLOCK. */
1013 static void
1014 expand_used_vars_for_block (tree block, bool toplevel)
1016 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1017 tree t;
1019 old_sv_num = toplevel ? 0 : stack_vars_num;
1021 /* Expand all variables at this level. */
1022 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1023 if (TREE_USED (t))
1024 expand_one_var (t, toplevel, true);
1026 this_sv_num = stack_vars_num;
1029 /* Expand all variables at contained (sub-block) levels. */
1029 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1030 expand_used_vars_for_block (t, false);
1032 /* Since we do not track exact variable lifetimes (which is not even
1033 possible for variables whose address escapes), we mirror the block
1034 tree in the interference graph. Here we cause all variables at this
1035 level, and all sublevels, to conflict. */
1036 if (old_sv_num < this_sv_num)
1038 new_sv_num = stack_vars_num;
1040 for (i = old_sv_num; i < new_sv_num; ++i)
1041 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1042 add_stack_var_conflict (i, j);
1046 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1047 and clear TREE_USED on all local variables. */
1049 static void
1050 clear_tree_used (tree block)
1052 tree t;
1054 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1055 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1056 TREE_USED (t) = 0;
1058 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1059 clear_tree_used (t);
1062 /* Examine TYPE and determine a bit mask of the following features. */
1064 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1065 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1066 #define SPCT_HAS_ARRAY 4
1067 #define SPCT_HAS_AGGREGATE 8
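/* As an illustration of the classification below, assuming the default
   --param ssp-buffer-size=8: char buf[4] yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, char buf[64] yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct containing
   such an array additionally sets SPCT_HAS_AGGREGATE.  */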
1069 static unsigned int
1070 stack_protect_classify_type (tree type)
1072 unsigned int ret = 0;
1073 tree t;
1075 switch (TREE_CODE (type))
1077 case ARRAY_TYPE:
1078 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1079 if (t == char_type_node
1080 || t == signed_char_type_node
1081 || t == unsigned_char_type_node)
1083 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1084 unsigned HOST_WIDE_INT len;
1086 if (!TYPE_SIZE_UNIT (type)
1087 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1088 len = max;
1089 else
1090 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1092 if (len < max)
1093 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1094 else
1095 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1097 else
1098 ret = SPCT_HAS_ARRAY;
1099 break;
1101 case UNION_TYPE:
1102 case QUAL_UNION_TYPE:
1103 case RECORD_TYPE:
1104 ret = SPCT_HAS_AGGREGATE;
1105 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1106 if (TREE_CODE (t) == FIELD_DECL)
1107 ret |= stack_protect_classify_type (TREE_TYPE (t));
1108 break;
1110 default:
1111 break;
1114 return ret;
1117 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1118 part of the local stack frame. Remember if we ever return nonzero for
1119 any variable in this function. The return value is the phase number in
1120 which the variable should be allocated. */
1122 static int
1123 stack_protect_decl_phase (tree decl)
1125 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1126 int ret = 0;
1128 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1129 has_short_buffer = true;
1131 if (flag_stack_protect == 2)
1133 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1134 && !(bits & SPCT_HAS_AGGREGATE))
1135 ret = 1;
1136 else if (bits & SPCT_HAS_ARRAY)
1137 ret = 2;
1139 else
1140 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1142 if (ret)
1143 has_protected_decls = true;
1145 return ret;
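/* For example (assuming the default ssp-buffer-size of 8): with
   -fstack-protector-all a char buf[64] is phase 1 and an int v[16] is
   phase 2, while with plain -fstack-protector only the large character
   buffer is segregated (phase 1) and everything else stays in phase 0.  */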
1148 /* Two helper routines that check for phase 1 and phase 2. These are used
1149 as callbacks for expand_stack_vars. */
1151 static bool
1152 stack_protect_decl_phase_1 (tree decl)
1154 return stack_protect_decl_phase (decl) == 1;
1157 static bool
1158 stack_protect_decl_phase_2 (tree decl)
1160 return stack_protect_decl_phase (decl) == 2;
1163 /* Ensure that variables in different stack protection phases conflict
1164 so that they are not merged and share the same stack slot. */
1166 static void
1167 add_stack_protection_conflicts (void)
1169 size_t i, j, n = stack_vars_num;
1170 unsigned char *phase;
1172 phase = XNEWVEC (unsigned char, n);
1173 for (i = 0; i < n; ++i)
1174 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1176 for (i = 0; i < n; ++i)
1178 unsigned char ph_i = phase[i];
1179 for (j = 0; j < i; ++j)
1180 if (ph_i != phase[j])
1181 add_stack_var_conflict (i, j);
1184 XDELETEVEC (phase);
1187 /* Create a decl for the guard at the top of the stack frame. */
1189 static void
1190 create_stack_guard (void)
1192 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1193 VAR_DECL, NULL, ptr_type_node);
1194 TREE_THIS_VOLATILE (guard) = 1;
1195 TREE_USED (guard) = 1;
1196 expand_one_stack_var (guard);
1197 crtl->stack_protect_guard = guard;
1200 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1201 and account for the stack space needed by the variables that would
1202 be expanded, without actually expanding them.
1204 TOPLEVEL is true if this is the outermost BLOCK. */
1206 static HOST_WIDE_INT
1207 account_used_vars_for_block (tree block, bool toplevel)
1209 tree t;
1210 HOST_WIDE_INT size = 0;
1212 /* Account for all variables at this level. */
1213 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1214 if (TREE_USED (t))
1215 size += expand_one_var (t, toplevel, false);
1217 /* Account for all variables at contained (sub-block) levels. */
1218 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1219 size += account_used_vars_for_block (t, false);
1221 return size;
1224 /* Prepare for expanding variables. */
1225 static void
1226 init_vars_expansion (void)
1228 tree t;
1229 unsigned ix;
1230 /* Set TREE_USED on all variables in the local_decls. */
1231 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1232 TREE_USED (t) = 1;
1234 /* Clear TREE_USED on all variables associated with a block scope. */
1235 clear_tree_used (DECL_INITIAL (current_function_decl));
1237 /* Initialize local stack smashing state. */
1238 has_protected_decls = false;
1239 has_short_buffer = false;
1242 /* Free up stack variable graph data. */
1243 static void
1244 fini_vars_expansion (void)
1246 size_t i, n = stack_vars_num;
1247 for (i = 0; i < n; i++)
1248 BITMAP_FREE (stack_vars[i].conflicts);
1249 XDELETEVEC (stack_vars);
1250 XDELETEVEC (stack_vars_sorted);
1251 stack_vars = NULL;
1252 stack_vars_alloc = stack_vars_num = 0;
1255 /* Make a fair guess for the size of the stack frame of the current
1256 function. This doesn't have to be exact, the result is only used
1257 in the inline heuristics. So we don't want to run the full stack
1258 var packing algorithm (which is quadratic in the number of stack
1259 vars). Instead, we calculate the total size of all stack vars.
1260 This turns out to be a pretty fair estimate -- packing of stack
1261 vars doesn't happen very often. */
1263 HOST_WIDE_INT
1264 estimated_stack_frame_size (void)
1266 HOST_WIDE_INT size = 0;
1267 size_t i;
1268 tree var, outer_block = DECL_INITIAL (current_function_decl);
1269 unsigned ix;
1271 init_vars_expansion ();
1273 FOR_EACH_LOCAL_DECL (cfun, ix, var)
1275 if (TREE_USED (var))
1276 size += expand_one_var (var, true, false);
1277 TREE_USED (var) = 1;
1279 size += account_used_vars_for_block (outer_block, true);
1281 if (stack_vars_num > 0)
1283 /* Fake sorting the stack vars for account_stack_vars (). */
1284 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1285 for (i = 0; i < stack_vars_num; ++i)
1286 stack_vars_sorted[i] = i;
1287 size += account_stack_vars ();
1288 fini_vars_expansion ();
1291 return size;
1294 /* Expand all variables used in the function. */
1296 static void
1297 expand_used_vars (void)
1299 tree var, outer_block = DECL_INITIAL (current_function_decl);
1300 VEC(tree,heap) *maybe_local_decls = NULL;
1301 unsigned i;
1302 unsigned len;
1304 /* Compute the phase of the stack frame for this function. */
1306 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1307 int off = STARTING_FRAME_OFFSET % align;
1308 frame_phase = off ? align - off : 0;
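/* For example, with align == 16 (a 128-bit preferred boundary) and
   STARTING_FRAME_OFFSET == 8, off is 8 and frame_phase becomes 8.  */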
1311 init_vars_expansion ();
1313 for (i = 0; i < SA.map->num_partitions; i++)
1315 tree var = partition_to_var (SA.map, i);
1317 gcc_assert (is_gimple_reg (var));
1318 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1319 expand_one_var (var, true, true);
1320 else
1322 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1323 contain the default def (representing the parm or result itself)
1324 we don't do anything here. But those which don't contain the
1325 default def (representing a temporary based on the parm/result)
1326 we need to allocate space just like for normal VAR_DECLs. */
1327 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1329 expand_one_var (var, true, true);
1330 gcc_assert (SA.partition_to_pseudo[i]);
1335 /* At this point all variables on the local_decls with TREE_USED
1336 set are not associated with any block scope. Lay them out. */
1338 len = VEC_length (tree, cfun->local_decls);
1339 FOR_EACH_LOCAL_DECL (cfun, i, var)
1341 bool expand_now = false;
1343 /* Expanded above already. */
1344 if (is_gimple_reg (var))
1346 TREE_USED (var) = 0;
1347 goto next;
1349 /* We didn't set a block for static or extern because it's hard
1350 to tell the difference between a global variable (re)declared
1351 in a local scope, and one that's really declared there to
1352 begin with. And it doesn't really matter much, since we're
1353 not giving them stack space. Expand them now. */
1354 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1355 expand_now = true;
1357 /* If the variable is not associated with any block, then it
1358 was created by the optimizers, and could be live anywhere
1359 in the function. */
1360 else if (TREE_USED (var))
1361 expand_now = true;
1363 /* Finally, mark all variables on the list as used. We'll use
1364 this in a moment when we expand those associated with scopes. */
1365 TREE_USED (var) = 1;
1367 if (expand_now)
1368 expand_one_var (var, true, true);
1370 next:
1371 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1373 rtx rtl = DECL_RTL_IF_SET (var);
1375 /* Keep artificial non-ignored vars in cfun->local_decls
1376 chain until instantiate_decls. */
1377 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1378 add_local_decl (cfun, var);
1379 else if (rtl == NULL_RTX)
1380 /* If rtl isn't set yet, which can happen e.g. with
1381 -fstack-protector, retry before returning from this
1382 function. */
1383 VEC_safe_push (tree, heap, maybe_local_decls, var);
1387 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1389 +-----------------+-----------------+
1390 | ...processed... | ...duplicates...|
1391 +-----------------+-----------------+
1393 +-- LEN points here.
1395 We just want the duplicates, as those are the artificial
1396 non-ignored vars that we want to keep until instantiate_decls.
1397 Move them down and truncate the array. */
1398 if (!VEC_empty (tree, cfun->local_decls))
1399 VEC_block_remove (tree, cfun->local_decls, 0, len);
1401 /* At this point, all variables within the block tree with TREE_USED
1402 set are actually used by the optimized function. Lay them out. */
1403 expand_used_vars_for_block (outer_block, true);
1405 if (stack_vars_num > 0)
1407 /* Due to the way alias sets work, no variables with non-conflicting
1408 alias sets may be assigned the same address. Add conflicts to
1409 reflect this. */
1410 add_alias_set_conflicts ();
1412 /* If stack protection is enabled, we don't share space between
1413 vulnerable data and non-vulnerable data. */
1414 if (flag_stack_protect)
1415 add_stack_protection_conflicts ();
1417 /* Now that we have collected all stack variables, and have computed a
1418 minimal interference graph, attempt to save some stack space. */
1419 partition_stack_vars ();
1420 if (dump_file)
1421 dump_stack_var_partition ();
1424 /* There are several conditions under which we should create a
1425 stack guard: protect-all, alloca used, protected decls present. */
1426 if (flag_stack_protect == 2
1427 || (flag_stack_protect
1428 && (cfun->calls_alloca || has_protected_decls)))
1429 create_stack_guard ();
1431 /* Assign rtl to each variable based on these partitions. */
1432 if (stack_vars_num > 0)
1434 /* Reorder decls to be protected by iterating over the variables
1435 array multiple times, and allocating out of each phase in turn. */
1436 /* ??? We could probably integrate this into the qsort we did
1437 earlier, such that we naturally see these variables first,
1438 and thus naturally allocate things in the right order. */
1439 if (has_protected_decls)
1441 /* Phase 1 contains only character arrays. */
1442 expand_stack_vars (stack_protect_decl_phase_1);
1444 /* Phase 2 contains other kinds of arrays. */
1445 if (flag_stack_protect == 2)
1446 expand_stack_vars (stack_protect_decl_phase_2);
1449 expand_stack_vars (NULL);
1451 fini_vars_expansion ();
1454 /* If there were any artificial non-ignored vars without rtl
1455 found earlier, see if deferred stack allocation hasn't assigned
1456 rtl to them. */
1457 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1459 rtx rtl = DECL_RTL_IF_SET (var);
1461 /* Keep artificial non-ignored vars in cfun->local_decls
1462 chain until instantiate_decls. */
1463 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1464 add_local_decl (cfun, var);
1466 VEC_free (tree, heap, maybe_local_decls);
1468 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1469 if (STACK_ALIGNMENT_NEEDED)
1471 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1472 if (!FRAME_GROWS_DOWNWARD)
1473 frame_offset += align - 1;
1474 frame_offset &= -align;
1479 /* If we need to produce a detailed dump, print the tree representation
1480 for STMT to the dump file. SINCE is the last RTX after which the RTL
1481 generated for STMT should have been appended. */
1483 static void
1484 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1486 if (dump_file && (dump_flags & TDF_DETAILS))
1488 fprintf (dump_file, "\n;; ");
1489 print_gimple_stmt (dump_file, stmt, 0,
1490 TDF_SLIM | (dump_flags & TDF_LINENO));
1491 fprintf (dump_file, "\n");
1493 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1497 /* Maps the blocks that do not contain tree labels to rtx labels. */
1499 static struct pointer_map_t *lab_rtx_for_bb;
1501 /* Returns the label_rtx expression for a label starting basic block BB. */
1503 static rtx
1504 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1506 gimple_stmt_iterator gsi;
1507 tree lab;
1508 gimple lab_stmt;
1509 void **elt;
1511 if (bb->flags & BB_RTL)
1512 return block_label (bb);
1514 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1515 if (elt)
1516 return (rtx) *elt;
1518 /* Find the tree label if it is present. */
1520 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1522 lab_stmt = gsi_stmt (gsi);
1523 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1524 break;
1526 lab = gimple_label_label (lab_stmt);
1527 if (DECL_NONLOCAL (lab))
1528 break;
1530 return label_rtx (lab);
1533 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1534 *elt = gen_label_rtx ();
1535 return (rtx) *elt;
1539 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1540 of a basic block where we just expanded the conditional at the end,
1541 possibly clean up the CFG and instruction sequence. LAST is the
1542 last instruction before the just emitted jump sequence. */
1544 static void
1545 maybe_cleanup_end_of_block (edge e, rtx last)
1547 /* Special case: when jumpif decides that the condition is
1548 trivial it emits an unconditional jump (and the necessary
1549 barrier). But we still have two edges, the fallthru one is
1550 wrong. purge_dead_edges would clean this up later. Unfortunately
1551 we have to insert insns (and split edges) before
1552 find_many_sub_basic_blocks and hence before purge_dead_edges.
1553 But splitting edges might create new blocks which depend on the
1554 fact that if there are two edges there's no barrier. So the
1555 barrier would get lost and verify_flow_info would ICE. Instead
1556 of auditing all edge splitters to care for the barrier (which
1557 normally isn't there in a cleaned CFG), fix it here. */
1558 if (BARRIER_P (get_last_insn ()))
1560 rtx insn;
1561 remove_edge (e);
1562 /* Now, we have a single successor block, if we have insns to
1563 insert on the remaining edge we potentially will insert
1564 it at the end of this block (if the dest block isn't feasible)
1565 in order to avoid splitting the edge. This insertion will take
1566 place in front of the last jump. But we might have emitted
1567 multiple jumps (conditional and one unconditional) to the
1568 same destination. Inserting in front of the last one then
1569 is a problem. See PR 40021. We fix this by deleting all
1570 jumps except the last unconditional one. */
1571 insn = PREV_INSN (get_last_insn ());
1572 /* Make sure we have an unconditional jump. Otherwise we're
1573 confused. */
1574 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1575 for (insn = PREV_INSN (insn); insn != last;)
1577 insn = PREV_INSN (insn);
1578 if (JUMP_P (NEXT_INSN (insn)))
1579 delete_insn (NEXT_INSN (insn));
1584 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1585 Returns a new basic block if we've terminated the current basic
1586 block and created a new one. */
1588 static basic_block
1589 expand_gimple_cond (basic_block bb, gimple stmt)
1591 basic_block new_bb, dest;
1592 edge new_edge;
1593 edge true_edge;
1594 edge false_edge;
1595 rtx last2, last;
1596 enum tree_code code;
1597 tree op0, op1;
1599 code = gimple_cond_code (stmt);
1600 op0 = gimple_cond_lhs (stmt);
1601 op1 = gimple_cond_rhs (stmt);
1602 /* We're sometimes presented with such code:
1603 D.123_1 = x < y;
1604 if (D.123_1 != 0)
1606 This would expand to two comparisons which then later might
1607 be cleaned up by combine. But some pattern matchers like if-conversion
1608 work better when there's only one compare, so make up for this
1609 here as a special exception if TER would have made the same change.
1610 if (gimple_cond_single_var_p (stmt)
1611 && SA.values
1612 && TREE_CODE (op0) == SSA_NAME
1613 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1615 gimple second = SSA_NAME_DEF_STMT (op0);
1616 if (gimple_code (second) == GIMPLE_ASSIGN)
1618 enum tree_code code2 = gimple_assign_rhs_code (second);
1619 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1621 code = code2;
1622 op0 = gimple_assign_rhs1 (second);
1623 op1 = gimple_assign_rhs2 (second);
1625 /* If jumps are cheap turn some more codes into
1626 jumpy sequences. */
1627 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1629 if ((code2 == BIT_AND_EXPR
1630 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1631 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1632 || code2 == TRUTH_AND_EXPR)
1634 code = TRUTH_ANDIF_EXPR;
1635 op0 = gimple_assign_rhs1 (second);
1636 op1 = gimple_assign_rhs2 (second);
1638 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1640 code = TRUTH_ORIF_EXPR;
1641 op0 = gimple_assign_rhs1 (second);
1642 op1 = gimple_assign_rhs2 (second);
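/* So, for example, when branches are cheap, _1 = a_2 | b_3; if (_1 != 0)
   is expanded here as if it had been written if (a_2 || b_3), producing
   a jumpy sequence instead of materializing _1.  */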
1648 last2 = last = get_last_insn ();
1650 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1651 if (gimple_has_location (stmt))
1653 set_curr_insn_source_location (gimple_location (stmt));
1654 set_curr_insn_block (gimple_block (stmt));
1657 /* These flags have no purpose in RTL land. */
1658 true_edge->flags &= ~EDGE_TRUE_VALUE;
1659 false_edge->flags &= ~EDGE_FALSE_VALUE;
1661 /* We can either have a pure conditional jump with one fallthru edge or
1662 two-way jump that needs to be decomposed into two basic blocks. */
1663 if (false_edge->dest == bb->next_bb)
1665 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1666 true_edge->probability);
1667 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1668 if (true_edge->goto_locus)
1670 set_curr_insn_source_location (true_edge->goto_locus);
1671 set_curr_insn_block (true_edge->goto_block);
1672 true_edge->goto_locus = curr_insn_locator ();
1674 true_edge->goto_block = NULL;
1675 false_edge->flags |= EDGE_FALLTHRU;
1676 maybe_cleanup_end_of_block (false_edge, last);
1677 return NULL;
1679 if (true_edge->dest == bb->next_bb)
1681 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1682 false_edge->probability);
1683 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1684 if (false_edge->goto_locus)
1686 set_curr_insn_source_location (false_edge->goto_locus);
1687 set_curr_insn_block (false_edge->goto_block);
1688 false_edge->goto_locus = curr_insn_locator ();
1690 false_edge->goto_block = NULL;
1691 true_edge->flags |= EDGE_FALLTHRU;
1692 maybe_cleanup_end_of_block (true_edge, last);
1693 return NULL;
1696 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1697 true_edge->probability);
1698 last = get_last_insn ();
1699 if (false_edge->goto_locus)
1701 set_curr_insn_source_location (false_edge->goto_locus);
1702 set_curr_insn_block (false_edge->goto_block);
1703 false_edge->goto_locus = curr_insn_locator ();
1705 false_edge->goto_block = NULL;
1706 emit_jump (label_rtx_for_bb (false_edge->dest));
1708 BB_END (bb) = last;
1709 if (BARRIER_P (BB_END (bb)))
1710 BB_END (bb) = PREV_INSN (BB_END (bb));
1711 update_bb_for_insn (bb);
1713 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1714 dest = false_edge->dest;
1715 redirect_edge_succ (false_edge, new_bb);
1716 false_edge->flags |= EDGE_FALLTHRU;
1717 new_bb->count = false_edge->count;
1718 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1719 new_edge = make_edge (new_bb, dest, 0);
1720 new_edge->probability = REG_BR_PROB_BASE;
1721 new_edge->count = new_bb->count;
1722 if (BARRIER_P (BB_END (new_bb)))
1723 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1724 update_bb_for_insn (new_bb);
1726 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1728 if (true_edge->goto_locus)
1730 set_curr_insn_source_location (true_edge->goto_locus);
1731 set_curr_insn_block (true_edge->goto_block);
1732 true_edge->goto_locus = curr_insn_locator ();
1734 true_edge->goto_block = NULL;
1736 return new_bb;
1739 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1740 statement STMT. */
1742 static void
1743 expand_call_stmt (gimple stmt)
1745 tree exp;
1746 tree lhs = gimple_call_lhs (stmt);
1747 size_t i;
1748 bool builtin_p;
1749 tree decl;
1751 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1753 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1754 decl = gimple_call_fndecl (stmt);
1755 builtin_p = decl && DECL_BUILT_IN (decl);
1757 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1758 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1760 for (i = 0; i < gimple_call_num_args (stmt); i++)
1762 tree arg = gimple_call_arg (stmt, i);
1763 gimple def;
1764 /* TER addresses into arguments of builtin functions so we have a
1765 chance to infer more correct alignment information. See PR39954. */
1766 if (builtin_p
1767 && TREE_CODE (arg) == SSA_NAME
1768 && (def = get_gimple_for_ssa_name (arg))
1769 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1770 arg = gimple_assign_rhs1 (def);
1771 CALL_EXPR_ARG (exp, i) = arg;
1774 if (gimple_has_side_effects (stmt))
1775 TREE_SIDE_EFFECTS (exp) = 1;
1777 if (gimple_call_nothrow_p (stmt))
1778 TREE_NOTHROW (exp) = 1;
1780 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1781 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1782 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1783 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1784 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1785 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1786 TREE_BLOCK (exp) = gimple_block (stmt);
1788 if (lhs)
1789 expand_assignment (lhs, exp, false);
1790 else
1791 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1794 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1795 STMT that doesn't require special handling for outgoing edges. That
1796 is, no tailcalls and no GIMPLE_COND. */
1798 static void
1799 expand_gimple_stmt_1 (gimple stmt)
1801 tree op0;
1802 switch (gimple_code (stmt))
1804 case GIMPLE_GOTO:
1805 op0 = gimple_goto_dest (stmt);
1806 if (TREE_CODE (op0) == LABEL_DECL)
1807 expand_goto (op0);
1808 else
1809 expand_computed_goto (op0);
1810 break;
1811 case GIMPLE_LABEL:
1812 expand_label (gimple_label_label (stmt));
1813 break;
1814 case GIMPLE_NOP:
1815 case GIMPLE_PREDICT:
1816 break;
1817 case GIMPLE_SWITCH:
1818 expand_case (stmt);
1819 break;
1820 case GIMPLE_ASM:
1821 expand_asm_stmt (stmt);
1822 break;
1823 case GIMPLE_CALL:
1824 expand_call_stmt (stmt);
1825 break;
1827 case GIMPLE_RETURN:
1828 op0 = gimple_return_retval (stmt);
1830 if (op0 && op0 != error_mark_node)
1832 tree result = DECL_RESULT (current_function_decl);
1834 /* If we are not returning the current function's RESULT_DECL,
1835 build an assignment to it. */
1836 if (op0 != result)
1838 /* I believe that a function's RESULT_DECL is unique. */
1839 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1841 /* ??? We'd like to use simply expand_assignment here,
1842 but this fails if the value is of BLKmode but the return
1843 decl is a register. expand_return has special handling
1844 for this combination, which eventually should move
1845 to common code. See comments there. Until then, let's
1846 build a modify expression :-/ */
1847 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1848 result, op0);
1851 if (!op0)
1852 expand_null_return ();
1853 else
1854 expand_return (op0);
1855 break;
1857 case GIMPLE_ASSIGN:
1859 tree lhs = gimple_assign_lhs (stmt);
1861 /* Tree expand used to fiddle with |= and &= of two bitfield
1862 COMPONENT_REFs here. This can't happen with gimple; the LHS
1863 of binary assigns must be a gimple reg. */
1865 if (TREE_CODE (lhs) != SSA_NAME
1866 || get_gimple_rhs_class (gimple_expr_code (stmt))
1867 == GIMPLE_SINGLE_RHS)
1869 tree rhs = gimple_assign_rhs1 (stmt);
1870 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1871 == GIMPLE_SINGLE_RHS);
1872 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1873 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1874 expand_assignment (lhs, rhs,
1875 gimple_assign_nontemporal_move_p (stmt));
1877 else
1879 rtx target, temp;
1880 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1881 struct separate_ops ops;
1882 bool promoted = false;
1884 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1885 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1886 promoted = true;
1888 ops.code = gimple_assign_rhs_code (stmt);
1889 ops.type = TREE_TYPE (lhs);
1890 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1892 case GIMPLE_TERNARY_RHS:
1893 ops.op2 = gimple_assign_rhs3 (stmt);
1894 /* Fallthru */
1895 case GIMPLE_BINARY_RHS:
1896 ops.op1 = gimple_assign_rhs2 (stmt);
1897 /* Fallthru */
1898 case GIMPLE_UNARY_RHS:
1899 ops.op0 = gimple_assign_rhs1 (stmt);
1900 break;
1901 default:
1902 gcc_unreachable ();
1904 ops.location = gimple_location (stmt);
1906          /* If we want to use a nontemporal store, force the value into a
1907             register first. If we store into a promoted register,
1908             don't expand directly to target. */
1909 temp = nontemporal || promoted ? NULL_RTX : target;
1910 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1911 EXPAND_NORMAL);
1913 if (temp == target)
1915 else if (promoted)
1917 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
1918 /* If TEMP is a VOIDmode constant, use convert_modes to make
1919 sure that we properly convert it. */
1920 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1922 temp = convert_modes (GET_MODE (target),
1923 TYPE_MODE (ops.type),
1924 temp, unsignedp);
1925 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
1926 GET_MODE (target), temp, unsignedp);
1929 convert_move (SUBREG_REG (target), temp, unsignedp);
1931 else if (nontemporal && emit_storent_insn (target, temp))
1933 else
1935 temp = force_operand (temp, target);
1936 if (temp != target)
1937 emit_move_insn (target, temp);
1941 break;
1943 default:
1944 gcc_unreachable ();
1948 /* Expand one gimple statement STMT and return the last RTL instruction
1949 before any of the newly generated ones.
1951 In addition to generating the necessary RTL instructions this also
1952 sets REG_EH_REGION notes if necessary and sets the current source
1953 location for diagnostics. */
1955 static rtx
1956 expand_gimple_stmt (gimple stmt)
1958 int lp_nr = 0;
1959 rtx last = NULL;
1960 location_t saved_location = input_location;
1962 last = get_last_insn ();
1964 /* If this is an expression of some kind and it has an associated line
1965 number, then emit the line number before expanding the expression.
1967 We need to save and restore the file and line information so that
1968 errors discovered during expansion are emitted with the right
1969     information. It would be better if the diagnostic routines
1970 used the file/line information embedded in the tree nodes rather
1971 than globals. */
1972 gcc_assert (cfun);
1974 if (gimple_has_location (stmt))
1976 input_location = gimple_location (stmt);
1977 set_curr_insn_source_location (input_location);
1979 /* Record where the insns produced belong. */
1980 set_curr_insn_block (gimple_block (stmt));
1983 expand_gimple_stmt_1 (stmt);
1984 /* Free any temporaries used to evaluate this statement. */
1985 free_temp_slots ();
1987 input_location = saved_location;
1989 /* Mark all insns that may trap. */
1990 lp_nr = lookup_stmt_eh_lp (stmt);
1991 if (lp_nr)
1993 rtx insn;
1994 for (insn = next_real_insn (last); insn;
1995 insn = next_real_insn (insn))
1997 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1998 /* If we want exceptions for non-call insns, any
1999 may_trap_p instruction may throw. */
2000 && GET_CODE (PATTERN (insn)) != CLOBBER
2001 && GET_CODE (PATTERN (insn)) != USE
2002 && insn_could_throw_p (insn))
2003 make_reg_eh_region_note (insn, 0, lp_nr);
2007 return last;
2010 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2011 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2012 generated a tail call (something that might be denied by the ABI
2013 rules governing the call; see calls.c).
2015 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2016 can still reach the rest of BB. The case here is __builtin_sqrt,
2017 where the NaN result goes through the external function (with a
2018 tailcall) and the normal result happens via a sqrt instruction. */
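/* Illustrative sketch, not from the original sources: assuming a target
   that provides a hardware sqrt insn, a function such as

       double
       f (double x)
       {
         return __builtin_sqrt (x);
       }

   may expand so that the common path uses the sqrt insn and falls
   through, while the path producing a NaN result ends in a sibling call
   to the library sqrt.  That is the conditional-tailcall case for which
   CAN_FALLTHRU is set below.  */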
2020 static basic_block
2021 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2023 rtx last2, last;
2024 edge e;
2025 edge_iterator ei;
2026 int probability;
2027 gcov_type count;
2029 last2 = last = expand_gimple_stmt (stmt);
2031 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2032 if (CALL_P (last) && SIBLING_CALL_P (last))
2033 goto found;
2035 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2037 *can_fallthru = true;
2038 return NULL;
2040 found:
2041 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2042 Any instructions emitted here are about to be deleted. */
2043 do_pending_stack_adjust ();
2045 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2046 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2047 EH or abnormal edges, we shouldn't have created a tail call in
2048 the first place. So it seems to me we should just be removing
2049 all edges here, or redirecting the existing fallthru edge to
2050 the exit block. */
2052 probability = 0;
2053 count = 0;
2055 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2057 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2059 if (e->dest != EXIT_BLOCK_PTR)
2061 e->dest->count -= e->count;
2062 e->dest->frequency -= EDGE_FREQUENCY (e);
2063 if (e->dest->count < 0)
2064 e->dest->count = 0;
2065 if (e->dest->frequency < 0)
2066 e->dest->frequency = 0;
2068 count += e->count;
2069 probability += e->probability;
2070 remove_edge (e);
2072 else
2073 ei_next (&ei);
2076 /* This is somewhat ugly: the call_expr expander often emits instructions
2077 after the sibcall (to perform the function return). These confuse the
2078 find_many_sub_basic_blocks code, so we need to get rid of these. */
2079 last = NEXT_INSN (last);
2080 gcc_assert (BARRIER_P (last));
2082 *can_fallthru = false;
2083 while (NEXT_INSN (last))
2085      /* For instance, the sqrt builtin expander expands an if with a
2086         sibcall in the then-arm and a label for the else-arm. */
2087 if (LABEL_P (NEXT_INSN (last)))
2089 *can_fallthru = true;
2090 break;
2092 delete_insn (NEXT_INSN (last));
2095 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2096 e->probability += probability;
2097 e->count += count;
2098 BB_END (bb) = last;
2099 update_bb_for_insn (bb);
2101 if (NEXT_INSN (last))
2103 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2105 last = BB_END (bb);
2106 if (BARRIER_P (last))
2107 BB_END (bb) = PREV_INSN (last);
2110 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2112 return bb;
2115 /* Return the difference between the floor and the truncated result of
2116 a signed division by OP1 with remainder MOD. */
2117 static rtx
2118 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2120 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
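  /* Worked example, added for clarity: -7 / 2 truncates to -3 with
     MOD = -1; the floored result is -4, so the adjustment is -1.
     OP1 / MOD is negative exactly when the quotient is negative,
     because MOD carries the sign of the dividend.  */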
2121 return gen_rtx_IF_THEN_ELSE
2122 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2123 gen_rtx_IF_THEN_ELSE
2124 (mode, gen_rtx_LT (BImode,
2125 gen_rtx_DIV (mode, op1, mod),
2126 const0_rtx),
2127 constm1_rtx, const0_rtx),
2128 const0_rtx);
2131 /* Return the difference between the ceil and the truncated result of
2132 a signed division by OP1 with remainder MOD. */
2133 static rtx
2134 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2136 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
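  /* Worked example, added for clarity: 7 / 2 truncates to 3 with
     MOD = 1; the ceiling is 4, so the adjustment is +1.  For -7 / 2
     (MOD = -1) the truncated and ceiling results are both -3 and the
     adjustment is 0.  */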
2137 return gen_rtx_IF_THEN_ELSE
2138 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2139 gen_rtx_IF_THEN_ELSE
2140 (mode, gen_rtx_GT (BImode,
2141 gen_rtx_DIV (mode, op1, mod),
2142 const0_rtx),
2143 const1_rtx, const0_rtx),
2144 const0_rtx);
2147 /* Return the difference between the ceil and the truncated result of
2148 an unsigned division by OP1 with remainder MOD. */
2149 static rtx
2150 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2152 /* (mod != 0 ? 1 : 0) */
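  /* Worked example, added for clarity: 7u / 2u truncates to 3 with
     MOD = 1; the ceiling is 4, so any nonzero remainder yields an
     adjustment of +1.  */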
2153 return gen_rtx_IF_THEN_ELSE
2154 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2155 const1_rtx, const0_rtx);
2158 /* Return the difference between the rounded and the truncated result
2159 of a signed division by OP1 with remainder MOD. Halfway cases are
2160 rounded away from zero, rather than to the nearest even number. */
2161 static rtx
2162 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2164 /* (abs (mod) >= abs (op1) - abs (mod)
2165 ? (op1 / mod > 0 ? 1 : -1)
2166 : 0) */
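  /* Worked example, added for clarity: 7 / 2 truncates to 3 with
     MOD = 1; abs (MOD) >= abs (OP1) - abs (MOD) holds (1 >= 1), so the
     halfway value 3.5 is rounded away from zero to 4, an adjustment of
     +1.  For 7 / 3 (MOD = 1, 1 >= 2 fails) no adjustment is made.  */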
2167 return gen_rtx_IF_THEN_ELSE
2168 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2169 gen_rtx_MINUS (mode,
2170 gen_rtx_ABS (mode, op1),
2171 gen_rtx_ABS (mode, mod))),
2172 gen_rtx_IF_THEN_ELSE
2173 (mode, gen_rtx_GT (BImode,
2174 gen_rtx_DIV (mode, op1, mod),
2175 const0_rtx),
2176 const1_rtx, constm1_rtx),
2177 const0_rtx);
2180 /* Return the difference between the rounded and the truncated result
2181    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2182 are rounded away from zero, rather than to the nearest even
2183 number. */
2184 static rtx
2185 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2187 /* (mod >= op1 - mod ? 1 : 0) */
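  /* Worked example, added for clarity: 7u / 2u truncates to 3 with
     MOD = 1; MOD >= OP1 - MOD holds (1 >= 1), so the halfway value is
     rounded up to 4, an adjustment of +1.  For 7u / 3u (1 >= 2 fails)
     no adjustment is made.  */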
2188 return gen_rtx_IF_THEN_ELSE
2189 (mode, gen_rtx_GE (BImode, mod,
2190 gen_rtx_MINUS (mode, op1, mod)),
2191 const1_rtx, const0_rtx);
2194 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2195 any rtl. */
2197 static rtx
2198 convert_debug_memory_address (enum machine_mode mode, rtx x)
2200 enum machine_mode xmode = GET_MODE (x);
2202 #ifndef POINTERS_EXTEND_UNSIGNED
2203 gcc_assert (mode == Pmode);
2204 gcc_assert (xmode == mode || xmode == VOIDmode);
2205 #else
2206 gcc_assert (mode == Pmode || mode == ptr_mode);
2208 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2209 return x;
2211 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2212 x = simplify_gen_subreg (mode, x, xmode,
2213 subreg_lowpart_offset
2214 (mode, xmode));
2215 else if (POINTERS_EXTEND_UNSIGNED > 0)
2216 x = gen_rtx_ZERO_EXTEND (mode, x);
2217 else if (!POINTERS_EXTEND_UNSIGNED)
2218 x = gen_rtx_SIGN_EXTEND (mode, x);
2219 else
2220 gcc_unreachable ();
2221 #endif /* POINTERS_EXTEND_UNSIGNED */
2223 return x;
2226 /* Return an RTX equivalent to the value of the tree expression
2227 EXP. */
2229 static rtx
2230 expand_debug_expr (tree exp)
2232 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2233 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2234 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2235 addr_space_t as;
2237 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2239 case tcc_expression:
2240 switch (TREE_CODE (exp))
2242 case COND_EXPR:
2243 case DOT_PROD_EXPR:
2244 case WIDEN_MULT_PLUS_EXPR:
2245 case WIDEN_MULT_MINUS_EXPR:
2246 goto ternary;
2248 case TRUTH_ANDIF_EXPR:
2249 case TRUTH_ORIF_EXPR:
2250 case TRUTH_AND_EXPR:
2251 case TRUTH_OR_EXPR:
2252 case TRUTH_XOR_EXPR:
2253 goto binary;
2255 case TRUTH_NOT_EXPR:
2256 goto unary;
2258 default:
2259 break;
2261 break;
2263 ternary:
2264 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2265 if (!op2)
2266 return NULL_RTX;
2267 /* Fall through. */
2269 binary:
2270 case tcc_binary:
2271 case tcc_comparison:
2272 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2273 if (!op1)
2274 return NULL_RTX;
2275 /* Fall through. */
2277 unary:
2278 case tcc_unary:
2279 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2280 if (!op0)
2281 return NULL_RTX;
2282 break;
2284 case tcc_type:
2285 case tcc_statement:
2286 gcc_unreachable ();
2288 case tcc_constant:
2289 case tcc_exceptional:
2290 case tcc_declaration:
2291 case tcc_reference:
2292 case tcc_vl_exp:
2293 break;
2296 switch (TREE_CODE (exp))
2298 case STRING_CST:
2299 if (!lookup_constant_def (exp))
2301 if (strlen (TREE_STRING_POINTER (exp)) + 1
2302 != (size_t) TREE_STRING_LENGTH (exp))
2303 return NULL_RTX;
2304 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2305 op0 = gen_rtx_MEM (BLKmode, op0);
2306 set_mem_attributes (op0, exp, 0);
2307 return op0;
2309 /* Fall through... */
2311 case INTEGER_CST:
2312 case REAL_CST:
2313 case FIXED_CST:
2314 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2315 return op0;
2317 case COMPLEX_CST:
2318 gcc_assert (COMPLEX_MODE_P (mode));
2319 op0 = expand_debug_expr (TREE_REALPART (exp));
2320 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2321 return gen_rtx_CONCAT (mode, op0, op1);
2323 case DEBUG_EXPR_DECL:
2324 op0 = DECL_RTL_IF_SET (exp);
2326 if (op0)
2327 return op0;
2329 op0 = gen_rtx_DEBUG_EXPR (mode);
2330 DEBUG_EXPR_TREE_DECL (op0) = exp;
2331 SET_DECL_RTL (exp, op0);
2333 return op0;
2335 case VAR_DECL:
2336 case PARM_DECL:
2337 case FUNCTION_DECL:
2338 case LABEL_DECL:
2339 case CONST_DECL:
2340 case RESULT_DECL:
2341 op0 = DECL_RTL_IF_SET (exp);
2343 /* This decl was probably optimized away. */
2344 if (!op0)
2346 if (TREE_CODE (exp) != VAR_DECL
2347 || DECL_EXTERNAL (exp)
2348 || !TREE_STATIC (exp)
2349 || !DECL_NAME (exp)
2350 || DECL_HARD_REGISTER (exp)
2351 || mode == VOIDmode)
2352 return NULL;
2354 op0 = make_decl_rtl_for_debug (exp);
2355 if (!MEM_P (op0)
2356 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2357 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2358 return NULL;
2360 else
2361 op0 = copy_rtx (op0);
2363 if (GET_MODE (op0) == BLKmode
2364          /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
2365 below would ICE. While it is likely a FE bug,
2366 try to be robust here. See PR43166. */
2367 || mode == BLKmode
2368 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2370 gcc_assert (MEM_P (op0));
2371 op0 = adjust_address_nv (op0, mode, 0);
2372 return op0;
2375 /* Fall through. */
2377 adjust_mode:
2378 case PAREN_EXPR:
2379 case NOP_EXPR:
2380 case CONVERT_EXPR:
2382 enum machine_mode inner_mode = GET_MODE (op0);
2384 if (mode == inner_mode)
2385 return op0;
2387 if (inner_mode == VOIDmode)
2389 if (TREE_CODE (exp) == SSA_NAME)
2390 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2391 else
2392 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2393 if (mode == inner_mode)
2394 return op0;
2397 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2399 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2400 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2401 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2402 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2403 else
2404 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2406 else if (FLOAT_MODE_P (mode))
2408 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2409 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2410 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2411 else
2412 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2414 else if (FLOAT_MODE_P (inner_mode))
2416 if (unsignedp)
2417 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2418 else
2419 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2421 else if (CONSTANT_P (op0)
2422 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2423 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2424 subreg_lowpart_offset (mode,
2425 inner_mode));
2426 else if (unsignedp)
2427 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2428 else
2429 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2431 return op0;
2434 case MEM_REF:
2435 /* ??? FIXME. */
2436 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2437 return NULL;
2438 /* Fallthru. */
2439 case INDIRECT_REF:
2440 case MISALIGNED_INDIRECT_REF:
2441 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2442 if (!op0)
2443 return NULL;
2445 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2446 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2447 else
2448 as = ADDR_SPACE_GENERIC;
2450 op0 = gen_rtx_MEM (mode, op0);
2452 set_mem_attributes (op0, exp, 0);
2453 set_mem_addr_space (op0, as);
2455 return op0;
2457 case TARGET_MEM_REF:
2458 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2459 return NULL;
2461 op0 = expand_debug_expr
2462 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2463 if (!op0)
2464 return NULL;
2466 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2468 op0 = gen_rtx_MEM (mode, op0);
2470 set_mem_attributes (op0, exp, 0);
2471 set_mem_addr_space (op0, as);
2473 return op0;
2475 case ARRAY_REF:
2476 case ARRAY_RANGE_REF:
2477 case COMPONENT_REF:
2478 case BIT_FIELD_REF:
2479 case REALPART_EXPR:
2480 case IMAGPART_EXPR:
2481 case VIEW_CONVERT_EXPR:
2483 enum machine_mode mode1;
2484 HOST_WIDE_INT bitsize, bitpos;
2485 tree offset;
2486 int volatilep = 0;
2487 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2488 &mode1, &unsignedp, &volatilep, false);
2489 rtx orig_op0;
2491 if (bitsize == 0)
2492 return NULL;
2494 orig_op0 = op0 = expand_debug_expr (tem);
2496 if (!op0)
2497 return NULL;
2499 if (offset)
2501 enum machine_mode addrmode, offmode;
2503 if (!MEM_P (op0))
2504 return NULL;
2506 op0 = XEXP (op0, 0);
2507 addrmode = GET_MODE (op0);
2508 if (addrmode == VOIDmode)
2509 addrmode = Pmode;
2511 op1 = expand_debug_expr (offset);
2512 if (!op1)
2513 return NULL;
2515 offmode = GET_MODE (op1);
2516 if (offmode == VOIDmode)
2517 offmode = TYPE_MODE (TREE_TYPE (offset));
2519 if (addrmode != offmode)
2520 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2521 subreg_lowpart_offset (addrmode,
2522 offmode));
2524 /* Don't use offset_address here, we don't need a
2525 recognizable address, and we don't want to generate
2526 code. */
2527 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2530 if (MEM_P (op0))
2532 if (mode1 == VOIDmode)
2533 /* Bitfield. */
2534 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2535 if (bitpos >= BITS_PER_UNIT)
2537 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2538 bitpos %= BITS_PER_UNIT;
2540 else if (bitpos < 0)
2542 HOST_WIDE_INT units
2543 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2544 op0 = adjust_address_nv (op0, mode1, units);
2545 bitpos += units * BITS_PER_UNIT;
2547 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2548 op0 = adjust_address_nv (op0, mode, 0);
2549 else if (GET_MODE (op0) != mode1)
2550 op0 = adjust_address_nv (op0, mode1, 0);
2551 else
2552 op0 = copy_rtx (op0);
2553 if (op0 == orig_op0)
2554 op0 = shallow_copy_rtx (op0);
2555 set_mem_attributes (op0, exp, 0);
2558 if (bitpos == 0 && mode == GET_MODE (op0))
2559 return op0;
2561 if (bitpos < 0)
2562 return NULL;
2564 if (GET_MODE (op0) == BLKmode)
2565 return NULL;
2567 if ((bitpos % BITS_PER_UNIT) == 0
2568 && bitsize == GET_MODE_BITSIZE (mode1))
2570 enum machine_mode opmode = GET_MODE (op0);
2572 if (opmode == VOIDmode)
2573 opmode = mode1;
2575 /* This condition may hold if we're expanding the address
2576 right past the end of an array that turned out not to
2577 be addressable (i.e., the address was only computed in
2578 debug stmts). The gen_subreg below would rightfully
2579 crash, and the address doesn't really exist, so just
2580 drop it. */
2581 if (bitpos >= GET_MODE_BITSIZE (opmode))
2582 return NULL;
2584 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2585 return simplify_gen_subreg (mode, op0, opmode,
2586 bitpos / BITS_PER_UNIT);
2589 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2590 && TYPE_UNSIGNED (TREE_TYPE (exp))
2591 ? SIGN_EXTRACT
2592 : ZERO_EXTRACT, mode,
2593 GET_MODE (op0) != VOIDmode
2594 ? GET_MODE (op0) : mode1,
2595 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2598 case ABS_EXPR:
2599 return gen_rtx_ABS (mode, op0);
2601 case NEGATE_EXPR:
2602 return gen_rtx_NEG (mode, op0);
2604 case BIT_NOT_EXPR:
2605 return gen_rtx_NOT (mode, op0);
2607 case FLOAT_EXPR:
2608 if (unsignedp)
2609 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2610 else
2611 return gen_rtx_FLOAT (mode, op0);
2613 case FIX_TRUNC_EXPR:
2614 if (unsignedp)
2615 return gen_rtx_UNSIGNED_FIX (mode, op0);
2616 else
2617 return gen_rtx_FIX (mode, op0);
2619 case POINTER_PLUS_EXPR:
2620 /* For the rare target where pointers are not the same size as
2621         size_t, we need to check for mismatched modes and correct
2622 the addend. */
2623 if (op0 && op1
2624 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2625 && GET_MODE (op0) != GET_MODE (op1))
2627 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2628 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2629 else
2630 /* We always sign-extend, regardless of the signedness of
2631 the operand, because the operand is always unsigned
2632 here even if the original C expression is signed. */
2633 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2635 /* Fall through. */
2636 case PLUS_EXPR:
2637 return gen_rtx_PLUS (mode, op0, op1);
2639 case MINUS_EXPR:
2640 return gen_rtx_MINUS (mode, op0, op1);
2642 case MULT_EXPR:
2643 return gen_rtx_MULT (mode, op0, op1);
2645 case RDIV_EXPR:
2646 case TRUNC_DIV_EXPR:
2647 case EXACT_DIV_EXPR:
2648 if (unsignedp)
2649 return gen_rtx_UDIV (mode, op0, op1);
2650 else
2651 return gen_rtx_DIV (mode, op0, op1);
2653 case TRUNC_MOD_EXPR:
2654 if (unsignedp)
2655 return gen_rtx_UMOD (mode, op0, op1);
2656 else
2657 return gen_rtx_MOD (mode, op0, op1);
2659 case FLOOR_DIV_EXPR:
2660 if (unsignedp)
2661 return gen_rtx_UDIV (mode, op0, op1);
2662 else
2664 rtx div = gen_rtx_DIV (mode, op0, op1);
2665 rtx mod = gen_rtx_MOD (mode, op0, op1);
2666 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2667 return gen_rtx_PLUS (mode, div, adj);
2670 case FLOOR_MOD_EXPR:
2671 if (unsignedp)
2672 return gen_rtx_UMOD (mode, op0, op1);
2673 else
2675 rtx mod = gen_rtx_MOD (mode, op0, op1);
2676 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2677 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2678 return gen_rtx_PLUS (mode, mod, adj);
2681 case CEIL_DIV_EXPR:
2682 if (unsignedp)
2684 rtx div = gen_rtx_UDIV (mode, op0, op1);
2685 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2686 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2687 return gen_rtx_PLUS (mode, div, adj);
2689 else
2691 rtx div = gen_rtx_DIV (mode, op0, op1);
2692 rtx mod = gen_rtx_MOD (mode, op0, op1);
2693 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2694 return gen_rtx_PLUS (mode, div, adj);
2697 case CEIL_MOD_EXPR:
2698 if (unsignedp)
2700 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2701 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2702 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2703 return gen_rtx_PLUS (mode, mod, adj);
2705 else
2707 rtx mod = gen_rtx_MOD (mode, op0, op1);
2708 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2709 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2710 return gen_rtx_PLUS (mode, mod, adj);
2713 case ROUND_DIV_EXPR:
2714 if (unsignedp)
2716 rtx div = gen_rtx_UDIV (mode, op0, op1);
2717 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2718 rtx adj = round_udiv_adjust (mode, mod, op1);
2719 return gen_rtx_PLUS (mode, div, adj);
2721 else
2723 rtx div = gen_rtx_DIV (mode, op0, op1);
2724 rtx mod = gen_rtx_MOD (mode, op0, op1);
2725 rtx adj = round_sdiv_adjust (mode, mod, op1);
2726 return gen_rtx_PLUS (mode, div, adj);
2729 case ROUND_MOD_EXPR:
2730 if (unsignedp)
2732 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2733 rtx adj = round_udiv_adjust (mode, mod, op1);
2734 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2735 return gen_rtx_PLUS (mode, mod, adj);
2737 else
2739 rtx mod = gen_rtx_MOD (mode, op0, op1);
2740 rtx adj = round_sdiv_adjust (mode, mod, op1);
2741 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2742 return gen_rtx_PLUS (mode, mod, adj);
2745 case LSHIFT_EXPR:
2746 return gen_rtx_ASHIFT (mode, op0, op1);
2748 case RSHIFT_EXPR:
2749 if (unsignedp)
2750 return gen_rtx_LSHIFTRT (mode, op0, op1);
2751 else
2752 return gen_rtx_ASHIFTRT (mode, op0, op1);
2754 case LROTATE_EXPR:
2755 return gen_rtx_ROTATE (mode, op0, op1);
2757 case RROTATE_EXPR:
2758 return gen_rtx_ROTATERT (mode, op0, op1);
2760 case MIN_EXPR:
2761 if (unsignedp)
2762 return gen_rtx_UMIN (mode, op0, op1);
2763 else
2764 return gen_rtx_SMIN (mode, op0, op1);
2766 case MAX_EXPR:
2767 if (unsignedp)
2768 return gen_rtx_UMAX (mode, op0, op1);
2769 else
2770 return gen_rtx_SMAX (mode, op0, op1);
2772 case BIT_AND_EXPR:
2773 case TRUTH_AND_EXPR:
2774 return gen_rtx_AND (mode, op0, op1);
2776 case BIT_IOR_EXPR:
2777 case TRUTH_OR_EXPR:
2778 return gen_rtx_IOR (mode, op0, op1);
2780 case BIT_XOR_EXPR:
2781 case TRUTH_XOR_EXPR:
2782 return gen_rtx_XOR (mode, op0, op1);
2784 case TRUTH_ANDIF_EXPR:
2785 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2787 case TRUTH_ORIF_EXPR:
2788 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2790 case TRUTH_NOT_EXPR:
2791 return gen_rtx_EQ (mode, op0, const0_rtx);
2793 case LT_EXPR:
2794 if (unsignedp)
2795 return gen_rtx_LTU (mode, op0, op1);
2796 else
2797 return gen_rtx_LT (mode, op0, op1);
2799 case LE_EXPR:
2800 if (unsignedp)
2801 return gen_rtx_LEU (mode, op0, op1);
2802 else
2803 return gen_rtx_LE (mode, op0, op1);
2805 case GT_EXPR:
2806 if (unsignedp)
2807 return gen_rtx_GTU (mode, op0, op1);
2808 else
2809 return gen_rtx_GT (mode, op0, op1);
2811 case GE_EXPR:
2812 if (unsignedp)
2813 return gen_rtx_GEU (mode, op0, op1);
2814 else
2815 return gen_rtx_GE (mode, op0, op1);
2817 case EQ_EXPR:
2818 return gen_rtx_EQ (mode, op0, op1);
2820 case NE_EXPR:
2821 return gen_rtx_NE (mode, op0, op1);
2823 case UNORDERED_EXPR:
2824 return gen_rtx_UNORDERED (mode, op0, op1);
2826 case ORDERED_EXPR:
2827 return gen_rtx_ORDERED (mode, op0, op1);
2829 case UNLT_EXPR:
2830 return gen_rtx_UNLT (mode, op0, op1);
2832 case UNLE_EXPR:
2833 return gen_rtx_UNLE (mode, op0, op1);
2835 case UNGT_EXPR:
2836 return gen_rtx_UNGT (mode, op0, op1);
2838 case UNGE_EXPR:
2839 return gen_rtx_UNGE (mode, op0, op1);
2841 case UNEQ_EXPR:
2842 return gen_rtx_UNEQ (mode, op0, op1);
2844 case LTGT_EXPR:
2845 return gen_rtx_LTGT (mode, op0, op1);
2847 case COND_EXPR:
2848 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2850 case COMPLEX_EXPR:
2851 gcc_assert (COMPLEX_MODE_P (mode));
2852 if (GET_MODE (op0) == VOIDmode)
2853 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2854 if (GET_MODE (op1) == VOIDmode)
2855 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2856 return gen_rtx_CONCAT (mode, op0, op1);
2858 case CONJ_EXPR:
2859 if (GET_CODE (op0) == CONCAT)
2860 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2861 gen_rtx_NEG (GET_MODE_INNER (mode),
2862 XEXP (op0, 1)));
2863 else
2865 enum machine_mode imode = GET_MODE_INNER (mode);
2866 rtx re, im;
2868 if (MEM_P (op0))
2870 re = adjust_address_nv (op0, imode, 0);
2871 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2873 else
2875 enum machine_mode ifmode = int_mode_for_mode (mode);
2876 enum machine_mode ihmode = int_mode_for_mode (imode);
2877 rtx halfsize;
2878 if (ifmode == BLKmode || ihmode == BLKmode)
2879 return NULL;
2880 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2881 re = op0;
2882 if (mode != ifmode)
2883 re = gen_rtx_SUBREG (ifmode, re, 0);
2884 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2885 if (imode != ihmode)
2886 re = gen_rtx_SUBREG (imode, re, 0);
2887 im = copy_rtx (op0);
2888 if (mode != ifmode)
2889 im = gen_rtx_SUBREG (ifmode, im, 0);
2890 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2891 if (imode != ihmode)
2892 im = gen_rtx_SUBREG (imode, im, 0);
2894 im = gen_rtx_NEG (imode, im);
2895 return gen_rtx_CONCAT (mode, re, im);
2898 case ADDR_EXPR:
2899 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2900 if (!op0 || !MEM_P (op0))
2901 return NULL;
2903 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2905 return op0;
2907 case VECTOR_CST:
2908 exp = build_constructor_from_list (TREE_TYPE (exp),
2909 TREE_VECTOR_CST_ELTS (exp));
2910 /* Fall through. */
2912 case CONSTRUCTOR:
2913 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2915 unsigned i;
2916 tree val;
2918 op0 = gen_rtx_CONCATN
2919 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2921 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2923 op1 = expand_debug_expr (val);
2924 if (!op1)
2925 return NULL;
2926 XVECEXP (op0, 0, i) = op1;
2929 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2931 op1 = expand_debug_expr
2932 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2934 if (!op1)
2935 return NULL;
2937 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2938 XVECEXP (op0, 0, i) = op1;
2941 return op0;
2943 else
2944 goto flag_unsupported;
2946 case CALL_EXPR:
2947 /* ??? Maybe handle some builtins? */
2948 return NULL;
2950 case SSA_NAME:
2952 gimple g = get_gimple_for_ssa_name (exp);
2953 if (g)
2955 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2956 if (!op0)
2957 return NULL;
2959 else
2961 int part = var_to_partition (SA.map, exp);
2963 if (part == NO_PARTITION)
2964 return NULL;
2966 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2968 op0 = SA.partition_to_pseudo[part];
2970 goto adjust_mode;
2973 case ERROR_MARK:
2974 return NULL;
2976 /* Vector stuff. For most of the codes we don't have rtl codes. */
2977 case REALIGN_LOAD_EXPR:
2978 case REDUC_MAX_EXPR:
2979 case REDUC_MIN_EXPR:
2980 case REDUC_PLUS_EXPR:
2981 case VEC_COND_EXPR:
2982 case VEC_EXTRACT_EVEN_EXPR:
2983 case VEC_EXTRACT_ODD_EXPR:
2984 case VEC_INTERLEAVE_HIGH_EXPR:
2985 case VEC_INTERLEAVE_LOW_EXPR:
2986 case VEC_LSHIFT_EXPR:
2987 case VEC_PACK_FIX_TRUNC_EXPR:
2988 case VEC_PACK_SAT_EXPR:
2989 case VEC_PACK_TRUNC_EXPR:
2990 case VEC_RSHIFT_EXPR:
2991 case VEC_UNPACK_FLOAT_HI_EXPR:
2992 case VEC_UNPACK_FLOAT_LO_EXPR:
2993 case VEC_UNPACK_HI_EXPR:
2994 case VEC_UNPACK_LO_EXPR:
2995 case VEC_WIDEN_MULT_HI_EXPR:
2996 case VEC_WIDEN_MULT_LO_EXPR:
2997 return NULL;
2999 /* Misc codes. */
3000 case ADDR_SPACE_CONVERT_EXPR:
3001 case FIXED_CONVERT_EXPR:
3002 case OBJ_TYPE_REF:
3003 case WITH_SIZE_EXPR:
3004 return NULL;
3006 case DOT_PROD_EXPR:
3007 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3008 && SCALAR_INT_MODE_P (mode))
3010 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3011 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3012 else
3013 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3014 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3015 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3016 else
3017 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3018 op0 = gen_rtx_MULT (mode, op0, op1);
3019 return gen_rtx_PLUS (mode, op0, op2);
3021 return NULL;
3023 case WIDEN_MULT_EXPR:
3024 case WIDEN_MULT_PLUS_EXPR:
3025 case WIDEN_MULT_MINUS_EXPR:
3026 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3027 && SCALAR_INT_MODE_P (mode))
3029 enum machine_mode inner_mode = GET_MODE (op0);
3030 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3031 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3032 else
3033 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3034 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3035 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3036 else
3037 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3038 op0 = gen_rtx_MULT (mode, op0, op1);
3039 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3040 return op0;
3041 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3042 return gen_rtx_PLUS (mode, op0, op2);
3043 else
3044 return gen_rtx_MINUS (mode, op2, op0);
3046 return NULL;
3048 case WIDEN_SUM_EXPR:
3049 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3050 && SCALAR_INT_MODE_P (mode))
3052 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3053 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3054 else
3055 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3056 return gen_rtx_PLUS (mode, op0, op1);
3058 return NULL;
3060 default:
3061 flag_unsupported:
3062 #ifdef ENABLE_CHECKING
3063 debug_tree (exp);
3064 gcc_unreachable ();
3065 #else
3066 return NULL;
3067 #endif
3071 /* Expand the _LOCs in debug insns. We run this after expanding all
3072 regular insns, so that any variables referenced in the function
3073 will have their DECL_RTLs set. */
3075 static void
3076 expand_debug_locations (void)
3078 rtx insn;
3079 rtx last = get_last_insn ();
3080 int save_strict_alias = flag_strict_aliasing;
3082 /* New alias sets while setting up memory attributes cause
3083      -fcompare-debug failures, even though they don't bring about any
3084 codegen changes. */
3085 flag_strict_aliasing = 0;
3087 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3088 if (DEBUG_INSN_P (insn))
3090 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3091 rtx val;
3092 enum machine_mode mode;
3094 if (value == NULL_TREE)
3095 val = NULL_RTX;
3096 else
3098 val = expand_debug_expr (value);
3099 gcc_assert (last == get_last_insn ());
3102 if (!val)
3103 val = gen_rtx_UNKNOWN_VAR_LOC ();
3104 else
3106 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3108 gcc_assert (mode == GET_MODE (val)
3109 || (GET_MODE (val) == VOIDmode
3110 && (CONST_INT_P (val)
3111 || GET_CODE (val) == CONST_FIXED
3112 || GET_CODE (val) == CONST_DOUBLE
3113 || GET_CODE (val) == LABEL_REF)));
3116 INSN_VAR_LOCATION_LOC (insn) = val;
3119 flag_strict_aliasing = save_strict_alias;
3122 /* Expand basic block BB from GIMPLE trees to RTL. */
3124 static basic_block
3125 expand_gimple_basic_block (basic_block bb)
3127 gimple_stmt_iterator gsi;
3128 gimple_seq stmts;
3129 gimple stmt = NULL;
3130 rtx note, last;
3131 edge e;
3132 edge_iterator ei;
3133 void **elt;
3135 if (dump_file)
3136 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3137 bb->index);
3139 /* Note that since we are now transitioning from GIMPLE to RTL, we
3140 cannot use the gsi_*_bb() routines because they expect the basic
3141 block to be in GIMPLE, instead of RTL. Therefore, we need to
3142 access the BB sequence directly. */
3143 stmts = bb_seq (bb);
3144 bb->il.gimple = NULL;
3145 rtl_profile_for_bb (bb);
3146 init_rtl_bb_info (bb);
3147 bb->flags |= BB_RTL;
3149 /* Remove the RETURN_EXPR if we may fall though to the exit
3150 instead. */
3151 gsi = gsi_last (stmts);
3152 if (!gsi_end_p (gsi)
3153 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3155 gimple ret_stmt = gsi_stmt (gsi);
3157 gcc_assert (single_succ_p (bb));
3158 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3160 if (bb->next_bb == EXIT_BLOCK_PTR
3161 && !gimple_return_retval (ret_stmt))
3163 gsi_remove (&gsi, false);
3164 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3168 gsi = gsi_start (stmts);
3169 if (!gsi_end_p (gsi))
3171 stmt = gsi_stmt (gsi);
3172 if (gimple_code (stmt) != GIMPLE_LABEL)
3173 stmt = NULL;
3176 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3178 if (stmt || elt)
3180 last = get_last_insn ();
3182 if (stmt)
3184 expand_gimple_stmt (stmt);
3185 gsi_next (&gsi);
3188 if (elt)
3189 emit_label ((rtx) *elt);
3191       /* Java emits line number notes at the top of labels.
3192 ??? Make this go away once line number notes are obsoleted. */
3193 BB_HEAD (bb) = NEXT_INSN (last);
3194 if (NOTE_P (BB_HEAD (bb)))
3195 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3196 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3198 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3200 else
3201 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3203 NOTE_BASIC_BLOCK (note) = bb;
3205 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3207 basic_block new_bb;
3209 stmt = gsi_stmt (gsi);
3211 /* If this statement is a non-debug one, and we generate debug
3212 insns, then this one might be the last real use of a TERed
3213 SSA_NAME, but where there are still some debug uses further
3214 down. Expanding the current SSA name in such further debug
3215 uses by their RHS might lead to wrong debug info, as coalescing
3216 might make the operands of such RHS be placed into the same
3217 pseudo as something else. Like so:
3218 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3219 use(a_1);
3220 a_2 = ...
3221 #DEBUG ... => a_1
3222 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3223         If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3224 the write to a_2 would actually have clobbered the place which
3225 formerly held a_0.
3227 So, instead of that, we recognize the situation, and generate
3228 debug temporaries at the last real use of TERed SSA names:
3229 a_1 = a_0 + 1;
3230 #DEBUG #D1 => a_1
3231 use(a_1);
3232 a_2 = ...
3233 #DEBUG ... => #D1
3235 if (MAY_HAVE_DEBUG_INSNS
3236 && SA.values
3237 && !is_gimple_debug (stmt))
3239 ssa_op_iter iter;
3240 tree op;
3241 gimple def;
3243 location_t sloc = get_curr_insn_source_location ();
3244 tree sblock = get_curr_insn_block ();
3246 /* Look for SSA names that have their last use here (TERed
3247 names always have only one real use). */
3248 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3249 if ((def = get_gimple_for_ssa_name (op)))
3251 imm_use_iterator imm_iter;
3252 use_operand_p use_p;
3253 bool have_debug_uses = false;
3255 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3257 if (gimple_debug_bind_p (USE_STMT (use_p)))
3259 have_debug_uses = true;
3260 break;
3264 if (have_debug_uses)
3266                  /* OP is a TERed SSA name, with DEF its defining
3267 statement, and where OP is used in further debug
3268 instructions. Generate a debug temporary, and
3269 replace all uses of OP in debug insns with that
3270 temporary. */
3271 gimple debugstmt;
3272 tree value = gimple_assign_rhs_to_tree (def);
3273 tree vexpr = make_node (DEBUG_EXPR_DECL);
3274 rtx val;
3275 enum machine_mode mode;
3277 set_curr_insn_source_location (gimple_location (def));
3278 set_curr_insn_block (gimple_block (def));
3280 DECL_ARTIFICIAL (vexpr) = 1;
3281 TREE_TYPE (vexpr) = TREE_TYPE (value);
3282 if (DECL_P (value))
3283 mode = DECL_MODE (value);
3284 else
3285 mode = TYPE_MODE (TREE_TYPE (value));
3286 DECL_MODE (vexpr) = mode;
3288 val = gen_rtx_VAR_LOCATION
3289 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3291 val = emit_debug_insn (val);
3293 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3295 if (!gimple_debug_bind_p (debugstmt))
3296 continue;
3298 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3299 SET_USE (use_p, vexpr);
3301 update_stmt (debugstmt);
3305 set_curr_insn_source_location (sloc);
3306 set_curr_insn_block (sblock);
3309 currently_expanding_gimple_stmt = stmt;
3311 /* Expand this statement, then evaluate the resulting RTL and
3312 fixup the CFG accordingly. */
3313 if (gimple_code (stmt) == GIMPLE_COND)
3315 new_bb = expand_gimple_cond (bb, stmt);
3316 if (new_bb)
3317 return new_bb;
3319 else if (gimple_debug_bind_p (stmt))
3321 location_t sloc = get_curr_insn_source_location ();
3322 tree sblock = get_curr_insn_block ();
3323 gimple_stmt_iterator nsi = gsi;
3325 for (;;)
3327 tree var = gimple_debug_bind_get_var (stmt);
3328 tree value;
3329 rtx val;
3330 enum machine_mode mode;
3332 if (gimple_debug_bind_has_value_p (stmt))
3333 value = gimple_debug_bind_get_value (stmt);
3334 else
3335 value = NULL_TREE;
3337 last = get_last_insn ();
3339 set_curr_insn_source_location (gimple_location (stmt));
3340 set_curr_insn_block (gimple_block (stmt));
3342 if (DECL_P (var))
3343 mode = DECL_MODE (var);
3344 else
3345 mode = TYPE_MODE (TREE_TYPE (var));
3347 val = gen_rtx_VAR_LOCATION
3348 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3350 val = emit_debug_insn (val);
3352 if (dump_file && (dump_flags & TDF_DETAILS))
3354 /* We can't dump the insn with a TREE where an RTX
3355 is expected. */
3356 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3357 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3358 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3361 /* In order not to generate too many debug temporaries,
3362 we delink all uses of debug statements we already expanded.
3363 Therefore debug statements between definition and real
3364 use of TERed SSA names will continue to use the SSA name,
3365 and not be replaced with debug temps. */
3366 delink_stmt_imm_use (stmt);
3368 gsi = nsi;
3369 gsi_next (&nsi);
3370 if (gsi_end_p (nsi))
3371 break;
3372 stmt = gsi_stmt (nsi);
3373 if (!gimple_debug_bind_p (stmt))
3374 break;
3377 set_curr_insn_source_location (sloc);
3378 set_curr_insn_block (sblock);
3380 else
3382 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3384 bool can_fallthru;
3385 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3386 if (new_bb)
3388 if (can_fallthru)
3389 bb = new_bb;
3390 else
3391 return new_bb;
3394 else
3396 def_operand_p def_p;
3397 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3399 if (def_p != NULL)
3401 /* Ignore this stmt if it is in the list of
3402 replaceable expressions. */
3403 if (SA.values
3404 && bitmap_bit_p (SA.values,
3405 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3406 continue;
3408 last = expand_gimple_stmt (stmt);
3409 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3414 currently_expanding_gimple_stmt = NULL;
3416 /* Expand implicit goto and convert goto_locus. */
3417 FOR_EACH_EDGE (e, ei, bb->succs)
3419 if (e->goto_locus && e->goto_block)
3421 set_curr_insn_source_location (e->goto_locus);
3422 set_curr_insn_block (e->goto_block);
3423 e->goto_locus = curr_insn_locator ();
3425 e->goto_block = NULL;
3426 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3428 emit_jump (label_rtx_for_bb (e->dest));
3429 e->flags &= ~EDGE_FALLTHRU;
3433   /* Expanded RTL can create a jump in the last instruction of the block.
3434      This jump might later be assumed to lead to the successor and break edge insertion.
3435      We need to insert a dummy move to prevent this.  PR41440. */
3436 if (single_succ_p (bb)
3437 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3438 && (last = get_last_insn ())
3439 && JUMP_P (last))
3441 rtx dummy = gen_reg_rtx (SImode);
3442 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3445 do_pending_stack_adjust ();
3447 /* Find the block tail. The last insn in the block is the insn
3448 before a barrier and/or table jump insn. */
3449 last = get_last_insn ();
3450 if (BARRIER_P (last))
3451 last = PREV_INSN (last);
3452 if (JUMP_TABLE_DATA_P (last))
3453 last = PREV_INSN (PREV_INSN (last));
3454 BB_END (bb) = last;
3456 update_bb_for_insn (bb);
3458 return bb;
3462 /* Create a basic block for initialization code. */
3464 static basic_block
3465 construct_init_block (void)
3467 basic_block init_block, first_block;
3468 edge e = NULL;
3469 int flags;
3471 /* Multiple entry points not supported yet. */
3472 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3473 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3474 init_rtl_bb_info (EXIT_BLOCK_PTR);
3475 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3476 EXIT_BLOCK_PTR->flags |= BB_RTL;
3478 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3480   /* When the entry edge points to the first basic block, we don't need a jump;
3481      otherwise we have to jump to the proper target. */
3482 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3484 tree label = gimple_block_label (e->dest);
3486 emit_jump (label_rtx (label));
3487 flags = 0;
3489 else
3490 flags = EDGE_FALLTHRU;
3492 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3493 get_last_insn (),
3494 ENTRY_BLOCK_PTR);
3495 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3496 init_block->count = ENTRY_BLOCK_PTR->count;
3497 if (e)
3499 first_block = e->dest;
3500 redirect_edge_succ (e, init_block);
3501 e = make_edge (init_block, first_block, flags);
3503 else
3504 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3505 e->probability = REG_BR_PROB_BASE;
3506 e->count = ENTRY_BLOCK_PTR->count;
3508 update_bb_for_insn (init_block);
3509 return init_block;
3512 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3513 found in the block tree. */
3515 static void
3516 set_block_levels (tree block, int level)
3518 while (block)
3520 BLOCK_NUMBER (block) = level;
3521 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3522 block = BLOCK_CHAIN (block);
3526 /* Create a block containing landing pads and similar stuff. */
3528 static void
3529 construct_exit_block (void)
3531 rtx head = get_last_insn ();
3532 rtx end;
3533 basic_block exit_block;
3534 edge e, e2;
3535 unsigned ix;
3536 edge_iterator ei;
3537 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3539 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3541 /* Make sure the locus is set to the end of the function, so that
3542 epilogue line numbers and warnings are set properly. */
3543 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3544 input_location = cfun->function_end_locus;
3546 /* The following insns belong to the top scope. */
3547 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3549 /* Generate rtl for function exit. */
3550 expand_function_end ();
3552 end = get_last_insn ();
3553 if (head == end)
3554 return;
3555   /* While emitting the function end we could move the end of the last basic block.  */
3557 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3558 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3559 head = NEXT_INSN (head);
3560 exit_block = create_basic_block (NEXT_INSN (head), end,
3561 EXIT_BLOCK_PTR->prev_bb);
3562 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3563 exit_block->count = EXIT_BLOCK_PTR->count;
3565 ix = 0;
3566 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3568 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3569 if (!(e->flags & EDGE_ABNORMAL))
3570 redirect_edge_succ (e, exit_block);
3571 else
3572 ix++;
3575 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3576 e->probability = REG_BR_PROB_BASE;
3577 e->count = EXIT_BLOCK_PTR->count;
3578 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3579 if (e2 != e)
3581 e->count -= e2->count;
3582 exit_block->count -= e2->count;
3583 exit_block->frequency -= EDGE_FREQUENCY (e2);
3585 if (e->count < 0)
3586 e->count = 0;
3587 if (exit_block->count < 0)
3588 exit_block->count = 0;
3589 if (exit_block->frequency < 0)
3590 exit_block->frequency = 0;
3591 update_bb_for_insn (exit_block);
3594 /* Helper function for discover_nonconstant_array_refs.
3595 Look for ARRAY_REF nodes with non-constant indexes and mark them
3596 addressable. */
3598 static tree
3599 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3600 void *data ATTRIBUTE_UNUSED)
3602 tree t = *tp;
3604 if (IS_TYPE_OR_DECL_P (t))
3605 *walk_subtrees = 0;
3606 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3608 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3609 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3610 && (!TREE_OPERAND (t, 2)
3611 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3612 || (TREE_CODE (t) == COMPONENT_REF
3613 && (!TREE_OPERAND (t,2)
3614 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3615 || TREE_CODE (t) == BIT_FIELD_REF
3616 || TREE_CODE (t) == REALPART_EXPR
3617 || TREE_CODE (t) == IMAGPART_EXPR
3618 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3619 || CONVERT_EXPR_P (t))
3620 t = TREE_OPERAND (t, 0);
3622 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3624 t = get_base_address (t);
3625 if (t && DECL_P (t)
3626 && DECL_MODE (t) != BLKmode)
3627 TREE_ADDRESSABLE (t) = 1;
3630 *walk_subtrees = 0;
3633 return NULL_TREE;
3636 /* RTL expansion is not able to compile array references with variable
3637    offsets for arrays stored in a single register.  Discover such
3638 expressions and mark variables as addressable to avoid this
3639 scenario. */
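/* Minimal sketch, not from the original sources, of code this pass
   affects:

       int
       f (int i)
       {
         int a[2] = { 1, 2 };
         return a[i];
       }

   The non-constant index I makes the walker above mark A as
   TREE_ADDRESSABLE, so A is kept in memory rather than in a single
   register.  */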
3641 static void
3642 discover_nonconstant_array_refs (void)
3644 basic_block bb;
3645 gimple_stmt_iterator gsi;
3647 FOR_EACH_BB (bb)
3648 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3650 gimple stmt = gsi_stmt (gsi);
3651 if (!is_gimple_debug (stmt))
3652 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3656 /* This function sets crtl->args.internal_arg_pointer to a virtual
3657 register if DRAP is needed. Local register allocator will replace
3658 virtual_incoming_args_rtx with the virtual register. */
3660 static void
3661 expand_stack_alignment (void)
3663 rtx drap_rtx;
3664 unsigned int preferred_stack_boundary;
3666 if (! SUPPORTS_STACK_ALIGNMENT)
3667 return;
3669 if (cfun->calls_alloca
3670 || cfun->has_nonlocal_label
3671 || crtl->has_nonlocal_goto)
3672 crtl->need_drap = true;
3674 /* Call update_stack_boundary here again to update incoming stack
3675 boundary. It may set incoming stack alignment to a different
3676 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3677 use the minimum incoming stack alignment to check if it is OK
3678 to perform sibcall optimization since sibcall optimization will
3679 only align the outgoing stack to incoming stack boundary. */
3680 if (targetm.calls.update_stack_boundary)
3681 targetm.calls.update_stack_boundary ();
3683 /* The incoming stack frame has to be aligned at least at
3684 parm_stack_boundary. */
3685 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3687 /* Update crtl->stack_alignment_estimated and use it later to align
3688 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3689 exceptions since callgraph doesn't collect incoming stack alignment
3690 in this case. */
3691 if (cfun->can_throw_non_call_exceptions
3692 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3693 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3694 else
3695 preferred_stack_boundary = crtl->preferred_stack_boundary;
3696 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3697 crtl->stack_alignment_estimated = preferred_stack_boundary;
3698 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3699 crtl->stack_alignment_needed = preferred_stack_boundary;
3701 gcc_assert (crtl->stack_alignment_needed
3702 <= crtl->stack_alignment_estimated);
3704 crtl->stack_realign_needed
3705 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3706 crtl->stack_realign_tried = crtl->stack_realign_needed;
3708 crtl->stack_realign_processed = true;
3710 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3711 alignment. */
3712 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3713 drap_rtx = targetm.calls.get_drap_rtx ();
3715 /* stack_realign_drap and drap_rtx must match. */
3716 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3718 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3719 if (NULL != drap_rtx)
3721 crtl->args.internal_arg_pointer = drap_rtx;
3723 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3724 needed. */
3725 fixup_tail_calls ();
3729 /* Translate the intermediate representation contained in the CFG
3730 from GIMPLE trees to RTL.
3732 We do conversion per basic block and preserve/update the tree CFG.
3733 This implies we have to do some magic as the CFG can simultaneously
3734 consist of basic blocks containing RTL and GIMPLE trees. This can
3735    confuse the CFG hooks, so be careful not to manipulate the CFG during
3736 the expansion. */
3738 static unsigned int
3739 gimple_expand_cfg (void)
3741 basic_block bb, init_block;
3742 sbitmap blocks;
3743 edge_iterator ei;
3744 edge e;
3745 unsigned i;
3747 timevar_push (TV_OUT_OF_SSA);
3748 rewrite_out_of_ssa (&SA);
3749 timevar_pop (TV_OUT_OF_SSA);
3750 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3751 sizeof (rtx));
3753 /* Some backends want to know that we are expanding to RTL. */
3754 currently_expanding_to_rtl = 1;
3756 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3758 insn_locators_alloc ();
3759 if (!DECL_IS_BUILTIN (current_function_decl))
3761 /* Eventually, all FEs should explicitly set function_start_locus. */
3762 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3763 set_curr_insn_source_location
3764 (DECL_SOURCE_LOCATION (current_function_decl));
3765 else
3766 set_curr_insn_source_location (cfun->function_start_locus);
3768 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3769 prologue_locator = curr_insn_locator ();
3771 #ifdef INSN_SCHEDULING
3772 init_sched_attrs ();
3773 #endif
3775 /* Make sure first insn is a note even if we don't want linenums.
3776 This makes sure the first insn will never be deleted.
3777 Also, final expects a note to appear there. */
3778 emit_note (NOTE_INSN_DELETED);
3780 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3781 discover_nonconstant_array_refs ();
3783 targetm.expand_to_rtl_hook ();
3784 crtl->stack_alignment_needed = STACK_BOUNDARY;
3785 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3786 crtl->stack_alignment_estimated = 0;
3787 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3788 cfun->cfg->max_jumptable_ents = 0;
3791 /* Expand the variables recorded during gimple lowering. */
3792 timevar_push (TV_VAR_EXPAND);
3793 expand_used_vars ();
3794 timevar_pop (TV_VAR_EXPAND);
3796 /* Honor stack protection warnings. */
3797 if (warn_stack_protect)
3799 if (cfun->calls_alloca)
3800 warning (OPT_Wstack_protector,
3801 "stack protector not protecting local variables: "
3802 "variable length buffer");
3803 if (has_short_buffer && !crtl->stack_protect_guard)
3804 warning (OPT_Wstack_protector,
3805 "stack protector not protecting function: "
3806 "all local arrays are less than %d bytes long",
3807 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3810 /* Set up parameters and prepare for return, for the function. */
3811 expand_function_start (current_function_decl);
3813 /* Now that we also have the parameter RTXs, copy them over to our
3814 partitions. */
3815 for (i = 0; i < SA.map->num_partitions; i++)
3817 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3819 if (TREE_CODE (var) != VAR_DECL
3820 && !SA.partition_to_pseudo[i])
3821 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3822 gcc_assert (SA.partition_to_pseudo[i]);
3824 /* If this decl was marked as living in multiple places, reset
3825 this now to NULL. */
3826 if (DECL_RTL_IF_SET (var) == pc_rtx)
3827 SET_DECL_RTL (var, NULL);
3829 /* Some RTL parts really want to look at DECL_RTL(x) when x
3830 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3831          SET_DECL_RTL here to make this available, but that would mean
3832          selecting one of the potentially many RTLs for one DECL.  Instead
3833 of doing that we simply reset the MEM_EXPR of the RTL in question,
3834 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3835 if (!DECL_RTL_SET_P (var))
3837 if (MEM_P (SA.partition_to_pseudo[i]))
3838 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3842 /* If this function is `main', emit a call to `__main'
3843 to run global initializers, etc. */
3844 if (DECL_NAME (current_function_decl)
3845 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3846 && DECL_FILE_SCOPE_P (current_function_decl))
3847 expand_main_function ();
3849 /* Initialize the stack_protect_guard field. This must happen after the
3850 call to __main (if any) so that the external decl is initialized. */
3851 if (crtl->stack_protect_guard)
3852 stack_protect_prologue ();
3854 expand_phi_nodes (&SA);
3856 /* Register rtl specific functions for cfg. */
3857 rtl_register_cfg_hooks ();
3859 init_block = construct_init_block ();
3861 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3862 remaining edges later. */
3863 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3864 e->flags &= ~EDGE_EXECUTABLE;
3866 lab_rtx_for_bb = pointer_map_create ();
3867 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3868 bb = expand_gimple_basic_block (bb);
3870 if (MAY_HAVE_DEBUG_INSNS)
3871 expand_debug_locations ();
3873 execute_free_datastructures ();
3874 timevar_push (TV_OUT_OF_SSA);
3875 finish_out_of_ssa (&SA);
3876 timevar_pop (TV_OUT_OF_SSA);
3878 timevar_push (TV_POST_EXPAND);
3879 /* We are no longer in SSA form. */
3880 cfun->gimple_df->in_ssa_p = false;
3882 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
3883 conservatively to true until they are all profile aware. */
3884 pointer_map_destroy (lab_rtx_for_bb);
3885 free_histograms ();
3887 construct_exit_block ();
3888 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3889 insn_locators_finalize ();
3891 /* Zap the tree EH table. */
3892 set_eh_throw_stmt_table (cfun, NULL);
3894 rebuild_jump_labels (get_insns ());
3896 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3898 edge e;
3899 edge_iterator ei;
3900 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3902 if (e->insns.r)
3903 commit_one_edge_insertion (e);
3904 else
3905 ei_next (&ei);
3909 /* We're done expanding trees to RTL. */
3910 currently_expanding_to_rtl = 0;
3912 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3914 edge e;
3915 edge_iterator ei;
3916 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3918 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3919 e->flags &= ~EDGE_EXECUTABLE;
3921 /* At the moment not all abnormal edges match the RTL
3922 representation. It is safe to remove them here as
3923 find_many_sub_basic_blocks will rediscover them.
3924 In the future we should get this fixed properly. */
3925 if ((e->flags & EDGE_ABNORMAL)
3926 && !(e->flags & EDGE_SIBCALL))
3927 remove_edge (e);
3928 else
3929 ei_next (&ei);
3933 blocks = sbitmap_alloc (last_basic_block);
3934 sbitmap_ones (blocks);
3935 find_many_sub_basic_blocks (blocks);
3936 sbitmap_free (blocks);
3937 purge_all_dead_edges ();
3939 compact_blocks ();
3941 expand_stack_alignment ();
3943 #ifdef ENABLE_CHECKING
3944 verify_flow_info ();
3945 #endif
3947 /* There's no need to defer outputting this function any more; we
3948 know we want to output it. */
3949 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3951 /* Now that we're done expanding trees to RTL, we shouldn't have any
3952 more CONCATs anywhere. */
3953 generating_concat_p = 0;
3955 if (dump_file)
3957 fprintf (dump_file,
3958 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3959 /* And the pass manager will dump RTL for us. */
3962 /* If we're emitting a nested function, make sure its parent gets
3963 emitted as well. Doing otherwise confuses debug info. */
3965 tree parent;
3966 for (parent = DECL_CONTEXT (current_function_decl);
3967 parent != NULL_TREE;
3968 parent = get_containing_scope (parent))
3969 if (TREE_CODE (parent) == FUNCTION_DECL)
3970 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3973 /* We are now committed to emitting code for this function. Do any
3974 preparation, such as emitting abstract debug info for the inline
3975 before it gets mangled by optimization. */
3976 if (cgraph_function_possibly_inlined_p (current_function_decl))
3977 (*debug_hooks->outlining_inline_function) (current_function_decl);
3979 TREE_ASM_WRITTEN (current_function_decl) = 1;
3981 /* After expanding, the return labels are no longer needed. */
3982 return_label = NULL;
3983 naked_return_label = NULL;
3984 /* Tag the blocks with a depth number so that change_scope can find
3985 the common parent easily. */
3986 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3987 default_rtl_profile ();
3988 timevar_pop (TV_POST_EXPAND);
3989 return 0;
3992 struct rtl_opt_pass pass_expand =
3995 RTL_PASS,
3996 "expand", /* name */
3997 NULL, /* gate */
3998 gimple_expand_cfg, /* execute */
3999 NULL, /* sub */
4000 NULL, /* next */
4001 0, /* static_pass_number */
4002 TV_EXPAND, /* tv_id */
4003 PROP_ssa | PROP_gimple_leh | PROP_cfg
4004 | PROP_gimple_lcx, /* properties_required */
4005 PROP_rtl, /* properties_provided */
4006 PROP_ssa | PROP_trees, /* properties_destroyed */
4007 TODO_verify_ssa | TODO_verify_flow
4008 | TODO_verify_stmts, /* todo_flags_start */
4009 TODO_dump_func
4010 | TODO_ggc_collect /* todo_flags_finish */