gcc/cfgexpand.c (official-gcc/Ramakrishna.git)
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
45 #include "ssaexpand.h"
48 /* This variable holds information helping the rewriting of SSA trees
49 into RTL. */
50 struct ssaexpand SA;
52 /* This variable holds the currently expanded gimple statement for purposes
53 of communicating the profile info to the builtin expanders. */
54 gimple currently_expanding_gimple_stmt;
56 /* Return an expression tree corresponding to the RHS of GIMPLE
57 statement STMT. */
59 tree
60 gimple_assign_rhs_to_tree (gimple stmt)
62 tree t;
63 enum gimple_rhs_class grhs_class;
65 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
67 if (grhs_class == GIMPLE_BINARY_RHS)
68 t = build2 (gimple_assign_rhs_code (stmt),
69 TREE_TYPE (gimple_assign_lhs (stmt)),
70 gimple_assign_rhs1 (stmt),
71 gimple_assign_rhs2 (stmt));
72 else if (grhs_class == GIMPLE_UNARY_RHS)
73 t = build1 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt));
76 else if (grhs_class == GIMPLE_SINGLE_RHS)
78 t = gimple_assign_rhs1 (stmt);
79 /* Avoid modifying this tree in place below. */
80 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
81 && gimple_location (stmt) != EXPR_LOCATION (t))
82 t = copy_node (t);
84 else
85 gcc_unreachable ();
87 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
88 SET_EXPR_LOCATION (t, gimple_location (stmt));
90 return t;
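/* Editor's illustration (not part of the original source): for a
   statement like "a_1 = b_2 + c_3" the RHS class is GIMPLE_BINARY_RHS,
   so the function above rebuilds a PLUS_EXPR of the LHS type with b_2
   and c_3 as operands; for "a_1 = b_2" the class is GIMPLE_SINGLE_RHS
   and rhs1 is returned directly, copied first if a location has to be
   set without modifying the shared tree.  */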
94 /* Verify that there is exactly one jump instruction since LAST and attach
95 REG_BR_PROB note specifying probability.
96 ??? We really ought to pass the probability down to RTL expanders and let it
97 re-distribute it when the conditional expands into multiple conditionals.
98 This is however difficult to do. */
99 void
100 add_reg_br_prob_note (rtx last, int probability)
102 if (profile_status == PROFILE_ABSENT)
103 return;
104 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
105 if (JUMP_P (last))
107 /* It is common to emit a condjump-around-jump sequence when we don't know
108 how to reverse the conditional. Special case this. */
109 if (!any_condjump_p (last)
110 || !JUMP_P (NEXT_INSN (last))
111 || !simplejump_p (NEXT_INSN (last))
112 || !NEXT_INSN (NEXT_INSN (last))
113 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
114 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
115 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
116 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
117 goto failed;
118 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
119 add_reg_note (last, REG_BR_PROB,
120 GEN_INT (REG_BR_PROB_BASE - probability));
121 return;
123 if (!last || !JUMP_P (last) || !any_condjump_p (last))
124 goto failed;
125 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
126 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
127 return;
128 failed:
129 if (dump_file)
130 fprintf (dump_file, "Failed to add probability note\n");
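/* Editor's illustration (not part of the original source): with
   REG_BR_PROB_BASE of 10000, a GIMPLE branch predicted taken with
   probability 9000 that was expanded as a condjump around an
   unconditional jump gets a REG_BR_PROB note of 10000 - 9000 = 1000 on
   the conditional jump, since that jump is taken exactly when the
   original branch is not.  */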
134 #ifndef STACK_ALIGNMENT_NEEDED
135 #define STACK_ALIGNMENT_NEEDED 1
136 #endif
138 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
140 /* Associate declaration T with storage space X. If T is not an
141 SSA name this is exactly SET_DECL_RTL, otherwise make the
142 partition of T associated with X. */
143 static inline void
144 set_rtl (tree t, rtx x)
146 if (TREE_CODE (t) == SSA_NAME)
148 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
149 if (x && !MEM_P (x))
150 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
151 /* For the benefit of debug information at -O0 (where vartracking
152 doesn't run) record the place also in the base DECL if it's
153 a normal variable (not a parameter). */
154 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
156 tree var = SSA_NAME_VAR (t);
157 /* If we don't yet have something recorded, just record it now. */
158 if (!DECL_RTL_SET_P (var))
159 SET_DECL_RTL (var, x);
160 /* If we already have it set to "multiple places" don't
161 change this. */
162 else if (DECL_RTL (var) == pc_rtx)
164 /* If we have something recorded and it's not the same place
165 as we want to record now, we have multiple partitions for the
166 same base variable, with different places. We can't just
167 randomly choose one, hence we have to say that we don't know.
168 This only happens with optimization, and there var-tracking
169 will figure out the right thing. */
170 else if (DECL_RTL (var) != x)
171 SET_DECL_RTL (var, pc_rtx);
174 else
175 SET_DECL_RTL (t, x);
178 /* This structure holds data relevant to one variable that will be
179 placed in a stack slot. */
180 struct stack_var
182 /* The Variable. */
183 tree decl;
185 /* The offset of the variable. During partitioning, this is the
186 offset relative to the partition. After partitioning, this
187 is relative to the stack frame. */
188 HOST_WIDE_INT offset;
190 /* Initially, the size of the variable. Later, the size of the partition,
191 if this variable becomes its partition's representative. */
192 HOST_WIDE_INT size;
194 /* The *byte* alignment required for this variable. Or, as with the
195 size, the alignment for this partition. */
196 unsigned int alignb;
198 /* The partition representative. */
199 size_t representative;
201 /* The next stack variable in the partition, or EOC. */
202 size_t next;
205 #define EOC ((size_t)-1)
207 /* We have an array of such objects while deciding allocation. */
208 static struct stack_var *stack_vars;
209 static size_t stack_vars_alloc;
210 static size_t stack_vars_num;
212 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
213 is non-decreasing. */
214 static size_t *stack_vars_sorted;
216 /* We have an interference graph between such objects. This graph
217 is lower triangular. */
218 static bool *stack_vars_conflict;
219 static size_t stack_vars_conflict_alloc;
220 static size_t n_stack_vars_conflict;
222 /* The phase of the stack frame. This is the known misalignment of
223 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
224 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
225 static int frame_phase;
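/* Editor's illustration (not part of the original source): the phase is
   computed in expand_used_vars below.  On a hypothetical target with
   PREFERRED_STACK_BOUNDARY of 128 bits (align = 16 bytes) and
   STARTING_FRAME_OFFSET of 8, off = 8 and frame_phase = 16 - 8 = 8, so
   frame_offset + 8 is kept a multiple of 16 while slots are carved out.  */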
227 /* Used during expand_used_vars to remember if we saw any decls for
228 which we'd like to enable stack smashing protection. */
229 static bool has_protected_decls;
231 /* Used during expand_used_vars. Remember if we saw a character buffer
232 smaller than our cutoff threshold. Used for -Wstack-protector. */
233 static bool has_short_buffer;
235 /* Discover the byte alignment to use for DECL. Ignore alignment
236 we can't satisfy with the expected alignment of the stack boundary. */
238 static unsigned int
239 get_decl_align_unit (tree decl)
241 unsigned int align;
243 align = LOCAL_DECL_ALIGNMENT (decl);
245 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
246 align = MAX_SUPPORTED_STACK_ALIGNMENT;
248 if (SUPPORTS_STACK_ALIGNMENT)
250 if (crtl->stack_alignment_estimated < align)
252 gcc_assert(!crtl->stack_realign_processed);
253 crtl->stack_alignment_estimated = align;
257 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
258 So here we only make sure stack_alignment_needed >= align. */
259 if (crtl->stack_alignment_needed < align)
260 crtl->stack_alignment_needed = align;
261 if (crtl->max_used_stack_slot_alignment < align)
262 crtl->max_used_stack_slot_alignment = align;
264 return align / BITS_PER_UNIT;
267 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
268 Return the frame offset. */
270 static HOST_WIDE_INT
271 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
273 HOST_WIDE_INT offset, new_frame_offset;
275 new_frame_offset = frame_offset;
276 if (FRAME_GROWS_DOWNWARD)
278 new_frame_offset -= size + frame_phase;
279 new_frame_offset &= -align;
280 new_frame_offset += frame_phase;
281 offset = new_frame_offset;
283 else
285 new_frame_offset -= frame_phase;
286 new_frame_offset += align - 1;
287 new_frame_offset &= -align;
288 new_frame_offset += frame_phase;
289 offset = new_frame_offset;
290 new_frame_offset += size;
292 frame_offset = new_frame_offset;
294 if (frame_offset_overflow (frame_offset, cfun->decl))
295 frame_offset = offset = 0;
297 return offset;
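/* Editor's illustration (not part of the original source), assuming a
   downward-growing frame, frame_phase == 0 and frame_offset == -4:
   a request for size 8 at align 8 gives new_frame_offset = -4 - 8 = -12,
   which the mask rounds down to -16, so the object is placed at offset
   -16 and frame_offset becomes -16.  With an upward-growing frame and
   frame_offset == 4, the same request rounds up to offset 8 and leaves
   frame_offset at 16.  */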
300 /* Accumulate DECL into STACK_VARS. */
302 static void
303 add_stack_var (tree decl)
305 if (stack_vars_num >= stack_vars_alloc)
307 if (stack_vars_alloc)
308 stack_vars_alloc = stack_vars_alloc * 3 / 2;
309 else
310 stack_vars_alloc = 32;
311 stack_vars
312 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
314 stack_vars[stack_vars_num].decl = decl;
315 stack_vars[stack_vars_num].offset = 0;
316 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
317 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
319 /* All variables are initially in their own partition. */
320 stack_vars[stack_vars_num].representative = stack_vars_num;
321 stack_vars[stack_vars_num].next = EOC;
323 /* Ensure that this decl doesn't get put onto the list twice. */
324 set_rtl (decl, pc_rtx);
326 stack_vars_num++;
329 /* Compute the linear index of a lower-triangular coordinate (I, J). */
331 static size_t
332 triangular_index (size_t i, size_t j)
334 if (i < j)
336 size_t t;
337 t = i, i = j, j = t;
340 if (i & 1)
341 return ((i + 1) / 2) * i + j;
342 else
343 return (i / 2) * (i + 1) + j;
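/* Editor's illustration (not part of the original source): both branches
   compute i * (i + 1) / 2 + j while keeping the halving exact; the
   even/odd split only decides which factor is divided.  For example,
   triangular_index (3, 1) == ((3 + 1) / 2) * 3 + 1 == 7, the same slot
   as triangular_index (1, 3) after the swap, and N objects need
   triangular_index (N - 1, N - 1) + 1 == N * (N + 1) / 2 entries.  */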
346 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
348 static void
349 resize_stack_vars_conflict (size_t n)
351 size_t size = triangular_index (n-1, n-1) + 1;
353 if (size <= stack_vars_conflict_alloc)
355 if (n > n_stack_vars_conflict)
356 fatal_error ("program is too large to be compiled on this machine");
357 return;
360 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
361 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
362 (size - stack_vars_conflict_alloc) * sizeof (bool));
363 stack_vars_conflict_alloc = size;
364 n_stack_vars_conflict = n;
367 /* Make the decls associated with LUIDs X and Y conflict. */
369 static void
370 add_stack_var_conflict (size_t x, size_t y)
372 size_t index = triangular_index (x, y);
373 gcc_assert (index < stack_vars_conflict_alloc);
374 stack_vars_conflict[index] = true;
377 /* Check whether the decls associated with LUIDs X and Y conflict. */
379 static bool
380 stack_var_conflict_p (size_t x, size_t y)
382 size_t index = triangular_index (x, y);
383 gcc_assert (index < stack_vars_conflict_alloc);
384 return stack_vars_conflict[index];
387 /* Returns true if TYPE is or contains a union type. */
389 static bool
390 aggregate_contains_union_type (tree type)
392 tree field;
394 if (TREE_CODE (type) == UNION_TYPE
395 || TREE_CODE (type) == QUAL_UNION_TYPE)
396 return true;
397 if (TREE_CODE (type) == ARRAY_TYPE)
398 return aggregate_contains_union_type (TREE_TYPE (type));
399 if (TREE_CODE (type) != RECORD_TYPE)
400 return false;
402 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
403 if (TREE_CODE (field) == FIELD_DECL)
404 if (aggregate_contains_union_type (TREE_TYPE (field)))
405 return true;
407 return false;
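/* Editor's illustration (not part of the original source): for
     struct s { int i; union { short h; float f; } u; };
   the function returns true through the RECORD_TYPE field walk, as it
   does for an array of such structs, while a plain
   struct { int a; int b; } returns false.  */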
410 /* A subroutine of expand_used_vars. If two variables X and Y have alias
411 sets that do not conflict, then do add a conflict for these variables
412 in the interference graph. We also need to make sure to add conflicts
413 for union-containing structures. Otherwise RTL alias analysis comes along
414 and due to type based aliasing rules decides that for two overlapping
415 union temporaries { short s; int i; } accesses to the same mem through
416 different types may not alias and happily reorders stores across
417 life-time boundaries of the temporaries (See PR25654).
418 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
420 static void
421 add_alias_set_conflicts (void)
423 size_t i, j, n = stack_vars_num;
425 for (i = 0; i < n; ++i)
427 tree type_i = TREE_TYPE (stack_vars[i].decl);
428 bool aggr_i = AGGREGATE_TYPE_P (type_i);
429 bool contains_union;
431 contains_union = aggregate_contains_union_type (type_i);
432 for (j = 0; j < i; ++j)
434 tree type_j = TREE_TYPE (stack_vars[j].decl);
435 bool aggr_j = AGGREGATE_TYPE_P (type_j);
436 if (aggr_i != aggr_j
437 /* Either the objects conflict by means of type based
438 aliasing rules, or we need to add a conflict. */
439 || !objects_must_conflict_p (type_i, type_j)
440 /* In case the types do not conflict ensure that access
441 to elements will conflict. In case of unions we have
442 to be careful as type based aliasing rules may say
443 access to the same memory does not conflict. So play
444 safe and add a conflict in this case. */
445 || contains_union)
446 add_stack_var_conflict (i, j);
451 /* A subroutine of partition_stack_vars. A comparison function for qsort,
452 sorting an array of indices by the size and type of the object. */
454 static int
455 stack_var_size_cmp (const void *a, const void *b)
457 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
458 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
459 tree decla, declb;
460 unsigned int uida, uidb;
462 if (sa < sb)
463 return -1;
464 if (sa > sb)
465 return 1;
466 decla = stack_vars[*(const size_t *)a].decl;
467 declb = stack_vars[*(const size_t *)b].decl;
468 /* For stack variables of the same size use an id of the decls
469 to make the sort stable. Two SSA names are compared by their
470 version, SSA names come before non-SSA names, and two normal
471 decls are compared by their DECL_UID. */
472 if (TREE_CODE (decla) == SSA_NAME)
474 if (TREE_CODE (declb) == SSA_NAME)
475 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
476 else
477 return -1;
479 else if (TREE_CODE (declb) == SSA_NAME)
480 return 1;
481 else
482 uida = DECL_UID (decla), uidb = DECL_UID (declb);
483 if (uida < uidb)
484 return -1;
485 if (uida > uidb)
486 return 1;
487 return 0;
491 /* If the points-to solution *PI points to variables that are in a partition
492 together with other variables add all partition members to the pointed-to
493 variables bitmap. */
495 static void
496 add_partitioned_vars_to_ptset (struct pt_solution *pt,
497 struct pointer_map_t *decls_to_partitions,
498 struct pointer_set_t *visited, bitmap temp)
500 bitmap_iterator bi;
501 unsigned i;
502 bitmap *part;
504 if (pt->anything
505 || pt->vars == NULL
506 /* The pointed-to vars bitmap is shared; it is enough to
507 visit it once. */
508 || pointer_set_insert(visited, pt->vars))
509 return;
511 bitmap_clear (temp);
513 /* By using a temporary bitmap to store all members of the partitions
514 we have to add, we make sure to visit each of the partitions only
515 once. */
516 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
517 if ((!temp
518 || !bitmap_bit_p (temp, i))
519 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
520 (void *)(size_t) i)))
521 bitmap_ior_into (temp, *part);
522 if (!bitmap_empty_p (temp))
523 bitmap_ior_into (pt->vars, temp);
526 /* Update points-to sets based on partition info, so we can use them on RTL.
527 The bitmaps representing stack partitions will be saved until expand,
528 where partitioned decls used as bases in memory expressions will be
529 rewritten. */
531 static void
532 update_alias_info_with_stack_vars (void)
534 struct pointer_map_t *decls_to_partitions = NULL;
535 size_t i, j;
536 tree var = NULL_TREE;
538 for (i = 0; i < stack_vars_num; i++)
540 bitmap part = NULL;
541 tree name;
542 struct ptr_info_def *pi;
544 /* Not interested in partitions with a single variable. */
545 if (stack_vars[i].representative != i
546 || stack_vars[i].next == EOC)
547 continue;
549 if (!decls_to_partitions)
551 decls_to_partitions = pointer_map_create ();
552 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
555 /* Create an SSA_NAME that points to the partition for use
556 as base during alias-oracle queries on RTL for bases that
557 have been partitioned. */
558 if (var == NULL_TREE)
559 var = create_tmp_var (ptr_type_node, NULL);
560 name = make_ssa_name (var, NULL);
562 /* Create bitmaps representing partitions. They will be used for
563 points-to sets later, so use GGC alloc. */
564 part = BITMAP_GGC_ALLOC ();
565 for (j = i; j != EOC; j = stack_vars[j].next)
567 tree decl = stack_vars[j].decl;
568 unsigned int uid = DECL_UID (decl);
569 /* We should never end up partitioning SSA names (though they
570 may end up on the stack). Neither should we allocate stack
571 space to something that is unused and thus unreferenced. */
572 gcc_assert (DECL_P (decl)
573 && referenced_var_lookup (uid));
574 bitmap_set_bit (part, uid);
575 *((bitmap *) pointer_map_insert (decls_to_partitions,
576 (void *)(size_t) uid)) = part;
577 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
578 decl)) = name;
581 /* Make the SSA name point to all partition members. */
582 pi = get_ptr_info (name);
583 pt_solution_set (&pi->pt, part);
586 /* Make all points-to sets that contain one member of a partition
587 contain all members of the partition. */
588 if (decls_to_partitions)
590 unsigned i;
591 struct pointer_set_t *visited = pointer_set_create ();
592 bitmap temp = BITMAP_ALLOC (NULL);
594 for (i = 1; i < num_ssa_names; i++)
596 tree name = ssa_name (i);
597 struct ptr_info_def *pi;
599 if (name
600 && POINTER_TYPE_P (TREE_TYPE (name))
601 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
602 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
603 visited, temp);
606 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
607 decls_to_partitions, visited, temp);
608 add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
609 decls_to_partitions, visited, temp);
611 pointer_set_destroy (visited);
612 pointer_map_destroy (decls_to_partitions);
613 BITMAP_FREE (temp);
617 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
618 partitioning algorithm. Partitions A and B are known to be non-conflicting.
619 Merge them into a single partition A.
621 At the same time, add OFFSET to all variables in partition B. At the end
622 of the partitioning process we'll have a nice block that is easy to lay out within
623 the stack frame. */
625 static void
626 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
628 size_t i, last;
630 /* Update each element of partition B with the given offset,
631 and merge them into partition A. */
632 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
634 stack_vars[i].offset += offset;
635 stack_vars[i].representative = a;
637 stack_vars[last].next = stack_vars[a].next;
638 stack_vars[a].next = b;
640 /* Update the required alignment of partition A to account for B. */
641 if (stack_vars[a].alignb < stack_vars[b].alignb)
642 stack_vars[a].alignb = stack_vars[b].alignb;
644 /* Update the interference graph and merge the conflicts. */
645 for (last = stack_vars_num, i = 0; i < last; ++i)
646 if (stack_var_conflict_p (b, i))
647 add_stack_var_conflict (a, i);
650 /* A subroutine of expand_used_vars. Binpack the variables into
651 partitions constrained by the interference graph. The overall
652 algorithm used is as follows:
654 Sort the objects by size.
655 For each object A {
656 S = size(A)
657 O = 0
658 loop {
659 Look for the largest non-conflicting object B with size <= S.
660 UNION (A, B)
661 offset(B) = O
662 O += size(B)
663 S -= size(B)
668 static void
669 partition_stack_vars (void)
671 size_t si, sj, n = stack_vars_num;
673 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
674 for (si = 0; si < n; ++si)
675 stack_vars_sorted[si] = si;
677 if (n == 1)
678 return;
680 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
682 /* Special case: detect when all variables conflict, and thus we can't
683 do anything during the partitioning loop. It isn't uncommon (with
684 C code at least) to declare all variables at the top of the function,
685 and if we're not inlining, then all variables will be in the same scope.
686 Take advantage of very fast libc routines for this scan. */
687 gcc_assert (sizeof(bool) == sizeof(char));
688 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
689 return;
691 for (si = 0; si < n; ++si)
693 size_t i = stack_vars_sorted[si];
694 HOST_WIDE_INT isize = stack_vars[i].size;
695 HOST_WIDE_INT offset = 0;
697 for (sj = si; sj-- > 0; )
699 size_t j = stack_vars_sorted[sj];
700 HOST_WIDE_INT jsize = stack_vars[j].size;
701 unsigned int jalign = stack_vars[j].alignb;
703 /* Ignore objects that aren't partition representatives. */
704 if (stack_vars[j].representative != j)
705 continue;
707 /* Ignore objects too large for the remaining space. */
708 if (isize < jsize)
709 continue;
711 /* Ignore conflicting objects. */
712 if (stack_var_conflict_p (i, j))
713 continue;
715 /* Refine the remaining space check to include alignment. */
716 if (offset & (jalign - 1))
718 HOST_WIDE_INT toff = offset;
719 toff += jalign - 1;
720 toff &= -(HOST_WIDE_INT)jalign;
721 if (isize - (toff - offset) < jsize)
722 continue;
724 isize -= toff - offset;
725 offset = toff;
728 /* UNION the objects, placing J at OFFSET. */
729 union_stack_vars (i, j, offset);
731 isize -= jsize;
732 if (isize == 0)
733 break;
737 if (optimize)
738 update_alias_info_with_stack_vars ();
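/* Editor's illustration (not part of the original source): given three
   stack variables of 4, 8 and 16 bytes, none of which conflict, the
   sorted scan unions the smaller ones into the 16-byte variable's
   partition, so the frame later needs a single 16-byte block instead of
   28 bytes; variables that conflict with the representative are skipped
   and keep their own partitions.  */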
741 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
743 static void
744 dump_stack_var_partition (void)
746 size_t si, i, j, n = stack_vars_num;
748 for (si = 0; si < n; ++si)
750 i = stack_vars_sorted[si];
752 /* Skip variables that aren't partition representatives, for now. */
753 if (stack_vars[i].representative != i)
754 continue;
756 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
757 " align %u\n", (unsigned long) i, stack_vars[i].size,
758 stack_vars[i].alignb);
760 for (j = i; j != EOC; j = stack_vars[j].next)
762 fputc ('\t', dump_file);
763 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
764 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
765 stack_vars[j].offset);
770 /* Assign rtl to DECL at frame offset OFFSET. */
772 static void
773 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
775 /* Alignment is unsigned. */
776 unsigned HOST_WIDE_INT align;
777 rtx x;
779 /* If this fails, we've overflowed the stack frame. Error nicely? */
780 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
782 x = plus_constant (virtual_stack_vars_rtx, offset);
783 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
785 if (TREE_CODE (decl) != SSA_NAME)
787 /* Set alignment we actually gave this decl if it isn't an SSA name.
788 If it is, we generate stack slots only accidentally, so it isn't as
789 important; we'll simply use the alignment that is already set. */
790 offset -= frame_phase;
791 align = offset & -offset;
792 align *= BITS_PER_UNIT;
793 if (align == 0)
794 align = STACK_BOUNDARY;
795 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
796 align = MAX_SUPPORTED_STACK_ALIGNMENT;
798 DECL_ALIGN (decl) = align;
799 DECL_USER_ALIGN (decl) = 0;
802 set_mem_attributes (x, SSAVAR (decl), true);
803 set_rtl (decl, x);
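/* Editor's illustration (not part of the original source): "offset &
   -offset" isolates the lowest set bit of the phase-adjusted offset,
   i.e. the largest power-of-two alignment the slot is guaranteed to
   have.  For a hypothetical offset of -24 (or 24) that is 8 bytes,
   recorded as a DECL_ALIGN of 64 bits; an offset of 0 yields 0 and
   falls back to STACK_BOUNDARY.  */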
806 /* A subroutine of expand_used_vars. Give each partition representative
807 a unique location within the stack frame. Update each partition member
808 with that location. */
810 static void
811 expand_stack_vars (bool (*pred) (tree))
813 size_t si, i, j, n = stack_vars_num;
815 for (si = 0; si < n; ++si)
817 HOST_WIDE_INT offset;
819 i = stack_vars_sorted[si];
821 /* Skip variables that aren't partition representatives, for now. */
822 if (stack_vars[i].representative != i)
823 continue;
825 /* Skip variables that have already had rtl assigned. See also
826 add_stack_var where we perpetrate this pc_rtx hack. */
827 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
828 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
829 : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
830 continue;
832 /* Check the predicate to see whether this variable should be
833 allocated in this pass. */
834 if (pred && !pred (stack_vars[i].decl))
835 continue;
837 offset = alloc_stack_frame_space (stack_vars[i].size,
838 stack_vars[i].alignb);
840 /* Create rtl for each variable based on their location within the
841 partition. */
842 for (j = i; j != EOC; j = stack_vars[j].next)
844 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
845 expand_one_stack_var_at (stack_vars[j].decl,
846 stack_vars[j].offset + offset);
851 /* Take into account all sizes of partitions and reset DECL_RTLs. */
852 static HOST_WIDE_INT
853 account_stack_vars (void)
855 size_t si, j, i, n = stack_vars_num;
856 HOST_WIDE_INT size = 0;
858 for (si = 0; si < n; ++si)
860 i = stack_vars_sorted[si];
862 /* Skip variables that aren't partition representatives, for now. */
863 if (stack_vars[i].representative != i)
864 continue;
866 size += stack_vars[i].size;
867 for (j = i; j != EOC; j = stack_vars[j].next)
868 set_rtl (stack_vars[j].decl, NULL);
870 return size;
873 /* A subroutine of expand_one_var. Called to immediately assign rtl
874 to a variable to be allocated in the stack frame. */
876 static void
877 expand_one_stack_var (tree var)
879 HOST_WIDE_INT size, offset, align;
881 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
882 align = get_decl_align_unit (SSAVAR (var));
883 offset = alloc_stack_frame_space (size, align);
885 expand_one_stack_var_at (var, offset);
888 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
889 that will reside in a hard register. */
891 static void
892 expand_one_hard_reg_var (tree var)
894 rest_of_decl_compilation (var, 0, 0);
897 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
898 that will reside in a pseudo register. */
900 static void
901 expand_one_register_var (tree var)
903 tree decl = SSAVAR (var);
904 tree type = TREE_TYPE (decl);
905 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
906 rtx x = gen_reg_rtx (reg_mode);
908 set_rtl (var, x);
910 /* Note if the object is a user variable. */
911 if (!DECL_ARTIFICIAL (decl))
912 mark_user_reg (x);
914 if (POINTER_TYPE_P (type))
915 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
918 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
919 has some associated error, e.g. its type is error-mark. We just need
920 to pick something that won't crash the rest of the compiler. */
922 static void
923 expand_one_error_var (tree var)
925 enum machine_mode mode = DECL_MODE (var);
926 rtx x;
928 if (mode == BLKmode)
929 x = gen_rtx_MEM (BLKmode, const0_rtx);
930 else if (mode == VOIDmode)
931 x = const0_rtx;
932 else
933 x = gen_reg_rtx (mode);
935 SET_DECL_RTL (var, x);
938 /* A subroutine of expand_one_var. VAR is a variable that will be
939 allocated to the local stack frame. Return true if we wish to
940 add VAR to STACK_VARS so that it will be coalesced with other
941 variables. Return false to allocate VAR immediately.
943 This function is used to reduce the number of variables considered
944 for coalescing, which reduces the size of the quadratic problem. */
946 static bool
947 defer_stack_allocation (tree var, bool toplevel)
949 /* If stack protection is enabled, *all* stack variables must be deferred,
950 so that we can re-order the strings to the top of the frame. */
951 if (flag_stack_protect)
952 return true;
954 /* Variables in the outermost scope automatically conflict with
955 every other variable. The only reason to want to defer them
956 at all is that, after sorting, we can more efficiently pack
957 small variables in the stack frame. Continue to defer at -O2. */
958 if (toplevel && optimize < 2)
959 return false;
961 /* Without optimization, *most* variables are allocated from the
962 stack, which makes the quadratic problem large exactly when we
963 want compilation to proceed as quickly as possible. On the
964 other hand, we don't want the function's stack frame size to
965 get completely out of hand. So we avoid adding scalars and
966 "small" aggregates to the list at all. */
967 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
968 return false;
970 return true;
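/* Editor's illustration (not part of the original source): at -O0 a
   4-byte "int i;" declared in an inner block falls under the 32-byte
   cutoff and gets its frame slot immediately, while a "char buf[64];"
   in the same block is deferred so it can be packed with other
   variables; with -fstack-protector everything is deferred so the
   vulnerable buffers can be reordered to the top of the frame.  */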
973 /* A subroutine of expand_used_vars. Expand one variable according to
974 its flavor. Variables to be placed on the stack are not actually
975 expanded yet, merely recorded.
976 When REALLY_EXPAND is false, only add stack values to be allocated.
977 Return the stack usage this variable is supposed to take.
980 static HOST_WIDE_INT
981 expand_one_var (tree var, bool toplevel, bool really_expand)
983 tree origvar = var;
984 var = SSAVAR (var);
986 if (SUPPORTS_STACK_ALIGNMENT
987 && TREE_TYPE (var) != error_mark_node
988 && TREE_CODE (var) == VAR_DECL)
990 unsigned int align;
992 /* Because we don't know if VAR will be in a register or on the stack,
993 we conservatively assume it will be on the stack even if VAR is
994 eventually put into a register after the RA pass. For non-automatic
995 variables, which won't be on stack, we collect alignment of
996 type and ignore user specified alignment. */
997 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
998 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
999 TYPE_MODE (TREE_TYPE (var)),
1000 TYPE_ALIGN (TREE_TYPE (var)));
1001 else
1002 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1004 if (crtl->stack_alignment_estimated < align)
1006 /* stack_alignment_estimated shouldn't change after stack
1007 realign decision has been made. */
1008 gcc_assert(!crtl->stack_realign_processed);
1009 crtl->stack_alignment_estimated = align;
1013 if (TREE_CODE (origvar) == SSA_NAME)
1015 gcc_assert (TREE_CODE (var) != VAR_DECL
1016 || (!DECL_EXTERNAL (var)
1017 && !DECL_HAS_VALUE_EXPR_P (var)
1018 && !TREE_STATIC (var)
1019 && TREE_TYPE (var) != error_mark_node
1020 && !DECL_HARD_REGISTER (var)
1021 && really_expand));
1023 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1025 else if (DECL_EXTERNAL (var))
1027 else if (DECL_HAS_VALUE_EXPR_P (var))
1029 else if (TREE_STATIC (var))
1031 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1033 else if (TREE_TYPE (var) == error_mark_node)
1035 if (really_expand)
1036 expand_one_error_var (var);
1038 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1040 if (really_expand)
1041 expand_one_hard_reg_var (var);
1043 else if (use_register_for_decl (var))
1045 if (really_expand)
1046 expand_one_register_var (origvar);
1048 else if (defer_stack_allocation (var, toplevel))
1049 add_stack_var (origvar);
1050 else
1052 if (really_expand)
1053 expand_one_stack_var (origvar);
1054 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1056 return 0;
1059 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1060 expanding variables. Those variables that can be put into registers
1061 are allocated pseudos; those that can't are put on the stack.
1063 TOPLEVEL is true if this is the outermost BLOCK. */
1065 static void
1066 expand_used_vars_for_block (tree block, bool toplevel)
1068 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1069 tree t;
1071 old_sv_num = toplevel ? 0 : stack_vars_num;
1073 /* Expand all variables at this level. */
1074 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1075 if (TREE_USED (t))
1076 expand_one_var (t, toplevel, true);
1078 this_sv_num = stack_vars_num;
1080 /* Expand all variables at containing levels. */
1081 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1082 expand_used_vars_for_block (t, false);
1084 /* Since we do not track exact variable lifetimes (which is not even
1085 possible for variables whose address escapes), we mirror the block
1086 tree in the interference graph. Here we cause all variables at this
1087 level, and all sublevels, to conflict. Do make certain that a
1088 variable conflicts with itself. */
1089 if (old_sv_num < this_sv_num)
1091 new_sv_num = stack_vars_num;
1092 resize_stack_vars_conflict (new_sv_num);
1094 for (i = old_sv_num; i < new_sv_num; ++i)
1095 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1096 add_stack_var_conflict (i, j);
1100 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1101 and clear TREE_USED on all local variables. */
1103 static void
1104 clear_tree_used (tree block)
1106 tree t;
1108 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1109 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1110 TREE_USED (t) = 0;
1112 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1113 clear_tree_used (t);
1116 /* Examine TYPE and determine a bit mask of the following features. */
1118 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1119 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1120 #define SPCT_HAS_ARRAY 4
1121 #define SPCT_HAS_AGGREGATE 8
1123 static unsigned int
1124 stack_protect_classify_type (tree type)
1126 unsigned int ret = 0;
1127 tree t;
1129 switch (TREE_CODE (type))
1131 case ARRAY_TYPE:
1132 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1133 if (t == char_type_node
1134 || t == signed_char_type_node
1135 || t == unsigned_char_type_node)
1137 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1138 unsigned HOST_WIDE_INT len;
1140 if (!TYPE_SIZE_UNIT (type)
1141 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1142 len = max;
1143 else
1144 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1146 if (len < max)
1147 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1148 else
1149 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1151 else
1152 ret = SPCT_HAS_ARRAY;
1153 break;
1155 case UNION_TYPE:
1156 case QUAL_UNION_TYPE:
1157 case RECORD_TYPE:
1158 ret = SPCT_HAS_AGGREGATE;
1159 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1160 if (TREE_CODE (t) == FIELD_DECL)
1161 ret |= stack_protect_classify_type (TREE_TYPE (t));
1162 break;
1164 default:
1165 break;
1168 return ret;
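/* Editor's illustration (not part of the original source), assuming the
   default --param ssp-buffer-size=8: "char buf[4]" classifies as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[64]" as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct containing
   the latter also sets SPCT_HAS_AGGREGATE; stack_protect_decl_phase
   below maps these bits to allocation phases.  */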
1171 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1172 part of the local stack frame. Remember if we ever return nonzero for
1173 any variable in this function. The return value is the phase number in
1174 which the variable should be allocated. */
1176 static int
1177 stack_protect_decl_phase (tree decl)
1179 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1180 int ret = 0;
1182 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1183 has_short_buffer = true;
1185 if (flag_stack_protect == 2)
1187 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1188 && !(bits & SPCT_HAS_AGGREGATE))
1189 ret = 1;
1190 else if (bits & SPCT_HAS_ARRAY)
1191 ret = 2;
1193 else
1194 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1196 if (ret)
1197 has_protected_decls = true;
1199 return ret;
1202 /* Two helper routines that check for phase 1 and phase 2. These are used
1203 as callbacks for expand_stack_vars. */
1205 static bool
1206 stack_protect_decl_phase_1 (tree decl)
1208 return stack_protect_decl_phase (decl) == 1;
1211 static bool
1212 stack_protect_decl_phase_2 (tree decl)
1214 return stack_protect_decl_phase (decl) == 2;
1217 /* Ensure that variables in different stack protection phases conflict
1218 so that they are not merged and share the same stack slot. */
1220 static void
1221 add_stack_protection_conflicts (void)
1223 size_t i, j, n = stack_vars_num;
1224 unsigned char *phase;
1226 phase = XNEWVEC (unsigned char, n);
1227 for (i = 0; i < n; ++i)
1228 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1230 for (i = 0; i < n; ++i)
1232 unsigned char ph_i = phase[i];
1233 for (j = 0; j < i; ++j)
1234 if (ph_i != phase[j])
1235 add_stack_var_conflict (i, j);
1238 XDELETEVEC (phase);
1241 /* Create a decl for the guard at the top of the stack frame. */
1243 static void
1244 create_stack_guard (void)
1246 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1247 VAR_DECL, NULL, ptr_type_node);
1248 TREE_THIS_VOLATILE (guard) = 1;
1249 TREE_USED (guard) = 1;
1250 expand_one_stack_var (guard);
1251 crtl->stack_protect_guard = guard;
1254 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1255 expanding variables. Those variables that can be put into registers
1256 are allocated pseudos; those that can't are put on the stack.
1258 TOPLEVEL is true if this is the outermost BLOCK. */
1260 static HOST_WIDE_INT
1261 account_used_vars_for_block (tree block, bool toplevel)
1263 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1264 tree t;
1265 HOST_WIDE_INT size = 0;
1267 old_sv_num = toplevel ? 0 : stack_vars_num;
1269 /* Expand all variables at this level. */
1270 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1271 if (TREE_USED (t))
1272 size += expand_one_var (t, toplevel, false);
1274 this_sv_num = stack_vars_num;
1276 /* Expand all variables at containing levels. */
1277 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1278 size += account_used_vars_for_block (t, false);
1280 /* Since we do not track exact variable lifetimes (which is not even
1281 possible for variables whose address escapes), we mirror the block
1282 tree in the interference graph. Here we cause all variables at this
1283 level, and all sublevels, to conflict. Do make certain that a
1284 variable conflicts with itself. */
1285 if (old_sv_num < this_sv_num)
1287 new_sv_num = stack_vars_num;
1288 resize_stack_vars_conflict (new_sv_num);
1290 for (i = old_sv_num; i < new_sv_num; ++i)
1291 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1292 add_stack_var_conflict (i, j);
1294 return size;
1297 /* Prepare for expanding variables. */
1298 static void
1299 init_vars_expansion (void)
1301 tree t;
1302 /* Set TREE_USED on all variables in the local_decls. */
1303 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1304 TREE_USED (TREE_VALUE (t)) = 1;
1306 /* Clear TREE_USED on all variables associated with a block scope. */
1307 clear_tree_used (DECL_INITIAL (current_function_decl));
1309 /* Initialize local stack smashing state. */
1310 has_protected_decls = false;
1311 has_short_buffer = false;
1314 /* Free up stack variable graph data. */
1315 static void
1316 fini_vars_expansion (void)
1318 XDELETEVEC (stack_vars);
1319 XDELETEVEC (stack_vars_sorted);
1320 XDELETEVEC (stack_vars_conflict);
1321 stack_vars = NULL;
1322 stack_vars_alloc = stack_vars_num = 0;
1323 stack_vars_conflict = NULL;
1324 stack_vars_conflict_alloc = 0;
1327 /* Make a fair guess for the size of the stack frame of the current
1328 function. This doesn't have to be exact, the result is only used
1329 in the inline heuristics. So we don't want to run the full stack
1330 var packing algorithm (which is quadratic in the number of stack
1331 vars). Instead, we calculate the total size of all stack vars.
1332 This turns out to be a pretty fair estimate -- packing of stack
1333 vars doesn't happen very often. */
1335 HOST_WIDE_INT
1336 estimated_stack_frame_size (void)
1338 HOST_WIDE_INT size = 0;
1339 size_t i;
1340 tree t, outer_block = DECL_INITIAL (current_function_decl);
1342 init_vars_expansion ();
1344 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1346 tree var = TREE_VALUE (t);
1348 if (TREE_USED (var))
1349 size += expand_one_var (var, true, false);
1350 TREE_USED (var) = 1;
1352 size += account_used_vars_for_block (outer_block, true);
1354 if (stack_vars_num > 0)
1356 /* Fake sorting the stack vars for account_stack_vars (). */
1357 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1358 for (i = 0; i < stack_vars_num; ++i)
1359 stack_vars_sorted[i] = i;
1360 size += account_stack_vars ();
1361 fini_vars_expansion ();
1364 return size;
1367 /* Expand all variables used in the function. */
1369 static void
1370 expand_used_vars (void)
1372 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1373 unsigned i;
1375 /* Compute the phase of the stack frame for this function. */
1377 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1378 int off = STARTING_FRAME_OFFSET % align;
1379 frame_phase = off ? align - off : 0;
1382 init_vars_expansion ();
1384 for (i = 0; i < SA.map->num_partitions; i++)
1386 tree var = partition_to_var (SA.map, i);
1388 gcc_assert (is_gimple_reg (var));
1389 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1390 expand_one_var (var, true, true);
1391 else
1393 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1394 contain the default def (representing the parm or result itself)
1395 we don't do anything here. But those which don't contain the
1396 default def (representing a temporary based on the parm/result)
1397 we need to allocate space just like for normal VAR_DECLs. */
1398 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1400 expand_one_var (var, true, true);
1401 gcc_assert (SA.partition_to_pseudo[i]);
1406 /* At this point all variables on the local_decls with TREE_USED
1407 set are not associated with any block scope. Lay them out. */
1408 t = cfun->local_decls;
1409 cfun->local_decls = NULL_TREE;
1410 for (; t; t = next)
1412 tree var = TREE_VALUE (t);
1413 bool expand_now = false;
1415 next = TREE_CHAIN (t);
1417 /* Expanded above already. */
1418 if (is_gimple_reg (var))
1420 TREE_USED (var) = 0;
1421 ggc_free (t);
1422 continue;
1424 /* We didn't set a block for static or extern because it's hard
1425 to tell the difference between a global variable (re)declared
1426 in a local scope, and one that's really declared there to
1427 begin with. And it doesn't really matter much, since we're
1428 not giving them stack space. Expand them now. */
1429 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1430 expand_now = true;
1432 /* If the variable is not associated with any block, then it
1433 was created by the optimizers, and could be live anywhere
1434 in the function. */
1435 else if (TREE_USED (var))
1436 expand_now = true;
1438 /* Finally, mark all variables on the list as used. We'll use
1439 this in a moment when we expand those associated with scopes. */
1440 TREE_USED (var) = 1;
1442 if (expand_now)
1444 expand_one_var (var, true, true);
1445 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1447 rtx rtl = DECL_RTL_IF_SET (var);
1449 /* Keep artificial non-ignored vars in cfun->local_decls
1450 chain until instantiate_decls. */
1451 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1453 TREE_CHAIN (t) = cfun->local_decls;
1454 cfun->local_decls = t;
1455 continue;
1460 ggc_free (t);
1463 /* At this point, all variables within the block tree with TREE_USED
1464 set are actually used by the optimized function. Lay them out. */
1465 expand_used_vars_for_block (outer_block, true);
1467 if (stack_vars_num > 0)
1469 /* Due to the way alias sets work, no variables with non-conflicting
1470 alias sets may be assigned the same address. Add conflicts to
1471 reflect this. */
1472 add_alias_set_conflicts ();
1474 /* If stack protection is enabled, we don't share space between
1475 vulnerable data and non-vulnerable data. */
1476 if (flag_stack_protect)
1477 add_stack_protection_conflicts ();
1479 /* Now that we have collected all stack variables, and have computed a
1480 minimal interference graph, attempt to save some stack space. */
1481 partition_stack_vars ();
1482 if (dump_file)
1483 dump_stack_var_partition ();
1486 /* There are several conditions under which we should create a
1487 stack guard: protect-all, alloca used, protected decls present. */
1488 if (flag_stack_protect == 2
1489 || (flag_stack_protect
1490 && (cfun->calls_alloca || has_protected_decls)))
1491 create_stack_guard ();
1493 /* Assign rtl to each variable based on these partitions. */
1494 if (stack_vars_num > 0)
1496 /* Reorder decls to be protected by iterating over the variables
1497 array multiple times, and allocating out of each phase in turn. */
1498 /* ??? We could probably integrate this into the qsort we did
1499 earlier, such that we naturally see these variables first,
1500 and thus naturally allocate things in the right order. */
1501 if (has_protected_decls)
1503 /* Phase 1 contains only character arrays. */
1504 expand_stack_vars (stack_protect_decl_phase_1);
1506 /* Phase 2 contains other kinds of arrays. */
1507 if (flag_stack_protect == 2)
1508 expand_stack_vars (stack_protect_decl_phase_2);
1511 expand_stack_vars (NULL);
1513 fini_vars_expansion ();
1516 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1517 if (STACK_ALIGNMENT_NEEDED)
1519 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1520 if (!FRAME_GROWS_DOWNWARD)
1521 frame_offset += align - 1;
1522 frame_offset &= -align;
1527 /* If we need to produce a detailed dump, print the tree representation
1528 for STMT to the dump file. SINCE is the last RTX after which the RTL
1529 generated for STMT should have been appended. */
1531 static void
1532 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1534 if (dump_file && (dump_flags & TDF_DETAILS))
1536 fprintf (dump_file, "\n;; ");
1537 print_gimple_stmt (dump_file, stmt, 0,
1538 TDF_SLIM | (dump_flags & TDF_LINENO));
1539 fprintf (dump_file, "\n");
1541 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1545 /* Maps the blocks that do not contain tree labels to rtx labels. */
1547 static struct pointer_map_t *lab_rtx_for_bb;
1549 /* Returns the label_rtx expression for a label starting basic block BB. */
1551 static rtx
1552 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1554 gimple_stmt_iterator gsi;
1555 tree lab;
1556 gimple lab_stmt;
1557 void **elt;
1559 if (bb->flags & BB_RTL)
1560 return block_label (bb);
1562 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1563 if (elt)
1564 return (rtx) *elt;
1566 /* Find the tree label if it is present. */
1568 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1570 lab_stmt = gsi_stmt (gsi);
1571 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1572 break;
1574 lab = gimple_label_label (lab_stmt);
1575 if (DECL_NONLOCAL (lab))
1576 break;
1578 return label_rtx (lab);
1581 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1582 *elt = gen_label_rtx ();
1583 return (rtx) *elt;
1587 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1588 of a basic block where we just expanded the conditional at the end,
1589 possibly clean up the CFG and instruction sequence. LAST is the
1590 last instruction before the just emitted jump sequence. */
1592 static void
1593 maybe_cleanup_end_of_block (edge e, rtx last)
1595 /* Special case: when jumpif decides that the condition is
1596 trivial it emits an unconditional jump (and the necessary
1597 barrier). But we still have two edges, the fallthru one is
1598 wrong. purge_dead_edges would clean this up later. Unfortunately
1599 we have to insert insns (and split edges) before
1600 find_many_sub_basic_blocks and hence before purge_dead_edges.
1601 But splitting edges might create new blocks which depend on the
1602 fact that if there are two edges there's no barrier. So the
1603 barrier would get lost and verify_flow_info would ICE. Instead
1604 of auditing all edge splitters to care for the barrier (which
1605 normally isn't there in a cleaned CFG), fix it here. */
1606 if (BARRIER_P (get_last_insn ()))
1608 rtx insn;
1609 remove_edge (e);
1610 /* Now, we have a single successor block, if we have insns to
1611 insert on the remaining edge we potentially will insert
1612 it at the end of this block (if the dest block isn't feasible)
1613 in order to avoid splitting the edge. This insertion will take
1614 place in front of the last jump. But we might have emitted
1615 multiple jumps (conditional and one unconditional) to the
1616 same destination. Inserting in front of the last one then
1617 is a problem. See PR 40021. We fix this by deleting all
1618 jumps except the last unconditional one. */
1619 insn = PREV_INSN (get_last_insn ());
1620 /* Make sure we have an unconditional jump. Otherwise we're
1621 confused. */
1622 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1623 for (insn = PREV_INSN (insn); insn != last;)
1625 insn = PREV_INSN (insn);
1626 if (JUMP_P (NEXT_INSN (insn)))
1627 delete_insn (NEXT_INSN (insn));
1632 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1633 Returns a new basic block if we've terminated the current basic
1634 block and created a new one. */
1636 static basic_block
1637 expand_gimple_cond (basic_block bb, gimple stmt)
1639 basic_block new_bb, dest;
1640 edge new_edge;
1641 edge true_edge;
1642 edge false_edge;
1643 rtx last2, last;
1644 enum tree_code code;
1645 tree op0, op1;
1647 code = gimple_cond_code (stmt);
1648 op0 = gimple_cond_lhs (stmt);
1649 op1 = gimple_cond_rhs (stmt);
1650 /* We're sometimes presented with such code:
1651 D.123_1 = x < y;
1652 if (D.123_1 != 0)
1654 This would expand to two comparisons which then later might
1655 be cleaned up by combine. But some pattern matchers like if-conversion
1656 work better when there's only one compare, so make up for this
1657 here as special exception if TER would have made the same change. */
1658 if (gimple_cond_single_var_p (stmt)
1659 && SA.values
1660 && TREE_CODE (op0) == SSA_NAME
1661 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1663 gimple second = SSA_NAME_DEF_STMT (op0);
1664 if (gimple_code (second) == GIMPLE_ASSIGN
1665 && TREE_CODE_CLASS (gimple_assign_rhs_code (second))
1666 == tcc_comparison)
1668 code = gimple_assign_rhs_code (second);
1669 op0 = gimple_assign_rhs1 (second);
1670 op1 = gimple_assign_rhs2 (second);
1674 last2 = last = get_last_insn ();
1676 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1677 if (gimple_has_location (stmt))
1679 set_curr_insn_source_location (gimple_location (stmt));
1680 set_curr_insn_block (gimple_block (stmt));
1683 /* These flags have no purpose in RTL land. */
1684 true_edge->flags &= ~EDGE_TRUE_VALUE;
1685 false_edge->flags &= ~EDGE_FALSE_VALUE;
1687 /* We can either have a pure conditional jump with one fallthru edge or
1688 two-way jump that needs to be decomposed into two basic blocks. */
1689 if (false_edge->dest == bb->next_bb)
1691 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
1692 add_reg_br_prob_note (last, true_edge->probability);
1693 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1694 if (true_edge->goto_locus)
1696 set_curr_insn_source_location (true_edge->goto_locus);
1697 set_curr_insn_block (true_edge->goto_block);
1698 true_edge->goto_locus = curr_insn_locator ();
1700 true_edge->goto_block = NULL;
1701 false_edge->flags |= EDGE_FALLTHRU;
1702 maybe_cleanup_end_of_block (false_edge, last);
1703 return NULL;
1705 if (true_edge->dest == bb->next_bb)
1707 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
1708 add_reg_br_prob_note (last, false_edge->probability);
1709 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1710 if (false_edge->goto_locus)
1712 set_curr_insn_source_location (false_edge->goto_locus);
1713 set_curr_insn_block (false_edge->goto_block);
1714 false_edge->goto_locus = curr_insn_locator ();
1716 false_edge->goto_block = NULL;
1717 true_edge->flags |= EDGE_FALLTHRU;
1718 maybe_cleanup_end_of_block (true_edge, last);
1719 return NULL;
1722 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
1723 add_reg_br_prob_note (last, true_edge->probability);
1724 last = get_last_insn ();
1725 if (false_edge->goto_locus)
1727 set_curr_insn_source_location (false_edge->goto_locus);
1728 set_curr_insn_block (false_edge->goto_block);
1729 false_edge->goto_locus = curr_insn_locator ();
1731 false_edge->goto_block = NULL;
1732 emit_jump (label_rtx_for_bb (false_edge->dest));
1734 BB_END (bb) = last;
1735 if (BARRIER_P (BB_END (bb)))
1736 BB_END (bb) = PREV_INSN (BB_END (bb));
1737 update_bb_for_insn (bb);
1739 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1740 dest = false_edge->dest;
1741 redirect_edge_succ (false_edge, new_bb);
1742 false_edge->flags |= EDGE_FALLTHRU;
1743 new_bb->count = false_edge->count;
1744 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1745 new_edge = make_edge (new_bb, dest, 0);
1746 new_edge->probability = REG_BR_PROB_BASE;
1747 new_edge->count = new_bb->count;
1748 if (BARRIER_P (BB_END (new_bb)))
1749 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1750 update_bb_for_insn (new_bb);
1752 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1754 if (true_edge->goto_locus)
1756 set_curr_insn_source_location (true_edge->goto_locus);
1757 set_curr_insn_block (true_edge->goto_block);
1758 true_edge->goto_locus = curr_insn_locator ();
1760 true_edge->goto_block = NULL;
1762 return new_bb;
1765 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1766 statement STMT. */
1768 static void
1769 expand_call_stmt (gimple stmt)
1771 tree exp;
1772 tree lhs = gimple_call_lhs (stmt);
1773 size_t i;
1775 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1777 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1778 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1779 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1781 for (i = 0; i < gimple_call_num_args (stmt); i++)
1782 CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i);
1784 if (gimple_has_side_effects (stmt))
1785 TREE_SIDE_EFFECTS (exp) = 1;
1787 if (gimple_call_nothrow_p (stmt))
1788 TREE_NOTHROW (exp) = 1;
1790 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1791 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1792 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1793 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1794 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1795 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1796 TREE_BLOCK (exp) = gimple_block (stmt);
1798 if (lhs)
1799 expand_assignment (lhs, exp, false);
1800 else
1801 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1804 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1805 STMT that doesn't require special handling for outgoing edges. That
1806 is, no tailcalls and no GIMPLE_COND. */
1808 static void
1809 expand_gimple_stmt_1 (gimple stmt)
1811 tree op0;
1812 switch (gimple_code (stmt))
1814 case GIMPLE_GOTO:
1815 op0 = gimple_goto_dest (stmt);
1816 if (TREE_CODE (op0) == LABEL_DECL)
1817 expand_goto (op0);
1818 else
1819 expand_computed_goto (op0);
1820 break;
1821 case GIMPLE_LABEL:
1822 expand_label (gimple_label_label (stmt));
1823 break;
1824 case GIMPLE_NOP:
1825 case GIMPLE_PREDICT:
1826 break;
1827 case GIMPLE_SWITCH:
1828 expand_case (stmt);
1829 break;
1830 case GIMPLE_ASM:
1831 expand_asm_stmt (stmt);
1832 break;
1833 case GIMPLE_CALL:
1834 expand_call_stmt (stmt);
1835 break;
1837 case GIMPLE_RETURN:
1838 op0 = gimple_return_retval (stmt);
1840 if (op0 && op0 != error_mark_node)
1842 tree result = DECL_RESULT (current_function_decl);
1844 /* If we are not returning the current function's RESULT_DECL,
1845 build an assignment to it. */
1846 if (op0 != result)
1848 /* I believe that a function's RESULT_DECL is unique. */
1849 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1851 /* ??? We'd like to use simply expand_assignment here,
1852 but this fails if the value is of BLKmode but the return
1853 decl is a register. expand_return has special handling
1854 for this combination, which eventually should move
1855 to common code. See comments there. Until then, let's
1856 build a modify expression :-/ */
1857 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1858 result, op0);
1861 if (!op0)
1862 expand_null_return ();
1863 else
1864 expand_return (op0);
1865 break;
1867 case GIMPLE_ASSIGN:
1869 tree lhs = gimple_assign_lhs (stmt);
1871 /* Tree expand used to fiddle with |= and &= of two bitfield
1872 COMPONENT_REFs here. This can't happen with gimple; the LHS
1873 of binary assigns must be a gimple reg. */
1875 if (TREE_CODE (lhs) != SSA_NAME
1876 || get_gimple_rhs_class (gimple_expr_code (stmt))
1877 == GIMPLE_SINGLE_RHS)
1879 tree rhs = gimple_assign_rhs1 (stmt);
1880 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1881 == GIMPLE_SINGLE_RHS);
1882 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1883 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1884 expand_assignment (lhs, rhs,
1885 gimple_assign_nontemporal_move_p (stmt));
1887 else
1889 rtx target, temp;
1890 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1891 struct separate_ops ops;
1892 bool promoted = false;
1894 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1895 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1896 promoted = true;
1898 ops.code = gimple_assign_rhs_code (stmt);
1899 ops.type = TREE_TYPE (lhs);
1900 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1902 case GIMPLE_BINARY_RHS:
1903 ops.op1 = gimple_assign_rhs2 (stmt);
1904 /* Fallthru */
1905 case GIMPLE_UNARY_RHS:
1906 ops.op0 = gimple_assign_rhs1 (stmt);
1907 break;
1908 default:
1909 gcc_unreachable ();
1911 ops.location = gimple_location (stmt);
1913    /* If we want to use a nontemporal store, force the value into a
1914       register first.  If we store into a promoted register,
1915 don't directly expand to target. */
1916 temp = nontemporal || promoted ? NULL_RTX : target;
1917 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1918 EXPAND_NORMAL);
1920 if (temp == target)
1922 else if (promoted)
1924 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
1925 /* If TEMP is a VOIDmode constant, use convert_modes to make
1926 sure that we properly convert it. */
1927 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1929 temp = convert_modes (GET_MODE (target),
1930 TYPE_MODE (ops.type),
1931 temp, unsignedp);
1932 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
1933 GET_MODE (target), temp, unsignedp);
1936 convert_move (SUBREG_REG (target), temp, unsignedp);
1938 else if (nontemporal && emit_storent_insn (target, temp))
1940 else
1942 temp = force_operand (temp, target);
1943 if (temp != target)
1944 emit_move_insn (target, temp);
1948 break;
1950 default:
1951 gcc_unreachable ();
1955 /* Expand one gimple statement STMT and return the last RTL instruction
1956 before any of the newly generated ones.
1958 In addition to generating the necessary RTL instructions this also
1959 sets REG_EH_REGION notes if necessary and sets the current source
1960 location for diagnostics. */
1962 static rtx
1963 expand_gimple_stmt (gimple stmt)
1965 int lp_nr = 0;
1966 rtx last = NULL;
1967 location_t saved_location = input_location;
1969 last = get_last_insn ();
1971 /* If this is an expression of some kind and it has an associated line
1972 number, then emit the line number before expanding the expression.
1974 We need to save and restore the file and line information so that
1975 errors discovered during expansion are emitted with the right
1976     information.  It would be better if the diagnostic routines
1977 used the file/line information embedded in the tree nodes rather
1978 than globals. */
1979 gcc_assert (cfun);
1981 if (gimple_has_location (stmt))
1983 input_location = gimple_location (stmt);
1984 set_curr_insn_source_location (input_location);
1986 /* Record where the insns produced belong. */
1987 set_curr_insn_block (gimple_block (stmt));
1990 expand_gimple_stmt_1 (stmt);
1991 /* Free any temporaries used to evaluate this statement. */
1992 free_temp_slots ();
1994 input_location = saved_location;
1996 /* Mark all insns that may trap. */
1997 lp_nr = lookup_stmt_eh_lp (stmt);
1998 if (lp_nr)
2000 rtx insn;
2001 for (insn = next_real_insn (last); insn;
2002 insn = next_real_insn (insn))
2004 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2005 /* If we want exceptions for non-call insns, any
2006 may_trap_p instruction may throw. */
2007 && GET_CODE (PATTERN (insn)) != CLOBBER
2008 && GET_CODE (PATTERN (insn)) != USE
2009 && insn_could_throw_p (insn))
2010 make_reg_eh_region_note (insn, 0, lp_nr);
2014 return last;
2017 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2018 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2019 generated a tail call (something that might be denied by the ABI
2020 rules governing the call; see calls.c).
2022 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2023 can still reach the rest of BB. The case here is __builtin_sqrt,
2024 where the NaN result goes through the external function (with a
2025 tailcall) and the normal result happens via a sqrt instruction. */
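/* Illustrative sketch, not taken from this file: for something like
     double f (double x) { return __builtin_sqrt (x); }
   the expander may emit a hardware sqrt instruction for the common case
   and a sibcall to the library sqrt for the NaN case, so the sibcall is
   conditional and control can still fall through past it.  */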
2027 static basic_block
2028 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2030 rtx last2, last;
2031 edge e;
2032 edge_iterator ei;
2033 int probability;
2034 gcov_type count;
2036 last2 = last = expand_gimple_stmt (stmt);
2038 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2039 if (CALL_P (last) && SIBLING_CALL_P (last))
2040 goto found;
2042 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2044 *can_fallthru = true;
2045 return NULL;
2047 found:
2048 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2049 Any instructions emitted here are about to be deleted. */
2050 do_pending_stack_adjust ();
2052 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2053 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2054 EH or abnormal edges, we shouldn't have created a tail call in
2055 the first place. So it seems to me we should just be removing
2056 all edges here, or redirecting the existing fallthru edge to
2057 the exit block. */
2059 probability = 0;
2060 count = 0;
2062 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2064 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2066 if (e->dest != EXIT_BLOCK_PTR)
2068 e->dest->count -= e->count;
2069 e->dest->frequency -= EDGE_FREQUENCY (e);
2070 if (e->dest->count < 0)
2071 e->dest->count = 0;
2072 if (e->dest->frequency < 0)
2073 e->dest->frequency = 0;
2075 count += e->count;
2076 probability += e->probability;
2077 remove_edge (e);
2079 else
2080 ei_next (&ei);
2083 /* This is somewhat ugly: the call_expr expander often emits instructions
2084 after the sibcall (to perform the function return). These confuse the
2085 find_many_sub_basic_blocks code, so we need to get rid of these. */
2086 last = NEXT_INSN (last);
2087 gcc_assert (BARRIER_P (last));
2089 *can_fallthru = false;
2090 while (NEXT_INSN (last))
2092      /* For instance, an sqrt builtin expander expands an if with a
2093         sibcall in the then branch and a label for the else branch.  */
2094 if (LABEL_P (NEXT_INSN (last)))
2096 *can_fallthru = true;
2097 break;
2099 delete_insn (NEXT_INSN (last));
2102 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2103 e->probability += probability;
2104 e->count += count;
2105 BB_END (bb) = last;
2106 update_bb_for_insn (bb);
2108 if (NEXT_INSN (last))
2110 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2112 last = BB_END (bb);
2113 if (BARRIER_P (last))
2114 BB_END (bb) = PREV_INSN (last);
2117 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2119 return bb;
2122 /* Return the difference between the floor and the truncated result of
2123 a signed division by OP1 with remainder MOD. */
2124 static rtx
2125 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2127 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
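  /* Worked example (illustrative): -7 / 2 truncates to -3 with MOD = -1;
     OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and floor (-3.5) = -4.  */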
2128 return gen_rtx_IF_THEN_ELSE
2129 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2130 gen_rtx_IF_THEN_ELSE
2131 (mode, gen_rtx_LT (BImode,
2132 gen_rtx_DIV (mode, op1, mod),
2133 const0_rtx),
2134 constm1_rtx, const0_rtx),
2135 const0_rtx);
2138 /* Return the difference between the ceil and the truncated result of
2139 a signed division by OP1 with remainder MOD. */
2140 static rtx
2141 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2143 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
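  /* Worked example (illustrative): 7 / 2 truncates to 3 with MOD = 1;
     OP1 / MOD = 2 / 1 > 0, so the adjustment is 1 and ceil (3.5) = 4.  */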
2144 return gen_rtx_IF_THEN_ELSE
2145 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2146 gen_rtx_IF_THEN_ELSE
2147 (mode, gen_rtx_GT (BImode,
2148 gen_rtx_DIV (mode, op1, mod),
2149 const0_rtx),
2150 const1_rtx, const0_rtx),
2151 const0_rtx);
2154 /* Return the difference between the ceil and the truncated result of
2155 an unsigned division by OP1 with remainder MOD. */
2156 static rtx
2157 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2159 /* (mod != 0 ? 1 : 0) */
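  /* Worked example (illustrative): 7u / 2u truncates to 3 with MOD = 1,
     so the adjustment is 1 and the ceiling result is 4.  */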
2160 return gen_rtx_IF_THEN_ELSE
2161 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2162 const1_rtx, const0_rtx);
2165 /* Return the difference between the rounded and the truncated result
2166 of a signed division by OP1 with remainder MOD. Halfway cases are
2167 rounded away from zero, rather than to the nearest even number. */
2168 static rtx
2169 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2171 /* (abs (mod) >= abs (op1) - abs (mod)
2172 ? (op1 / mod > 0 ? 1 : -1)
2173 : 0) */
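  /* Worked example (illustrative): 7 / 2 truncates to 3 with MOD = 1;
     abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD > 0, so the
     adjustment is 1 and 3.5 rounds away from zero to 4.  */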
2174 return gen_rtx_IF_THEN_ELSE
2175 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2176 gen_rtx_MINUS (mode,
2177 gen_rtx_ABS (mode, op1),
2178 gen_rtx_ABS (mode, mod))),
2179 gen_rtx_IF_THEN_ELSE
2180 (mode, gen_rtx_GT (BImode,
2181 gen_rtx_DIV (mode, op1, mod),
2182 const0_rtx),
2183 const1_rtx, constm1_rtx),
2184 const0_rtx);
2187 /* Return the difference between the rounded and the truncated result
2188    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2189 are rounded away from zero, rather than to the nearest even
2190 number. */
2191 static rtx
2192 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2194 /* (mod >= op1 - mod ? 1 : 0) */
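  /* Worked example (illustrative): 7u / 2u truncates to 3 with MOD = 1;
     MOD >= OP1 - MOD (1 >= 1), so the adjustment is 1 and the result
     rounds up to 4.  */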
2195 return gen_rtx_IF_THEN_ELSE
2196 (mode, gen_rtx_GE (BImode, mod,
2197 gen_rtx_MINUS (mode, op1, mod)),
2198 const1_rtx, const0_rtx);
2201 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2202 any rtl. */
2204 static rtx
2205 convert_debug_memory_address (enum machine_mode mode, rtx x)
2207 enum machine_mode xmode = GET_MODE (x);
2209 #ifndef POINTERS_EXTEND_UNSIGNED
2210 gcc_assert (mode == Pmode);
2211 gcc_assert (xmode == mode || xmode == VOIDmode);
2212 #else
2213 gcc_assert (mode == Pmode || mode == ptr_mode);
2215 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2216 return x;
2218 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2219 x = simplify_gen_subreg (mode, x, xmode,
2220 subreg_lowpart_offset
2221 (mode, xmode));
2222 else if (POINTERS_EXTEND_UNSIGNED > 0)
2223 x = gen_rtx_ZERO_EXTEND (mode, x);
2224 else if (!POINTERS_EXTEND_UNSIGNED)
2225 x = gen_rtx_SIGN_EXTEND (mode, x);
2226 else
2227 gcc_unreachable ();
2228 #endif /* POINTERS_EXTEND_UNSIGNED */
2230 return x;
2233 /* Return an RTX equivalent to the value of the tree expression
2234 EXP. */
2236 static rtx
2237 expand_debug_expr (tree exp)
2239 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2240 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2241 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2242 addr_space_t as;
2243 enum machine_mode address_mode;
2244 enum machine_mode pointer_mode;
2246 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2248 case tcc_expression:
2249 switch (TREE_CODE (exp))
2251 case COND_EXPR:
2252 goto ternary;
2254 case TRUTH_ANDIF_EXPR:
2255 case TRUTH_ORIF_EXPR:
2256 case TRUTH_AND_EXPR:
2257 case TRUTH_OR_EXPR:
2258 case TRUTH_XOR_EXPR:
2259 goto binary;
2261 case TRUTH_NOT_EXPR:
2262 goto unary;
2264 default:
2265 break;
2267 break;
2269 ternary:
2270 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2271 if (!op2)
2272 return NULL_RTX;
2273 /* Fall through. */
2275 binary:
2276 case tcc_binary:
2277 case tcc_comparison:
2278 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2279 if (!op1)
2280 return NULL_RTX;
2281 /* Fall through. */
2283 unary:
2284 case tcc_unary:
2285 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2286 if (!op0)
2287 return NULL_RTX;
2288 break;
2290 case tcc_type:
2291 case tcc_statement:
2292 gcc_unreachable ();
2294 case tcc_constant:
2295 case tcc_exceptional:
2296 case tcc_declaration:
2297 case tcc_reference:
2298 case tcc_vl_exp:
2299 break;
2302 switch (TREE_CODE (exp))
2304 case STRING_CST:
2305 if (!lookup_constant_def (exp))
2307 if (strlen (TREE_STRING_POINTER (exp)) + 1
2308 != (size_t) TREE_STRING_LENGTH (exp))
2309 return NULL_RTX;
2310 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2311 op0 = gen_rtx_MEM (BLKmode, op0);
2312 set_mem_attributes (op0, exp, 0);
2313 return op0;
2315 /* Fall through... */
2317 case INTEGER_CST:
2318 case REAL_CST:
2319 case FIXED_CST:
2320 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2321 return op0;
2323 case COMPLEX_CST:
2324 gcc_assert (COMPLEX_MODE_P (mode));
2325 op0 = expand_debug_expr (TREE_REALPART (exp));
2326 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2327 return gen_rtx_CONCAT (mode, op0, op1);
2329 case DEBUG_EXPR_DECL:
2330 op0 = DECL_RTL_IF_SET (exp);
2332 if (op0)
2333 return op0;
2335 op0 = gen_rtx_DEBUG_EXPR (mode);
2336 DEBUG_EXPR_TREE_DECL (op0) = exp;
2337 SET_DECL_RTL (exp, op0);
2339 return op0;
2341 case VAR_DECL:
2342 case PARM_DECL:
2343 case FUNCTION_DECL:
2344 case LABEL_DECL:
2345 case CONST_DECL:
2346 case RESULT_DECL:
2347 op0 = DECL_RTL_IF_SET (exp);
2349 /* This decl was probably optimized away. */
2350 if (!op0)
2352 if (TREE_CODE (exp) != VAR_DECL
2353 || DECL_EXTERNAL (exp)
2354 || !TREE_STATIC (exp)
2355 || !DECL_NAME (exp)
2356 || DECL_HARD_REGISTER (exp)
2357 || mode == VOIDmode)
2358 return NULL;
2360 op0 = DECL_RTL (exp);
2361 SET_DECL_RTL (exp, NULL);
2362 if (!MEM_P (op0)
2363 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2364 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2365 return NULL;
2367 else
2368 op0 = copy_rtx (op0);
2370 if (GET_MODE (op0) == BLKmode)
2372 gcc_assert (MEM_P (op0));
2373 op0 = adjust_address_nv (op0, mode, 0);
2374 return op0;
2377 /* Fall through. */
2379 adjust_mode:
2380 case PAREN_EXPR:
2381 case NOP_EXPR:
2382 case CONVERT_EXPR:
2384 enum machine_mode inner_mode = GET_MODE (op0);
2386 if (mode == inner_mode)
2387 return op0;
2389 if (inner_mode == VOIDmode)
2391 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2392 if (mode == inner_mode)
2393 return op0;
2396 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2398 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2399 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2400 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2401 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2402 else
2403 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2405 else if (FLOAT_MODE_P (mode))
2407 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2408 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2409 else
2410 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2412 else if (FLOAT_MODE_P (inner_mode))
2414 if (unsignedp)
2415 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2416 else
2417 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2419 else if (CONSTANT_P (op0)
2420 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2421 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2422 subreg_lowpart_offset (mode,
2423 inner_mode));
2424 else if (unsignedp)
2425 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2426 else
2427 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2429 return op0;
2432 case INDIRECT_REF:
2433 case ALIGN_INDIRECT_REF:
2434 case MISALIGNED_INDIRECT_REF:
2435 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2436 if (!op0)
2437 return NULL;
2439 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2440 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2441 else
2442 as = ADDR_SPACE_GENERIC;
2444 address_mode = targetm.addr_space.address_mode (as);
2445 pointer_mode = targetm.addr_space.pointer_mode (as);
2447 gcc_assert (GET_MODE (op0) == address_mode
2448 || GET_MODE (op0) == pointer_mode
2449 || GET_CODE (op0) == CONST_INT
2450 || GET_CODE (op0) == CONST_DOUBLE);
2452 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2454 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2455 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2458 op0 = gen_rtx_MEM (mode, op0);
2460 set_mem_attributes (op0, exp, 0);
2461 set_mem_addr_space (op0, as);
2463 return op0;
2465 case TARGET_MEM_REF:
2466 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2467 return NULL;
2469 op0 = expand_debug_expr
2470 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)),
2471 exp));
2472 if (!op0)
2473 return NULL;
2475 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2476 address_mode = targetm.addr_space.address_mode (as);
2477 pointer_mode = targetm.addr_space.pointer_mode (as);
2479 gcc_assert (GET_MODE (op0) == address_mode
2480 || GET_MODE (op0) == pointer_mode
2481 || GET_CODE (op0) == CONST_INT
2482 || GET_CODE (op0) == CONST_DOUBLE);
2484 op0 = gen_rtx_MEM (mode, op0);
2486 set_mem_attributes (op0, exp, 0);
2487 set_mem_addr_space (op0, as);
2489 return op0;
2491 case ARRAY_REF:
2492 case ARRAY_RANGE_REF:
2493 case COMPONENT_REF:
2494 case BIT_FIELD_REF:
2495 case REALPART_EXPR:
2496 case IMAGPART_EXPR:
2497 case VIEW_CONVERT_EXPR:
2499 enum machine_mode mode1;
2500 HOST_WIDE_INT bitsize, bitpos;
2501 tree offset;
2502 int volatilep = 0;
2503 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2504 &mode1, &unsignedp, &volatilep, false);
2505 rtx orig_op0;
2507 if (bitsize == 0)
2508 return NULL;
2510 orig_op0 = op0 = expand_debug_expr (tem);
2512 if (!op0)
2513 return NULL;
2515 if (offset)
2517 enum machine_mode addrmode, offmode;
2519 gcc_assert (MEM_P (op0));
2521 op0 = XEXP (op0, 0);
2522 addrmode = GET_MODE (op0);
2523 if (addrmode == VOIDmode)
2524 addrmode = Pmode;
2526 op1 = expand_debug_expr (offset);
2527 if (!op1)
2528 return NULL;
2530 offmode = GET_MODE (op1);
2531 if (offmode == VOIDmode)
2532 offmode = TYPE_MODE (TREE_TYPE (offset));
2534 if (addrmode != offmode)
2535 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2536 subreg_lowpart_offset (addrmode,
2537 offmode));
2539 /* Don't use offset_address here, we don't need a
2540 recognizable address, and we don't want to generate
2541 code. */
2542 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2545 if (MEM_P (op0))
2547 if (mode1 == VOIDmode)
2548 /* Bitfield. */
2549 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2550 if (bitpos >= BITS_PER_UNIT)
2552 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2553 bitpos %= BITS_PER_UNIT;
2555 else if (bitpos < 0)
2557 HOST_WIDE_INT units
2558 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2559 op0 = adjust_address_nv (op0, mode1, units);
2560 bitpos += units * BITS_PER_UNIT;
2562 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2563 op0 = adjust_address_nv (op0, mode, 0);
2564 else if (GET_MODE (op0) != mode1)
2565 op0 = adjust_address_nv (op0, mode1, 0);
2566 else
2567 op0 = copy_rtx (op0);
2568 if (op0 == orig_op0)
2569 op0 = shallow_copy_rtx (op0);
2570 set_mem_attributes (op0, exp, 0);
2573 if (bitpos == 0 && mode == GET_MODE (op0))
2574 return op0;
2576 if (bitpos < 0)
2577 return NULL;
2579 if ((bitpos % BITS_PER_UNIT) == 0
2580 && bitsize == GET_MODE_BITSIZE (mode1))
2582 enum machine_mode opmode = GET_MODE (op0);
2584 gcc_assert (opmode != BLKmode);
2586 if (opmode == VOIDmode)
2587 opmode = mode1;
2589 /* This condition may hold if we're expanding the address
2590 right past the end of an array that turned out not to
2591 be addressable (i.e., the address was only computed in
2592 debug stmts). The gen_subreg below would rightfully
2593 crash, and the address doesn't really exist, so just
2594 drop it. */
2595 if (bitpos >= GET_MODE_BITSIZE (opmode))
2596 return NULL;
2598 return simplify_gen_subreg (mode, op0, opmode,
2599 bitpos / BITS_PER_UNIT);
2602 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2603 && TYPE_UNSIGNED (TREE_TYPE (exp))
2604 ? SIGN_EXTRACT
2605 : ZERO_EXTRACT, mode,
2606 GET_MODE (op0) != VOIDmode
2607 ? GET_MODE (op0) : mode1,
2608 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2611 case ABS_EXPR:
2612 return gen_rtx_ABS (mode, op0);
2614 case NEGATE_EXPR:
2615 return gen_rtx_NEG (mode, op0);
2617 case BIT_NOT_EXPR:
2618 return gen_rtx_NOT (mode, op0);
2620 case FLOAT_EXPR:
2621 if (unsignedp)
2622 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2623 else
2624 return gen_rtx_FLOAT (mode, op0);
2626 case FIX_TRUNC_EXPR:
2627 if (unsignedp)
2628 return gen_rtx_UNSIGNED_FIX (mode, op0);
2629 else
2630 return gen_rtx_FIX (mode, op0);
2632 case POINTER_PLUS_EXPR:
2633 case PLUS_EXPR:
2634 return gen_rtx_PLUS (mode, op0, op1);
2636 case MINUS_EXPR:
2637 return gen_rtx_MINUS (mode, op0, op1);
2639 case MULT_EXPR:
2640 return gen_rtx_MULT (mode, op0, op1);
2642 case RDIV_EXPR:
2643 case TRUNC_DIV_EXPR:
2644 case EXACT_DIV_EXPR:
2645 if (unsignedp)
2646 return gen_rtx_UDIV (mode, op0, op1);
2647 else
2648 return gen_rtx_DIV (mode, op0, op1);
2650 case TRUNC_MOD_EXPR:
2651 if (unsignedp)
2652 return gen_rtx_UMOD (mode, op0, op1);
2653 else
2654 return gen_rtx_MOD (mode, op0, op1);
2656 case FLOOR_DIV_EXPR:
2657 if (unsignedp)
2658 return gen_rtx_UDIV (mode, op0, op1);
2659 else
2661 rtx div = gen_rtx_DIV (mode, op0, op1);
2662 rtx mod = gen_rtx_MOD (mode, op0, op1);
2663 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2664 return gen_rtx_PLUS (mode, div, adj);
2667 case FLOOR_MOD_EXPR:
2668 if (unsignedp)
2669 return gen_rtx_UMOD (mode, op0, op1);
2670 else
2672 rtx mod = gen_rtx_MOD (mode, op0, op1);
2673 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2674 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2675 return gen_rtx_PLUS (mode, mod, adj);
2678 case CEIL_DIV_EXPR:
2679 if (unsignedp)
2681 rtx div = gen_rtx_UDIV (mode, op0, op1);
2682 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2683 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2684 return gen_rtx_PLUS (mode, div, adj);
2686 else
2688 rtx div = gen_rtx_DIV (mode, op0, op1);
2689 rtx mod = gen_rtx_MOD (mode, op0, op1);
2690 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2691 return gen_rtx_PLUS (mode, div, adj);
2694 case CEIL_MOD_EXPR:
2695 if (unsignedp)
2697 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2698 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2699 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2700 return gen_rtx_PLUS (mode, mod, adj);
2702 else
2704 rtx mod = gen_rtx_MOD (mode, op0, op1);
2705 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2706 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2707 return gen_rtx_PLUS (mode, mod, adj);
2710 case ROUND_DIV_EXPR:
2711 if (unsignedp)
2713 rtx div = gen_rtx_UDIV (mode, op0, op1);
2714 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2715 rtx adj = round_udiv_adjust (mode, mod, op1);
2716 return gen_rtx_PLUS (mode, div, adj);
2718 else
2720 rtx div = gen_rtx_DIV (mode, op0, op1);
2721 rtx mod = gen_rtx_MOD (mode, op0, op1);
2722 rtx adj = round_sdiv_adjust (mode, mod, op1);
2723 return gen_rtx_PLUS (mode, div, adj);
2726 case ROUND_MOD_EXPR:
2727 if (unsignedp)
2729 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2730 rtx adj = round_udiv_adjust (mode, mod, op1);
2731 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2732 return gen_rtx_PLUS (mode, mod, adj);
2734 else
2736 rtx mod = gen_rtx_MOD (mode, op0, op1);
2737 rtx adj = round_sdiv_adjust (mode, mod, op1);
2738 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2739 return gen_rtx_PLUS (mode, mod, adj);
2742 case LSHIFT_EXPR:
2743 return gen_rtx_ASHIFT (mode, op0, op1);
2745 case RSHIFT_EXPR:
2746 if (unsignedp)
2747 return gen_rtx_LSHIFTRT (mode, op0, op1);
2748 else
2749 return gen_rtx_ASHIFTRT (mode, op0, op1);
2751 case LROTATE_EXPR:
2752 return gen_rtx_ROTATE (mode, op0, op1);
2754 case RROTATE_EXPR:
2755 return gen_rtx_ROTATERT (mode, op0, op1);
2757 case MIN_EXPR:
2758 if (unsignedp)
2759 return gen_rtx_UMIN (mode, op0, op1);
2760 else
2761 return gen_rtx_SMIN (mode, op0, op1);
2763 case MAX_EXPR:
2764 if (unsignedp)
2765 return gen_rtx_UMAX (mode, op0, op1);
2766 else
2767 return gen_rtx_SMAX (mode, op0, op1);
2769 case BIT_AND_EXPR:
2770 case TRUTH_AND_EXPR:
2771 return gen_rtx_AND (mode, op0, op1);
2773 case BIT_IOR_EXPR:
2774 case TRUTH_OR_EXPR:
2775 return gen_rtx_IOR (mode, op0, op1);
2777 case BIT_XOR_EXPR:
2778 case TRUTH_XOR_EXPR:
2779 return gen_rtx_XOR (mode, op0, op1);
2781 case TRUTH_ANDIF_EXPR:
2782 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2784 case TRUTH_ORIF_EXPR:
2785 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2787 case TRUTH_NOT_EXPR:
2788 return gen_rtx_EQ (mode, op0, const0_rtx);
2790 case LT_EXPR:
2791 if (unsignedp)
2792 return gen_rtx_LTU (mode, op0, op1);
2793 else
2794 return gen_rtx_LT (mode, op0, op1);
2796 case LE_EXPR:
2797 if (unsignedp)
2798 return gen_rtx_LEU (mode, op0, op1);
2799 else
2800 return gen_rtx_LE (mode, op0, op1);
2802 case GT_EXPR:
2803 if (unsignedp)
2804 return gen_rtx_GTU (mode, op0, op1);
2805 else
2806 return gen_rtx_GT (mode, op0, op1);
2808 case GE_EXPR:
2809 if (unsignedp)
2810 return gen_rtx_GEU (mode, op0, op1);
2811 else
2812 return gen_rtx_GE (mode, op0, op1);
2814 case EQ_EXPR:
2815 return gen_rtx_EQ (mode, op0, op1);
2817 case NE_EXPR:
2818 return gen_rtx_NE (mode, op0, op1);
2820 case UNORDERED_EXPR:
2821 return gen_rtx_UNORDERED (mode, op0, op1);
2823 case ORDERED_EXPR:
2824 return gen_rtx_ORDERED (mode, op0, op1);
2826 case UNLT_EXPR:
2827 return gen_rtx_UNLT (mode, op0, op1);
2829 case UNLE_EXPR:
2830 return gen_rtx_UNLE (mode, op0, op1);
2832 case UNGT_EXPR:
2833 return gen_rtx_UNGT (mode, op0, op1);
2835 case UNGE_EXPR:
2836 return gen_rtx_UNGE (mode, op0, op1);
2838 case UNEQ_EXPR:
2839 return gen_rtx_UNEQ (mode, op0, op1);
2841 case LTGT_EXPR:
2842 return gen_rtx_LTGT (mode, op0, op1);
2844 case COND_EXPR:
2845 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2847 case COMPLEX_EXPR:
2848 gcc_assert (COMPLEX_MODE_P (mode));
2849 if (GET_MODE (op0) == VOIDmode)
2850 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2851 if (GET_MODE (op1) == VOIDmode)
2852 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2853 return gen_rtx_CONCAT (mode, op0, op1);
2855 case CONJ_EXPR:
2856 if (GET_CODE (op0) == CONCAT)
2857 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2858 gen_rtx_NEG (GET_MODE_INNER (mode),
2859 XEXP (op0, 1)));
2860 else
2862 enum machine_mode imode = GET_MODE_INNER (mode);
2863 rtx re, im;
2865 if (MEM_P (op0))
2867 re = adjust_address_nv (op0, imode, 0);
2868 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2870 else
2872 enum machine_mode ifmode = int_mode_for_mode (mode);
2873 enum machine_mode ihmode = int_mode_for_mode (imode);
2874 rtx halfsize;
2875 if (ifmode == BLKmode || ihmode == BLKmode)
2876 return NULL;
2877 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2878 re = op0;
2879 if (mode != ifmode)
2880 re = gen_rtx_SUBREG (ifmode, re, 0);
2881 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2882 if (imode != ihmode)
2883 re = gen_rtx_SUBREG (imode, re, 0);
2884 im = copy_rtx (op0);
2885 if (mode != ifmode)
2886 im = gen_rtx_SUBREG (ifmode, im, 0);
2887 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2888 if (imode != ihmode)
2889 im = gen_rtx_SUBREG (imode, im, 0);
2891 im = gen_rtx_NEG (imode, im);
2892 return gen_rtx_CONCAT (mode, re, im);
2895 case ADDR_EXPR:
2896 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2897 if (!op0 || !MEM_P (op0))
2898 return NULL;
2900 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2902 return op0;
2904 case VECTOR_CST:
2905 exp = build_constructor_from_list (TREE_TYPE (exp),
2906 TREE_VECTOR_CST_ELTS (exp));
2907 /* Fall through. */
2909 case CONSTRUCTOR:
2910 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2912 unsigned i;
2913 tree val;
2915 op0 = gen_rtx_CONCATN
2916 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2918 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2920 op1 = expand_debug_expr (val);
2921 if (!op1)
2922 return NULL;
2923 XVECEXP (op0, 0, i) = op1;
2926 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2928 op1 = expand_debug_expr
2929 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2931 if (!op1)
2932 return NULL;
2934 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2935 XVECEXP (op0, 0, i) = op1;
2938 return op0;
2940 else
2941 goto flag_unsupported;
2943 case CALL_EXPR:
2944 /* ??? Maybe handle some builtins? */
2945 return NULL;
2947 case SSA_NAME:
2949 int part = var_to_partition (SA.map, exp);
2951 if (part == NO_PARTITION)
2952 return NULL;
2954 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2956 op0 = SA.partition_to_pseudo[part];
2957 goto adjust_mode;
2960 case ERROR_MARK:
2961 return NULL;
2963 default:
2964 flag_unsupported:
2965 #ifdef ENABLE_CHECKING
2966 debug_tree (exp);
2967 gcc_unreachable ();
2968 #else
2969 return NULL;
2970 #endif
2974 /* Expand the _LOCs in debug insns. We run this after expanding all
2975 regular insns, so that any variables referenced in the function
2976 will have their DECL_RTLs set. */
2978 static void
2979 expand_debug_locations (void)
2981 rtx insn;
2982 rtx last = get_last_insn ();
2983 int save_strict_alias = flag_strict_aliasing;
2985 /* New alias sets while setting up memory attributes cause
2986      -fcompare-debug failures, even though they don't bring about any
2987 codegen changes. */
2988 flag_strict_aliasing = 0;
2990 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2991 if (DEBUG_INSN_P (insn))
2993 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
2994 rtx val;
2995 enum machine_mode mode;
2997 if (value == NULL_TREE)
2998 val = NULL_RTX;
2999 else
3001 val = expand_debug_expr (value);
3002 gcc_assert (last == get_last_insn ());
3005 if (!val)
3006 val = gen_rtx_UNKNOWN_VAR_LOC ();
3007 else
3009 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3011 gcc_assert (mode == GET_MODE (val)
3012 || (GET_MODE (val) == VOIDmode
3013 && (CONST_INT_P (val)
3014 || GET_CODE (val) == CONST_FIXED
3015 || GET_CODE (val) == CONST_DOUBLE
3016 || GET_CODE (val) == LABEL_REF)));
3019 INSN_VAR_LOCATION_LOC (insn) = val;
3022 flag_strict_aliasing = save_strict_alias;
3025 /* Expand basic block BB from GIMPLE trees to RTL. */
3027 static basic_block
3028 expand_gimple_basic_block (basic_block bb)
3030 gimple_stmt_iterator gsi;
3031 gimple_seq stmts;
3032 gimple stmt = NULL;
3033 rtx note, last;
3034 edge e;
3035 edge_iterator ei;
3036 void **elt;
3038 if (dump_file)
3039 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3040 bb->index);
3042 /* Note that since we are now transitioning from GIMPLE to RTL, we
3043 cannot use the gsi_*_bb() routines because they expect the basic
3044 block to be in GIMPLE, instead of RTL. Therefore, we need to
3045 access the BB sequence directly. */
3046 stmts = bb_seq (bb);
3047 bb->il.gimple = NULL;
3048 rtl_profile_for_bb (bb);
3049 init_rtl_bb_info (bb);
3050 bb->flags |= BB_RTL;
3052   /* Remove the RETURN_EXPR if we may fall through to the exit
3053 instead. */
3054 gsi = gsi_last (stmts);
3055 if (!gsi_end_p (gsi)
3056 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3058 gimple ret_stmt = gsi_stmt (gsi);
3060 gcc_assert (single_succ_p (bb));
3061 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3063 if (bb->next_bb == EXIT_BLOCK_PTR
3064 && !gimple_return_retval (ret_stmt))
3066 gsi_remove (&gsi, false);
3067 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3071 gsi = gsi_start (stmts);
3072 if (!gsi_end_p (gsi))
3074 stmt = gsi_stmt (gsi);
3075 if (gimple_code (stmt) != GIMPLE_LABEL)
3076 stmt = NULL;
3079 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3081 if (stmt || elt)
3083 last = get_last_insn ();
3085 if (stmt)
3087 expand_gimple_stmt (stmt);
3088 gsi_next (&gsi);
3091 if (elt)
3092 emit_label ((rtx) *elt);
3094       /* Java emits line number notes at the top of labels.
3095 ??? Make this go away once line number notes are obsoleted. */
3096 BB_HEAD (bb) = NEXT_INSN (last);
3097 if (NOTE_P (BB_HEAD (bb)))
3098 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3099 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3101 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3103 else
3104 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3106 NOTE_BASIC_BLOCK (note) = bb;
3108 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3110 basic_block new_bb;
3112 stmt = gsi_stmt (gsi);
3113 currently_expanding_gimple_stmt = stmt;
3115 /* Expand this statement, then evaluate the resulting RTL and
3116 fixup the CFG accordingly. */
3117 if (gimple_code (stmt) == GIMPLE_COND)
3119 new_bb = expand_gimple_cond (bb, stmt);
3120 if (new_bb)
3121 return new_bb;
3123 else if (gimple_debug_bind_p (stmt))
3125 location_t sloc = get_curr_insn_source_location ();
3126 tree sblock = get_curr_insn_block ();
3127 gimple_stmt_iterator nsi = gsi;
3129 for (;;)
3131 tree var = gimple_debug_bind_get_var (stmt);
3132 tree value;
3133 rtx val;
3134 enum machine_mode mode;
3136 if (gimple_debug_bind_has_value_p (stmt))
3137 value = gimple_debug_bind_get_value (stmt);
3138 else
3139 value = NULL_TREE;
3141 last = get_last_insn ();
3143 set_curr_insn_source_location (gimple_location (stmt));
3144 set_curr_insn_block (gimple_block (stmt));
3146 if (DECL_P (var))
3147 mode = DECL_MODE (var);
3148 else
3149 mode = TYPE_MODE (TREE_TYPE (var));
3151 val = gen_rtx_VAR_LOCATION
3152 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3154 val = emit_debug_insn (val);
3156 if (dump_file && (dump_flags & TDF_DETAILS))
3158 /* We can't dump the insn with a TREE where an RTX
3159 is expected. */
3160 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3161 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3162 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3165 gsi = nsi;
3166 gsi_next (&nsi);
3167 if (gsi_end_p (nsi))
3168 break;
3169 stmt = gsi_stmt (nsi);
3170 if (!gimple_debug_bind_p (stmt))
3171 break;
3174 set_curr_insn_source_location (sloc);
3175 set_curr_insn_block (sblock);
3177 else
3179 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3181 bool can_fallthru;
3182 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3183 if (new_bb)
3185 if (can_fallthru)
3186 bb = new_bb;
3187 else
3188 return new_bb;
3191 else
3193 def_operand_p def_p;
3194 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3196 if (def_p != NULL)
3198 /* Ignore this stmt if it is in the list of
3199 replaceable expressions. */
3200 if (SA.values
3201 && bitmap_bit_p (SA.values,
3202 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3203 continue;
3205 last = expand_gimple_stmt (stmt);
3206 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3211 currently_expanding_gimple_stmt = NULL;
3213 /* Expand implicit goto and convert goto_locus. */
3214 FOR_EACH_EDGE (e, ei, bb->succs)
3216 if (e->goto_locus && e->goto_block)
3218 set_curr_insn_source_location (e->goto_locus);
3219 set_curr_insn_block (e->goto_block);
3220 e->goto_locus = curr_insn_locator ();
3222 e->goto_block = NULL;
3223 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3225 emit_jump (label_rtx_for_bb (e->dest));
3226 e->flags &= ~EDGE_FALLTHRU;
3230   /* Expanded RTL can create a jump as the last instruction of a block.
3231      This might later be assumed to be a jump to the successor and break edge insertion.
3232      We need to insert a dummy move to prevent this.  PR41440.  */
3233 if (single_succ_p (bb)
3234 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3235 && (last = get_last_insn ())
3236 && JUMP_P (last))
3238 rtx dummy = gen_reg_rtx (SImode);
3239 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3242 do_pending_stack_adjust ();
3244 /* Find the block tail. The last insn in the block is the insn
3245 before a barrier and/or table jump insn. */
3246 last = get_last_insn ();
3247 if (BARRIER_P (last))
3248 last = PREV_INSN (last);
3249 if (JUMP_TABLE_DATA_P (last))
3250 last = PREV_INSN (PREV_INSN (last));
3251 BB_END (bb) = last;
3253 update_bb_for_insn (bb);
3255 return bb;
3259 /* Create a basic block for initialization code. */
3261 static basic_block
3262 construct_init_block (void)
3264 basic_block init_block, first_block;
3265 edge e = NULL;
3266 int flags;
3268 /* Multiple entry points not supported yet. */
3269 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3270 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3271 init_rtl_bb_info (EXIT_BLOCK_PTR);
3272 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3273 EXIT_BLOCK_PTR->flags |= BB_RTL;
3275 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3277   /* When the entry edge points to the first basic block, we don't need a jump;
3278      otherwise we have to jump to the proper target.  */
3279 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3281 tree label = gimple_block_label (e->dest);
3283 emit_jump (label_rtx (label));
3284 flags = 0;
3286 else
3287 flags = EDGE_FALLTHRU;
3289 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3290 get_last_insn (),
3291 ENTRY_BLOCK_PTR);
3292 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3293 init_block->count = ENTRY_BLOCK_PTR->count;
3294 if (e)
3296 first_block = e->dest;
3297 redirect_edge_succ (e, init_block);
3298 e = make_edge (init_block, first_block, flags);
3300 else
3301 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3302 e->probability = REG_BR_PROB_BASE;
3303 e->count = ENTRY_BLOCK_PTR->count;
3305 update_bb_for_insn (init_block);
3306 return init_block;
3309 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3310 found in the block tree. */
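/* Illustrative restatement: the outermost block passed in gets level 0,
   each of its BLOCK_SUBBLOCKS gets level 1, their subblocks level 2,
   and so on; siblings on the same BLOCK_CHAIN share a level.  */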
3312 static void
3313 set_block_levels (tree block, int level)
3315 while (block)
3317 BLOCK_NUMBER (block) = level;
3318 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3319 block = BLOCK_CHAIN (block);
3323 /* Create a block containing landing pads and similar stuff. */
3325 static void
3326 construct_exit_block (void)
3328 rtx head = get_last_insn ();
3329 rtx end;
3330 basic_block exit_block;
3331 edge e, e2;
3332 unsigned ix;
3333 edge_iterator ei;
3334 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3336 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3338 /* Make sure the locus is set to the end of the function, so that
3339 epilogue line numbers and warnings are set properly. */
3340 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3341 input_location = cfun->function_end_locus;
3343 /* The following insns belong to the top scope. */
3344 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3346 /* Generate rtl for function exit. */
3347 expand_function_end ();
3349 end = get_last_insn ();
3350 if (head == end)
3351 return;
3352   /* While emitting the function end we could move the end of the last basic block.  */
3354 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3355 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3356 head = NEXT_INSN (head);
3357 exit_block = create_basic_block (NEXT_INSN (head), end,
3358 EXIT_BLOCK_PTR->prev_bb);
3359 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3360 exit_block->count = EXIT_BLOCK_PTR->count;
3362 ix = 0;
3363 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3365 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3366 if (!(e->flags & EDGE_ABNORMAL))
3367 redirect_edge_succ (e, exit_block);
3368 else
3369 ix++;
3372 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3373 e->probability = REG_BR_PROB_BASE;
3374 e->count = EXIT_BLOCK_PTR->count;
3375 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3376 if (e2 != e)
3378 e->count -= e2->count;
3379 exit_block->count -= e2->count;
3380 exit_block->frequency -= EDGE_FREQUENCY (e2);
3382 if (e->count < 0)
3383 e->count = 0;
3384 if (exit_block->count < 0)
3385 exit_block->count = 0;
3386 if (exit_block->frequency < 0)
3387 exit_block->frequency = 0;
3388 update_bb_for_insn (exit_block);
3391 /* Helper function for discover_nonconstant_array_refs.
3392 Look for ARRAY_REF nodes with non-constant indexes and mark them
3393 addressable. */
3395 static tree
3396 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3397 void *data ATTRIBUTE_UNUSED)
3399 tree t = *tp;
3401 if (IS_TYPE_OR_DECL_P (t))
3402 *walk_subtrees = 0;
3403 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3405 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3406 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3407 && (!TREE_OPERAND (t, 2)
3408 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3409 || (TREE_CODE (t) == COMPONENT_REF
3410 && (!TREE_OPERAND (t,2)
3411 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3412 || TREE_CODE (t) == BIT_FIELD_REF
3413 || TREE_CODE (t) == REALPART_EXPR
3414 || TREE_CODE (t) == IMAGPART_EXPR
3415 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3416 || CONVERT_EXPR_P (t))
3417 t = TREE_OPERAND (t, 0);
3419 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3421 t = get_base_address (t);
3422 if (t && DECL_P (t)
3423 && DECL_MODE (t) != BLKmode)
3424 TREE_ADDRESSABLE (t) = 1;
3427 *walk_subtrees = 0;
3430 return NULL_TREE;
3433 /* RTL expansion is not able to compile array references with variable
3434    offsets for arrays stored in a single register.  Discover such
3435 expressions and mark variables as addressable to avoid this
3436 scenario. */
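/* Illustrative (hypothetical) example of the kind of reference caught here:
     struct { int v[2]; } s;  ... = s.v[i];
   with a non-constant I: the variable index forces S to be marked
   TREE_ADDRESSABLE, so it lives in memory rather than a single register.  */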
3438 static void
3439 discover_nonconstant_array_refs (void)
3441 basic_block bb;
3442 gimple_stmt_iterator gsi;
3444 FOR_EACH_BB (bb)
3445 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3447 gimple stmt = gsi_stmt (gsi);
3448 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3452 /* This function sets crtl->args.internal_arg_pointer to a virtual
3453    register if DRAP is needed.  The local register allocator will replace
3454 virtual_incoming_args_rtx with the virtual register. */
3456 static void
3457 expand_stack_alignment (void)
3459 rtx drap_rtx;
3460 unsigned int preferred_stack_boundary;
3462 if (! SUPPORTS_STACK_ALIGNMENT)
3463 return;
3465 if (cfun->calls_alloca
3466 || cfun->has_nonlocal_label
3467 || crtl->has_nonlocal_goto)
3468 crtl->need_drap = true;
3470 /* Call update_stack_boundary here again to update incoming stack
3471 boundary. It may set incoming stack alignment to a different
3472 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3473 use the minimum incoming stack alignment to check if it is OK
3474 to perform sibcall optimization since sibcall optimization will
3475 only align the outgoing stack to incoming stack boundary. */
3476 if (targetm.calls.update_stack_boundary)
3477 targetm.calls.update_stack_boundary ();
3479 /* The incoming stack frame has to be aligned at least at
3480 parm_stack_boundary. */
3481 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3483 /* Update crtl->stack_alignment_estimated and use it later to align
3484 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3485 exceptions since callgraph doesn't collect incoming stack alignment
3486 in this case. */
3487 if (flag_non_call_exceptions
3488 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3489 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3490 else
3491 preferred_stack_boundary = crtl->preferred_stack_boundary;
3492 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3493 crtl->stack_alignment_estimated = preferred_stack_boundary;
3494 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3495 crtl->stack_alignment_needed = preferred_stack_boundary;
3497 gcc_assert (crtl->stack_alignment_needed
3498 <= crtl->stack_alignment_estimated);
3500 crtl->stack_realign_needed
3501 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3502 crtl->stack_realign_tried = crtl->stack_realign_needed;
3504 crtl->stack_realign_processed = true;
3506 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3507 alignment. */
3508 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3509 drap_rtx = targetm.calls.get_drap_rtx ();
3511 /* stack_realign_drap and drap_rtx must match. */
3512 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3514 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3515 if (NULL != drap_rtx)
3517 crtl->args.internal_arg_pointer = drap_rtx;
3519 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3520 needed. */
3521 fixup_tail_calls ();
3525 /* Translate the intermediate representation contained in the CFG
3526 from GIMPLE trees to RTL.
3528 We do conversion per basic block and preserve/update the tree CFG.
3529 This implies we have to do some magic as the CFG can simultaneously
3530 consist of basic blocks containing RTL and GIMPLE trees. This can
3531    confuse the CFG hooks, so be careful not to manipulate the CFG during
3532 the expansion. */
3534 static unsigned int
3535 gimple_expand_cfg (void)
3537 basic_block bb, init_block;
3538 sbitmap blocks;
3539 edge_iterator ei;
3540 edge e;
3541 unsigned i;
3543 rewrite_out_of_ssa (&SA);
3544 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3545 sizeof (rtx));
3547 /* Some backends want to know that we are expanding to RTL. */
3548 currently_expanding_to_rtl = 1;
3550 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3552 insn_locators_alloc ();
3553 if (!DECL_IS_BUILTIN (current_function_decl))
3555 /* Eventually, all FEs should explicitly set function_start_locus. */
3556 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3557 set_curr_insn_source_location
3558 (DECL_SOURCE_LOCATION (current_function_decl));
3559 else
3560 set_curr_insn_source_location (cfun->function_start_locus);
3562 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3563 prologue_locator = curr_insn_locator ();
3565 /* Make sure first insn is a note even if we don't want linenums.
3566 This makes sure the first insn will never be deleted.
3567 Also, final expects a note to appear there. */
3568 emit_note (NOTE_INSN_DELETED);
3570 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3571 discover_nonconstant_array_refs ();
3573 targetm.expand_to_rtl_hook ();
3574 crtl->stack_alignment_needed = STACK_BOUNDARY;
3575 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3576 crtl->stack_alignment_estimated = 0;
3577 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3578 cfun->cfg->max_jumptable_ents = 0;
3581 /* Expand the variables recorded during gimple lowering. */
3582 expand_used_vars ();
3584 /* Honor stack protection warnings. */
3585 if (warn_stack_protect)
3587 if (cfun->calls_alloca)
3588 warning (OPT_Wstack_protector,
3589 "not protecting local variables: variable length buffer");
3590 if (has_short_buffer && !crtl->stack_protect_guard)
3591 warning (OPT_Wstack_protector,
3592 "not protecting function: no buffer at least %d bytes long",
3593 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3596 /* Set up parameters and prepare for return, for the function. */
3597 expand_function_start (current_function_decl);
3599 /* Now that we also have the parameter RTXs, copy them over to our
3600 partitions. */
3601 for (i = 0; i < SA.map->num_partitions; i++)
3603 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3605 if (TREE_CODE (var) != VAR_DECL
3606 && !SA.partition_to_pseudo[i])
3607 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3608 gcc_assert (SA.partition_to_pseudo[i]);
3610 /* If this decl was marked as living in multiple places, reset
3611 this now to NULL. */
3612 if (DECL_RTL_IF_SET (var) == pc_rtx)
3613 SET_DECL_RTL (var, NULL);
3615 /* Some RTL parts really want to look at DECL_RTL(x) when x
3616 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3617          SET_DECL_RTL here, making this available, but that would mean
3618          selecting one of the potentially many RTLs for one DECL.  Instead
3619          of doing that we simply reset the MEM_EXPR of the RTL in question;
3620          then nobody can get at it and hence nobody can call DECL_RTL on it.  */
3621 if (!DECL_RTL_SET_P (var))
3623 if (MEM_P (SA.partition_to_pseudo[i]))
3624 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3628 /* If this function is `main', emit a call to `__main'
3629 to run global initializers, etc. */
3630 if (DECL_NAME (current_function_decl)
3631 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3632 && DECL_FILE_SCOPE_P (current_function_decl))
3633 expand_main_function ();
3635 /* Initialize the stack_protect_guard field. This must happen after the
3636 call to __main (if any) so that the external decl is initialized. */
3637 if (crtl->stack_protect_guard)
3638 stack_protect_prologue ();
3640 expand_phi_nodes (&SA);
3642 /* Register rtl specific functions for cfg. */
3643 rtl_register_cfg_hooks ();
3645 init_block = construct_init_block ();
3647 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3648 remaining edges later. */
3649 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3650 e->flags &= ~EDGE_EXECUTABLE;
3652 lab_rtx_for_bb = pointer_map_create ();
3653 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3654 bb = expand_gimple_basic_block (bb);
3656 if (MAY_HAVE_DEBUG_INSNS)
3657 expand_debug_locations ();
3659 execute_free_datastructures ();
3660 finish_out_of_ssa (&SA);
3662   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3663 conservatively to true until they are all profile aware. */
3664 pointer_map_destroy (lab_rtx_for_bb);
3665 free_histograms ();
3667 construct_exit_block ();
3668 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3669 insn_locators_finalize ();
3671 /* Zap the tree EH table. */
3672 set_eh_throw_stmt_table (cfun, NULL);
3674 rebuild_jump_labels (get_insns ());
3676 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3678 edge e;
3679 edge_iterator ei;
3680 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3682 if (e->insns.r)
3683 commit_one_edge_insertion (e);
3684 else
3685 ei_next (&ei);
3689 /* We're done expanding trees to RTL. */
3690 currently_expanding_to_rtl = 0;
3692 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3694 edge e;
3695 edge_iterator ei;
3696 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3698 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3699 e->flags &= ~EDGE_EXECUTABLE;
3701 /* At the moment not all abnormal edges match the RTL
3702 representation. It is safe to remove them here as
3703 find_many_sub_basic_blocks will rediscover them.
3704 In the future we should get this fixed properly. */
3705 if ((e->flags & EDGE_ABNORMAL)
3706 && !(e->flags & EDGE_SIBCALL))
3707 remove_edge (e);
3708 else
3709 ei_next (&ei);
3713 blocks = sbitmap_alloc (last_basic_block);
3714 sbitmap_ones (blocks);
3715 find_many_sub_basic_blocks (blocks);
3716 sbitmap_free (blocks);
3717 purge_all_dead_edges ();
3719 compact_blocks ();
3721 expand_stack_alignment ();
3723 #ifdef ENABLE_CHECKING
3724 verify_flow_info ();
3725 #endif
3727 /* There's no need to defer outputting this function any more; we
3728 know we want to output it. */
3729 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3731 /* Now that we're done expanding trees to RTL, we shouldn't have any
3732 more CONCATs anywhere. */
3733 generating_concat_p = 0;
3735 if (dump_file)
3737 fprintf (dump_file,
3738 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3739 /* And the pass manager will dump RTL for us. */
3742 /* If we're emitting a nested function, make sure its parent gets
3743 emitted as well. Doing otherwise confuses debug info. */
3745 tree parent;
3746 for (parent = DECL_CONTEXT (current_function_decl);
3747 parent != NULL_TREE;
3748 parent = get_containing_scope (parent))
3749 if (TREE_CODE (parent) == FUNCTION_DECL)
3750 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3753 /* We are now committed to emitting code for this function. Do any
3754      preparation, such as emitting abstract debug info for the inline function
3755 before it gets mangled by optimization. */
3756 if (cgraph_function_possibly_inlined_p (current_function_decl))
3757 (*debug_hooks->outlining_inline_function) (current_function_decl);
3759 TREE_ASM_WRITTEN (current_function_decl) = 1;
3761 /* After expanding, the return labels are no longer needed. */
3762 return_label = NULL;
3763 naked_return_label = NULL;
3764 /* Tag the blocks with a depth number so that change_scope can find
3765 the common parent easily. */
3766 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3767 default_rtl_profile ();
3768 return 0;
3771 struct rtl_opt_pass pass_expand =
3774 RTL_PASS,
3775 "expand", /* name */
3776 NULL, /* gate */
3777 gimple_expand_cfg, /* execute */
3778 NULL, /* sub */
3779 NULL, /* next */
3780 0, /* static_pass_number */
3781 TV_EXPAND, /* tv_id */
3782 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
3783 PROP_rtl, /* properties_provided */
3784 PROP_ssa | PROP_trees, /* properties_destroyed */
3785 TODO_verify_ssa | TODO_verify_flow
3786 | TODO_verify_stmts, /* todo_flags_start */
3787 TODO_dump_func
3788 | TODO_ggc_collect /* todo_flags_finish */