PR tree-optimization/54321
[official-gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "except.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "toplev.h"
39 #include "debug.h"
40 #include "params.h"
41 #include "tree-inline.h"
42 #include "value-prof.h"
43 #include "target.h"
44 #include "ssaexpand.h"
45 #include "bitmap.h"
46 #include "sbitmap.h"
47 #include "cfgloop.h"
48 #include "regs.h" /* For reg_renumber. */
49 #include "insn-attr.h" /* For INSN_SCHEDULING. */
51 /* This variable holds information helping the rewriting of SSA trees
52 into RTL. */
53 struct ssaexpand SA;
55 /* This variable holds the currently expanded gimple statement for purposes
56 of communicating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt;
59 static rtx expand_debug_expr (tree);
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62 statement STMT. */
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
67 tree t;
68 enum gimple_rhs_class grhs_class;
70 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
72 if (grhs_class == GIMPLE_TERNARY_RHS)
73 t = build3 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt),
76 gimple_assign_rhs2 (stmt),
77 gimple_assign_rhs3 (stmt));
78 else if (grhs_class == GIMPLE_BINARY_RHS)
79 t = build2 (gimple_assign_rhs_code (stmt),
80 TREE_TYPE (gimple_assign_lhs (stmt)),
81 gimple_assign_rhs1 (stmt),
82 gimple_assign_rhs2 (stmt));
83 else if (grhs_class == GIMPLE_UNARY_RHS)
84 t = build1 (gimple_assign_rhs_code (stmt),
85 TREE_TYPE (gimple_assign_lhs (stmt)),
86 gimple_assign_rhs1 (stmt));
87 else if (grhs_class == GIMPLE_SINGLE_RHS)
89 t = gimple_assign_rhs1 (stmt);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 && gimple_location (stmt) != EXPR_LOCATION (t))
93 || (gimple_block (stmt)
94 && currently_expanding_to_rtl
95 && EXPR_P (t)
96 && gimple_block (stmt) != TREE_BLOCK (t)))
97 t = copy_node (t);
99 else
100 gcc_unreachable ();
102 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
103 SET_EXPR_LOCATION (t, gimple_location (stmt));
104 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
105 TREE_BLOCK (t) = gimple_block (stmt);
107 return t;
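/* Illustrative standalone sketch, not part of cfgexpand.c: the
   GIMPLE_SINGLE_RHS case above copies the shared operand tree before the
   SET_EXPR_LOCATION / TREE_BLOCK stores below, so a node shared with other
   statements is never modified in place.  The same copy-on-write pattern,
   with a hypothetical demo node type instead of GCC trees: */

#include <stdlib.h>
#include <string.h>

struct demo_node { int location; int block; };

/* Return a node that is safe to annotate with LOC: reuse N when nothing
   needs to change, otherwise hand back a private copy we may mutate.  */
static struct demo_node *
demo_node_for_location (struct demo_node *n, int loc)
{
  if (n->location == loc)
    return n;                              /* sharing stays safe */
  struct demo_node *copy = malloc (sizeof *copy);
  memcpy (copy, n, sizeof *copy);          /* private copy ... */
  copy->location = loc;                    /* ... mutated freely */
  return copy;
}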
111 #ifndef STACK_ALIGNMENT_NEEDED
112 #define STACK_ALIGNMENT_NEEDED 1
113 #endif
115 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
117 /* Associate declaration T with storage space X. If T is not an
118 SSA name this is exactly SET_DECL_RTL; otherwise make the
119 partition of T associated with X. */
120 static inline void
121 set_rtl (tree t, rtx x)
123 if (TREE_CODE (t) == SSA_NAME)
125 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 if (x && !MEM_P (x))
127 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
128 /* For the benefit of debug information at -O0 (where vartracking
129 doesn't run) record the place also in the base DECL if it's
130 a normal variable (not a parameter). */
131 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
133 tree var = SSA_NAME_VAR (t);
134 /* If we don't yet have something recorded, just record it now. */
135 if (!DECL_RTL_SET_P (var))
136 SET_DECL_RTL (var, x);
137 /* If we have it set already to "multiple places" don't
138 change this. */
139 else if (DECL_RTL (var) == pc_rtx)
141 /* If we have something recorded and it's not the same place
142 as we want to record now, we have multiple partitions for the
143 same base variable, with different places. We can't just
144 randomly choose one, hence we have to say that we don't know.
145 This only happens with optimization, and there var-tracking
146 will figure out the right thing. */
147 else if (DECL_RTL (var) != x)
148 SET_DECL_RTL (var, pc_rtx);
151 else
152 SET_DECL_RTL (t, x);
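/* Illustrative standalone sketch, not part of cfgexpand.c: the DECL_RTL
   bookkeeping above keeps one known location per base variable and degrades
   to a "multiple places" sentinel once two partitions of the same variable
   end up in different places.  A minimal analogue, with the hypothetical
   UNKNOWN_PLACE playing the role of pc_rtx: */

#include <stddef.h>

#define UNKNOWN_PLACE ((void *) -1)        /* stand-in for pc_rtx */

static void
demo_record_place (void **recorded, void *place)
{
  if (*recorded == NULL)
    *recorded = place;                     /* first sighting: record it */
  else if (*recorded == UNKNOWN_PLACE)
    ;                                      /* already known to be ambiguous */
  else if (*recorded != place)
    *recorded = UNKNOWN_PLACE;             /* conflicting answers: give up */
}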
155 /* This structure holds data relevant to one variable that will be
156 placed in a stack slot. */
157 struct stack_var
159 /* The variable. */
160 tree decl;
162 /* Initially, the size of the variable. Later, the size of the partition,
163 if this variable becomes its partition's representative. */
164 HOST_WIDE_INT size;
166 /* The *byte* alignment required for this variable. Or, as with the
167 size, the alignment for this partition. */
168 unsigned int alignb;
170 /* The partition representative. */
171 size_t representative;
173 /* The next stack variable in the partition, or EOC. */
174 size_t next;
176 /* The numbers of conflicting stack variables. */
177 bitmap conflicts;
180 #define EOC ((size_t)-1)
182 /* We have an array of such objects while deciding allocation. */
183 static struct stack_var *stack_vars;
184 static size_t stack_vars_alloc;
185 static size_t stack_vars_num;
186 static struct pointer_map_t *decl_to_stack_part;
188 /* Conflict bitmaps go on this obstack. This allows us to destroy
189 all of them in one big sweep. */
190 static bitmap_obstack stack_var_bitmap_obstack;
192 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
193 is non-increasing. */
194 static size_t *stack_vars_sorted;
196 /* The phase of the stack frame. This is the known misalignment of
197 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
198 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
199 static int frame_phase;
201 /* Used during expand_used_vars to remember if we saw any decls for
202 which we'd like to enable stack smashing protection. */
203 static bool has_protected_decls;
205 /* Used during expand_used_vars. Remember if we saw a character buffer
206 smaller than our cutoff threshold. Used for -Wstack-protector. */
207 static bool has_short_buffer;
209 /* Compute the byte alignment to use for DECL. Ignore alignment
210 we can't honor given the expected alignment of the stack boundary. */
212 static unsigned int
213 align_local_variable (tree decl)
215 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
216 DECL_ALIGN (decl) = align;
217 return align / BITS_PER_UNIT;
220 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
221 Return the frame offset. */
223 static HOST_WIDE_INT
224 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
226 HOST_WIDE_INT offset, new_frame_offset;
228 new_frame_offset = frame_offset;
229 if (FRAME_GROWS_DOWNWARD)
231 new_frame_offset -= size + frame_phase;
232 new_frame_offset &= -align;
233 new_frame_offset += frame_phase;
234 offset = new_frame_offset;
236 else
238 new_frame_offset -= frame_phase;
239 new_frame_offset += align - 1;
240 new_frame_offset &= -align;
241 new_frame_offset += frame_phase;
242 offset = new_frame_offset;
243 new_frame_offset += size;
245 frame_offset = new_frame_offset;
247 if (frame_offset_overflow (frame_offset, cfun->decl))
248 frame_offset = offset = 0;
250 return offset;
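/* Illustrative standalone sketch, not part of cfgexpand.c: both branches
   above round the frame offset so that (offset - frame_phase) stays a
   multiple of ALIGN, i.e. alignment is computed relative to the known
   misalignment ("phase") of the frame base.  For the upward-growing case,
   assuming ALIGN is a power of two: */

static long
demo_round_up_with_phase (long offset, long align, long phase)
{
  offset -= phase;                         /* phase-relative coordinates */
  offset = (offset + align - 1) & -align;  /* classic power-of-two round-up */
  return offset + phase;                   /* translate back */
}

/* e.g. demo_round_up_with_phase (13, 8, 4) == 20, and 20 - 4 is a
   multiple of 8.  */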
253 /* Accumulate DECL into STACK_VARS. */
255 static void
256 add_stack_var (tree decl)
258 struct stack_var *v;
260 if (stack_vars_num >= stack_vars_alloc)
262 if (stack_vars_alloc)
263 stack_vars_alloc = stack_vars_alloc * 3 / 2;
264 else
265 stack_vars_alloc = 32;
266 stack_vars
267 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
269 if (!decl_to_stack_part)
270 decl_to_stack_part = pointer_map_create ();
272 v = &stack_vars[stack_vars_num];
273 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
275 v->decl = decl;
276 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
277 /* Ensure that all variables have size, so that &a != &b for any two
278 variables that are simultaneously live. */
279 if (v->size == 0)
280 v->size = 1;
281 v->alignb = align_local_variable (SSAVAR (decl));
282 /* An alignment of zero can mightily confuse us later. */
283 gcc_assert (v->alignb != 0);
285 /* All variables are initially in their own partition. */
286 v->representative = stack_vars_num;
287 v->next = EOC;
289 /* All variables initially conflict with no other. */
290 v->conflicts = NULL;
292 /* Ensure that this decl doesn't get put onto the list twice. */
293 set_rtl (decl, pc_rtx);
295 stack_vars_num++;
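/* Illustrative standalone sketch, not part of cfgexpand.c: add_stack_var
   grows its array by roughly 1.5x and seeds every new entry as its own
   singleton partition.  The same pattern with a hypothetical element type
   (error handling omitted): */

#include <stdlib.h>

struct demo_var { size_t representative; };

static struct demo_var *demo_vec;
static size_t demo_alloc, demo_num;

static size_t
demo_push_var (void)
{
  if (demo_num >= demo_alloc)
    {
      demo_alloc = demo_alloc ? demo_alloc * 3 / 2 : 32;   /* grow ~1.5x */
      demo_vec = realloc (demo_vec, demo_alloc * sizeof *demo_vec);
    }
  demo_vec[demo_num].representative = demo_num;  /* its own partition */
  return demo_num++;
}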
298 /* Make the decls associated with luids X and Y conflict. */
300 static void
301 add_stack_var_conflict (size_t x, size_t y)
303 struct stack_var *a = &stack_vars[x];
304 struct stack_var *b = &stack_vars[y];
305 if (!a->conflicts)
306 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
307 if (!b->conflicts)
308 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
309 bitmap_set_bit (a->conflicts, y);
310 bitmap_set_bit (b->conflicts, x);
313 /* Check whether the decls associated with luids X and Y conflict. */
315 static bool
316 stack_var_conflict_p (size_t x, size_t y)
318 struct stack_var *a = &stack_vars[x];
319 struct stack_var *b = &stack_vars[y];
320 if (x == y)
321 return false;
322 /* Partitions containing an SSA name result from gimple registers
323 with things like unsupported modes. They are top-level and
324 hence conflict with everything else. */
325 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
326 return true;
328 if (!a->conflicts || !b->conflicts)
329 return false;
330 return bitmap_bit_p (a->conflicts, y);
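/* Illustrative standalone sketch, not part of cfgexpand.c: conflicts are
   recorded symmetrically, one bit per other variable, so the query above
   only has to look in one direction.  With plain 64-bit masks instead of
   GCC bitmaps (so at most 64 variables in this toy version): */

#include <stdbool.h>
#include <stdint.h>

static uint64_t demo_conflicts[64];

static void
demo_add_conflict (unsigned x, unsigned y)
{
  demo_conflicts[x] |= UINT64_C (1) << y;  /* record both directions */
  demo_conflicts[y] |= UINT64_C (1) << x;
}

static bool
demo_conflict_p (unsigned x, unsigned y)
{
  return x != y && ((demo_conflicts[x] >> y) & 1);
}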
333 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
334 enter its partition number into bitmap DATA. */
336 static bool
337 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
339 bitmap active = (bitmap)data;
340 op = get_base_address (op);
341 if (op
342 && DECL_P (op)
343 && DECL_RTL_IF_SET (op) == pc_rtx)
345 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
346 if (v)
347 bitmap_set_bit (active, *v);
349 return false;
352 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
353 record conflicts between it and all currently active other partitions
354 from bitmap DATA. */
356 static bool
357 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
359 bitmap active = (bitmap)data;
360 op = get_base_address (op);
361 if (op
362 && DECL_P (op)
363 && DECL_RTL_IF_SET (op) == pc_rtx)
365 size_t *v =
366 (size_t *) pointer_map_contains (decl_to_stack_part, op);
367 if (v && bitmap_set_bit (active, *v))
369 size_t num = *v;
370 bitmap_iterator bi;
371 unsigned i;
372 gcc_assert (num < stack_vars_num);
373 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
374 add_stack_var_conflict (num, i);
377 return false;
380 /* Helper routine for add_scope_conflicts, calculating the active partitions
381 at the end of BB, leaving the result in WORK. We're called to generate
382 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
383 liveness. */
385 static void
386 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
388 edge e;
389 edge_iterator ei;
390 gimple_stmt_iterator gsi;
391 bool (*visit)(gimple, tree, void *);
393 bitmap_clear (work);
394 FOR_EACH_EDGE (e, ei, bb->preds)
395 bitmap_ior_into (work, (bitmap)e->src->aux);
397 visit = visit_op;
399 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
401 gimple stmt = gsi_stmt (gsi);
402 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
404 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
406 gimple stmt = gsi_stmt (gsi);
408 if (gimple_clobber_p (stmt))
410 tree lhs = gimple_assign_lhs (stmt);
411 size_t *v;
412 /* Nested function lowering might introduce LHSs
413 that are COMPONENT_REFs. */
414 if (TREE_CODE (lhs) != VAR_DECL)
415 continue;
416 if (DECL_RTL_IF_SET (lhs) == pc_rtx
417 && (v = (size_t *)
418 pointer_map_contains (decl_to_stack_part, lhs)))
419 bitmap_clear_bit (work, *v);
421 else if (!is_gimple_debug (stmt))
423 if (for_conflict
424 && visit == visit_op)
426 /* If this is the first real instruction in this BB we need
427 to add conflicts for everything live at this point now.
428 Unlike classical liveness for named objects we can't
429 rely on seeing a def/use of the names we're interested in.
430 There might merely be indirect loads/stores. We'd not add any
431 conflicts for such partitions. */
432 bitmap_iterator bi;
433 unsigned i;
434 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
436 struct stack_var *a = &stack_vars[i];
437 if (!a->conflicts)
438 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
439 bitmap_ior_into (a->conflicts, work);
441 visit = visit_conflict;
443 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
448 /* Generate stack partition conflicts between all partitions that are
449 simultaneously live. */
451 static void
452 add_scope_conflicts (void)
454 basic_block bb;
455 bool changed;
456 bitmap work = BITMAP_ALLOC (NULL);
457 int *rpo;
458 int n_bbs;
460 /* We approximate the live range of a stack variable by taking the first
461 mention of its name as starting point(s), and by the end-of-scope
462 death clobber added by gimplify as ending point(s) of the range.
463 This over-approximates in the case where we, for instance, moved an
464 address-taken operation upward without also moving a dereference to it.
465 But it's conservatively correct, as a variable can never hold values
466 before its name is mentioned at least once.
468 We then do a mostly classical bitmap liveness algorithm. */
470 FOR_ALL_BB (bb)
471 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
473 rpo = XNEWVEC (int, last_basic_block);
474 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
476 changed = true;
477 while (changed)
479 int i;
480 changed = false;
481 for (i = 0; i < n_bbs; i++)
483 bitmap active;
484 bb = BASIC_BLOCK (rpo[i]);
485 active = (bitmap)bb->aux;
486 add_scope_conflicts_1 (bb, work, false);
487 if (bitmap_ior_into (active, work))
488 changed = true;
492 FOR_EACH_BB (bb)
493 add_scope_conflicts_1 (bb, work, true);
495 free (rpo);
496 BITMAP_FREE (work);
497 FOR_ALL_BB (bb)
498 BITMAP_FREE (bb->aux);
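/* Illustrative standalone sketch, not part of cfgexpand.c: the loop above is
   an iterative dataflow fixed point -- each block's set is the union of its
   predecessors' sets pushed through the block, recomputed in reverse
   post-order until nothing changes.  A toy version with 64-bit sets; the CFG
   shape (npreds/preds), the RPO order and the per-block transfer function
   are all hypothetical inputs here: */

#include <stdbool.h>
#include <stdint.h>

#define DEMO_MAX_PREDS 8

static void
demo_solve_dataflow (unsigned nblocks, const unsigned *rpo,
                     const unsigned npreds[],
                     const unsigned preds[][DEMO_MAX_PREDS],
                     uint64_t (*transfer) (unsigned block, uint64_t in),
                     uint64_t out[])
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (unsigned i = 0; i < nblocks; i++)
        {
          unsigned b = rpo[i];
          uint64_t in = 0;
          for (unsigned p = 0; p < npreds[b]; p++)
            in |= out[preds[b][p]];          /* union over predecessors */
          uint64_t new_out = transfer (b, in);
          if (new_out != out[b])
            {
              out[b] = new_out;              /* keep iterating until stable */
              changed = true;
            }
        }
    }
}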
501 /* A subroutine of partition_stack_vars. A comparison function for qsort,
502 sorting an array of indices by the properties of the object. */
504 static int
505 stack_var_cmp (const void *a, const void *b)
507 size_t ia = *(const size_t *)a;
508 size_t ib = *(const size_t *)b;
509 unsigned int aligna = stack_vars[ia].alignb;
510 unsigned int alignb = stack_vars[ib].alignb;
511 HOST_WIDE_INT sizea = stack_vars[ia].size;
512 HOST_WIDE_INT sizeb = stack_vars[ib].size;
513 tree decla = stack_vars[ia].decl;
514 tree declb = stack_vars[ib].decl;
515 bool largea, largeb;
516 unsigned int uida, uidb;
518 /* Primary compare on "large" alignment. Large comes first. */
519 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
520 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
521 if (largea != largeb)
522 return (int)largeb - (int)largea;
524 /* Secondary compare on size, decreasing. */
525 if (sizea > sizeb)
526 return -1;
527 if (sizea < sizeb)
528 return 1;
530 /* Tertiary compare on true alignment, decreasing. */
531 if (aligna < alignb)
532 return -1;
533 if (aligna > alignb)
534 return 1;
536 /* Final compare on ID for sort stability, increasing.
537 Two SSA names are compared by their version, SSA names come before
538 non-SSA names, and two normal decls are compared by their DECL_UID. */
539 if (TREE_CODE (decla) == SSA_NAME)
541 if (TREE_CODE (declb) == SSA_NAME)
542 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
543 else
544 return -1;
546 else if (TREE_CODE (declb) == SSA_NAME)
547 return 1;
548 else
549 uida = DECL_UID (decla), uidb = DECL_UID (declb);
550 if (uida < uidb)
551 return 1;
552 if (uida > uidb)
553 return -1;
554 return 0;
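/* Illustrative standalone sketch, not part of cfgexpand.c: the comparator
   above orders *indices*, so qsort permutes stack_vars_sorted while the
   stack_vars array itself stays put.  A cut-down analogue keyed only on
   size (largest first) with an index tie-break for a deterministic order;
   all demo_* names are hypothetical: */

#include <stdlib.h>

struct demo_item { long size; };
static struct demo_item demo_items[16];

static int
demo_idx_cmp (const void *pa, const void *pb)
{
  size_t ia = *(const size_t *) pa, ib = *(const size_t *) pb;
  if (demo_items[ia].size != demo_items[ib].size)
    return demo_items[ia].size < demo_items[ib].size ? 1 : -1;  /* big first */
  return ia < ib ? -1 : (ia > ib ? 1 : 0);         /* stable tie-break */
}

/* usage: size_t order[16]; fill order[i] = i; then
   qsort (order, 16, sizeof (size_t), demo_idx_cmp);  */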
558 /* If the points-to solution *PT points to variables that are in a partition
559 together with other variables, add all partition members to the pointed-to
560 variables bitmap. */
562 static void
563 add_partitioned_vars_to_ptset (struct pt_solution *pt,
564 struct pointer_map_t *decls_to_partitions,
565 struct pointer_set_t *visited, bitmap temp)
567 bitmap_iterator bi;
568 unsigned i;
569 bitmap *part;
571 if (pt->anything
572 || pt->vars == NULL
573 /* The pointed-to vars bitmap is shared; it is enough to
574 visit it once. */
575 || pointer_set_insert(visited, pt->vars))
576 return;
578 bitmap_clear (temp);
580 /* By using a temporary bitmap to store all members of the partitions
581 we have to add, we make sure to visit each of the partitions only
582 once. */
583 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
584 if ((!temp
585 || !bitmap_bit_p (temp, i))
586 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
587 (void *)(size_t) i)))
588 bitmap_ior_into (temp, *part);
589 if (!bitmap_empty_p (temp))
590 bitmap_ior_into (pt->vars, temp);
593 /* Update points-to sets based on partition info, so we can use them on RTL.
594 The bitmaps representing stack partitions will be saved until expand,
595 where partitioned decls used as bases in memory expressions will be
596 rewritten. */
598 static void
599 update_alias_info_with_stack_vars (void)
601 struct pointer_map_t *decls_to_partitions = NULL;
602 size_t i, j;
603 tree var = NULL_TREE;
605 for (i = 0; i < stack_vars_num; i++)
607 bitmap part = NULL;
608 tree name;
609 struct ptr_info_def *pi;
611 /* Not interested in partitions with a single variable. */
612 if (stack_vars[i].representative != i
613 || stack_vars[i].next == EOC)
614 continue;
616 if (!decls_to_partitions)
618 decls_to_partitions = pointer_map_create ();
619 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
622 /* Create an SSA_NAME that points to the partition for use
623 as base during alias-oracle queries on RTL for bases that
624 have been partitioned. */
625 if (var == NULL_TREE)
626 var = create_tmp_var (ptr_type_node, NULL);
627 name = make_ssa_name (var, NULL);
629 /* Create bitmaps representing partitions. They will be used for
630 points-to sets later, so use GGC alloc. */
631 part = BITMAP_GGC_ALLOC ();
632 for (j = i; j != EOC; j = stack_vars[j].next)
634 tree decl = stack_vars[j].decl;
635 unsigned int uid = DECL_PT_UID (decl);
636 bitmap_set_bit (part, uid);
637 *((bitmap *) pointer_map_insert (decls_to_partitions,
638 (void *)(size_t) uid)) = part;
639 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
640 decl)) = name;
643 /* Make the SSA name point to all partition members. */
644 pi = get_ptr_info (name);
645 pt_solution_set (&pi->pt, part, false);
648 /* Make all points-to sets that contain one member of a partition
649 contain all members of the partition. */
650 if (decls_to_partitions)
652 unsigned i;
653 struct pointer_set_t *visited = pointer_set_create ();
654 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
656 for (i = 1; i < num_ssa_names; i++)
658 tree name = ssa_name (i);
659 struct ptr_info_def *pi;
661 if (name
662 && POINTER_TYPE_P (TREE_TYPE (name))
663 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
664 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
665 visited, temp);
668 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
669 decls_to_partitions, visited, temp);
671 pointer_set_destroy (visited);
672 pointer_map_destroy (decls_to_partitions);
673 BITMAP_FREE (temp);
677 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
678 partitioning algorithm. Partitions A and B are known to be non-conflicting.
679 Merge them into a single partition A. */
681 static void
682 union_stack_vars (size_t a, size_t b)
684 struct stack_var *vb = &stack_vars[b];
685 bitmap_iterator bi;
686 unsigned u;
688 gcc_assert (stack_vars[b].next == EOC);
689 /* Add B to A's partition. */
690 stack_vars[b].next = stack_vars[a].next;
691 stack_vars[b].representative = a;
692 stack_vars[a].next = b;
694 /* Update the required alignment of partition A to account for B. */
695 if (stack_vars[a].alignb < stack_vars[b].alignb)
696 stack_vars[a].alignb = stack_vars[b].alignb;
698 /* Update the interference graph and merge the conflicts. */
699 if (vb->conflicts)
701 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
702 add_stack_var_conflict (a, stack_vars[u].representative);
703 BITMAP_FREE (vb->conflicts);
707 /* A subroutine of expand_used_vars. Binpack the variables into
708 partitions constrained by the interference graph. The overall
709 algorithm used is as follows:
711 Sort the objects by size in descending order.
712 For each object A {
713 S = size(A)
714 O = 0
715 loop {
716 Look for the largest non-conflicting object B with size <= S.
717 UNION (A, B)
718 S -= size(B)
722 static void
723 partition_stack_vars (void)
725 size_t si, sj, n = stack_vars_num;
727 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
728 for (si = 0; si < n; ++si)
729 stack_vars_sorted[si] = si;
731 if (n == 1)
732 return;
734 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
736 for (si = 0; si < n; ++si)
738 size_t i = stack_vars_sorted[si];
739 unsigned int ialign = stack_vars[i].alignb;
741 /* Ignore objects that aren't partition representatives. If we
742 see a var that is not a partition representative, it must
743 have been merged earlier. */
744 if (stack_vars[i].representative != i)
745 continue;
747 for (sj = si + 1; sj < n; ++sj)
749 size_t j = stack_vars_sorted[sj];
750 unsigned int jalign = stack_vars[j].alignb;
752 /* Ignore objects that aren't partition representatives. */
753 if (stack_vars[j].representative != j)
754 continue;
756 /* Ignore conflicting objects. */
757 if (stack_var_conflict_p (i, j))
758 continue;
760 /* Do not mix objects of "small" (supported) alignment
761 and "large" (unsupported) alignment. */
762 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
763 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
764 continue;
766 /* UNION the objects. */
767 union_stack_vars (i, j);
771 update_alias_info_with_stack_vars ();
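/* Illustrative standalone sketch, not part of cfgexpand.c: the pass above
   walks the sorted order once and greedily merges every later,
   non-conflicting, alignment-compatible variable into the current
   representative, so locals that are never live at the same time can share
   one stack slot.  A toy version over at most 64 variables, reusing the
   64-bit conflict-mask convention of the earlier sketches (alignment
   classes omitted): */

#include <stdint.h>

static void
demo_partition (unsigned n, uint64_t conflicts[], unsigned repr[])
{
  for (unsigned i = 0; i < n; i++)
    repr[i] = i;                            /* singleton partitions */

  for (unsigned i = 0; i < n; i++)          /* i in big-first sorted order */
    {
      if (repr[i] != i)
        continue;                           /* already merged elsewhere */
      for (unsigned j = i + 1; j < n; j++)
        if (repr[j] == j && !((conflicts[i] >> j) & 1))
          {
            repr[j] = i;                    /* j will share i's slot */
            conflicts[i] |= conflicts[j];   /* i inherits j's conflicts */
          }
    }
}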
774 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
776 static void
777 dump_stack_var_partition (void)
779 size_t si, i, j, n = stack_vars_num;
781 for (si = 0; si < n; ++si)
783 i = stack_vars_sorted[si];
785 /* Skip variables that aren't partition representatives, for now. */
786 if (stack_vars[i].representative != i)
787 continue;
789 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
790 " align %u\n", (unsigned long) i, stack_vars[i].size,
791 stack_vars[i].alignb);
793 for (j = i; j != EOC; j = stack_vars[j].next)
795 fputc ('\t', dump_file);
796 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
798 fputc ('\n', dump_file);
802 /* Assign rtl to DECL at BASE + OFFSET. */
804 static void
805 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
806 HOST_WIDE_INT offset)
808 unsigned align;
809 rtx x;
811 /* If this fails, we've overflowed the stack frame. Error nicely? */
812 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
814 x = plus_constant (Pmode, base, offset);
815 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
817 if (TREE_CODE (decl) != SSA_NAME)
819 /* Set the alignment we actually gave this decl if it isn't an SSA name.
820 If it is, we generate stack slots only accidentally, so it isn't as
821 important; we'll simply use the alignment that is already set.
822 if (base == virtual_stack_vars_rtx)
823 offset -= frame_phase;
824 align = offset & -offset;
825 align *= BITS_PER_UNIT;
826 if (align == 0 || align > base_align)
827 align = base_align;
829 /* One would think that we could assert that we're not decreasing
830 alignment here, but (at least) the i386 port does exactly this
831 via the MINIMUM_ALIGNMENT hook. */
833 DECL_ALIGN (decl) = align;
834 DECL_USER_ALIGN (decl) = 0;
837 set_mem_attributes (x, SSAVAR (decl), true);
838 set_rtl (decl, x);
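/* Illustrative standalone sketch, not part of cfgexpand.c: "offset & -offset"
   above isolates the lowest set bit of the offset, i.e. the largest power of
   two dividing it, which is the alignment the slot is known to have relative
   to the already-aligned frame base: */

static unsigned long
demo_known_alignment (unsigned long offset)
{
  /* 0 means "no extra restriction": the slot is as aligned as the base.  */
  return offset & -offset;
}

/* e.g. demo_known_alignment (24) == 8, since 24 = 8 * 3.  */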
841 /* A subroutine of expand_used_vars. Give each partition representative
842 a unique location within the stack frame. Update each partition member
843 with that location. */
845 static void
846 expand_stack_vars (bool (*pred) (tree))
848 size_t si, i, j, n = stack_vars_num;
849 HOST_WIDE_INT large_size = 0, large_alloc = 0;
850 rtx large_base = NULL;
851 unsigned large_align = 0;
852 tree decl;
854 /* Determine if there are any variables requiring "large" alignment.
855 Since these are dynamically allocated, we only process these if
856 no predicate is involved. */
857 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
858 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
860 /* Find the total size of these variables. */
861 for (si = 0; si < n; ++si)
863 unsigned alignb;
865 i = stack_vars_sorted[si];
866 alignb = stack_vars[i].alignb;
868 /* Stop when we get to the first decl with "small" alignment. */
869 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
870 break;
872 /* Skip variables that aren't partition representatives. */
873 if (stack_vars[i].representative != i)
874 continue;
876 /* Skip variables that have already had rtl assigned. See also
877 add_stack_var where we perpetrate this pc_rtx hack. */
878 decl = stack_vars[i].decl;
879 if ((TREE_CODE (decl) == SSA_NAME
880 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
881 : DECL_RTL (decl)) != pc_rtx)
882 continue;
884 large_size += alignb - 1;
885 large_size &= -(HOST_WIDE_INT)alignb;
886 large_size += stack_vars[i].size;
889 /* If there were any, allocate space. */
890 if (large_size > 0)
891 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
892 large_align, true);
895 for (si = 0; si < n; ++si)
897 rtx base;
898 unsigned base_align, alignb;
899 HOST_WIDE_INT offset;
901 i = stack_vars_sorted[si];
903 /* Skip variables that aren't partition representatives, for now. */
904 if (stack_vars[i].representative != i)
905 continue;
907 /* Skip variables that have already had rtl assigned. See also
908 add_stack_var where we perpetrate this pc_rtx hack. */
909 decl = stack_vars[i].decl;
910 if ((TREE_CODE (decl) == SSA_NAME
911 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
912 : DECL_RTL (decl)) != pc_rtx)
913 continue;
915 /* Check the predicate to see whether this variable should be
916 allocated in this pass. */
917 if (pred && !pred (decl))
918 continue;
920 alignb = stack_vars[i].alignb;
921 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
923 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
924 base = virtual_stack_vars_rtx;
925 base_align = crtl->max_used_stack_slot_alignment;
927 else
929 /* Large alignment is only processed in the last pass. */
930 if (pred)
931 continue;
932 gcc_assert (large_base != NULL);
934 large_alloc += alignb - 1;
935 large_alloc &= -(HOST_WIDE_INT)alignb;
936 offset = large_alloc;
937 large_alloc += stack_vars[i].size;
939 base = large_base;
940 base_align = large_align;
943 /* Create rtl for each variable based on its location within the
944 partition. */
945 for (j = i; j != EOC; j = stack_vars[j].next)
947 expand_one_stack_var_at (stack_vars[j].decl,
948 base, base_align,
949 offset);
953 gcc_assert (large_alloc == large_size);
956 /* Take into account all sizes of partitions and reset DECL_RTLs. */
957 static HOST_WIDE_INT
958 account_stack_vars (void)
960 size_t si, j, i, n = stack_vars_num;
961 HOST_WIDE_INT size = 0;
963 for (si = 0; si < n; ++si)
965 i = stack_vars_sorted[si];
967 /* Skip variables that aren't partition representatives, for now. */
968 if (stack_vars[i].representative != i)
969 continue;
971 size += stack_vars[i].size;
972 for (j = i; j != EOC; j = stack_vars[j].next)
973 set_rtl (stack_vars[j].decl, NULL);
975 return size;
978 /* A subroutine of expand_one_var. Called to immediately assign rtl
979 to a variable to be allocated in the stack frame. */
981 static void
982 expand_one_stack_var (tree var)
984 HOST_WIDE_INT size, offset;
985 unsigned byte_align;
987 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
988 byte_align = align_local_variable (SSAVAR (var));
990 /* We handle highly aligned variables in expand_stack_vars. */
991 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
993 offset = alloc_stack_frame_space (size, byte_align);
995 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
996 crtl->max_used_stack_slot_alignment, offset);
999 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1000 that will reside in a hard register. */
1002 static void
1003 expand_one_hard_reg_var (tree var)
1005 rest_of_decl_compilation (var, 0, 0);
1008 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1009 that will reside in a pseudo register. */
1011 static void
1012 expand_one_register_var (tree var)
1014 tree decl = SSAVAR (var);
1015 tree type = TREE_TYPE (decl);
1016 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1017 rtx x = gen_reg_rtx (reg_mode);
1019 set_rtl (var, x);
1021 /* Note if the object is a user variable. */
1022 if (!DECL_ARTIFICIAL (decl))
1023 mark_user_reg (x);
1025 if (POINTER_TYPE_P (type))
1026 mark_reg_pointer (x, get_pointer_alignment (var));
1029 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1030 has some associated error, e.g. its type is error-mark. We just need
1031 to pick something that won't crash the rest of the compiler. */
1033 static void
1034 expand_one_error_var (tree var)
1036 enum machine_mode mode = DECL_MODE (var);
1037 rtx x;
1039 if (mode == BLKmode)
1040 x = gen_rtx_MEM (BLKmode, const0_rtx);
1041 else if (mode == VOIDmode)
1042 x = const0_rtx;
1043 else
1044 x = gen_reg_rtx (mode);
1046 SET_DECL_RTL (var, x);
1049 /* A subroutine of expand_one_var. VAR is a variable that will be
1050 allocated to the local stack frame. Return true if we wish to
1051 add VAR to STACK_VARS so that it will be coalesced with other
1052 variables. Return false to allocate VAR immediately.
1054 This function is used to reduce the number of variables considered
1055 for coalescing, which reduces the size of the quadratic problem. */
1057 static bool
1058 defer_stack_allocation (tree var, bool toplevel)
1060 /* If stack protection is enabled, *all* stack variables must be deferred,
1061 so that we can re-order the strings to the top of the frame. */
1062 if (flag_stack_protect)
1063 return true;
1065 /* We handle "large" alignment via dynamic allocation. We want to handle
1066 this extra complication in only one place, so defer them. */
1067 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1068 return true;
1070 /* Variables in the outermost scope automatically conflict with
1071 every other variable. The only reason to want to defer them
1072 at all is that, after sorting, we can more efficiently pack
1073 small variables in the stack frame. Continue to defer at -O2. */
1074 if (toplevel && optimize < 2)
1075 return false;
1077 /* Without optimization, *most* variables are allocated from the
1078 stack, which makes the quadratic problem large exactly when we
1079 want compilation to proceed as quickly as possible. On the
1080 other hand, we don't want the function's stack frame size to
1081 get completely out of hand. So we avoid adding scalars and
1082 "small" aggregates to the list at all. */
1083 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1084 return false;
1086 return true;
1089 /* A subroutine of expand_used_vars. Expand one variable according to
1090 its flavor. Variables to be placed on the stack are not actually
1091 expanded yet, merely recorded.
1092 When REALLY_EXPAND is false, only add stack values to be allocated.
1093 Return the stack usage this variable is supposed to take. */
1096 static HOST_WIDE_INT
1097 expand_one_var (tree var, bool toplevel, bool really_expand)
1099 unsigned int align = BITS_PER_UNIT;
1100 tree origvar = var;
1102 var = SSAVAR (var);
1104 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1106 /* Because we don't know if VAR will be in a register or on the stack,
1107 we conservatively assume it will be on the stack even if VAR is
1108 eventually put into a register after the RA pass. For non-automatic
1109 variables, which won't be on stack, we collect alignment of
1110 type and ignore user specified alignment. */
1111 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1112 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1113 TYPE_MODE (TREE_TYPE (var)),
1114 TYPE_ALIGN (TREE_TYPE (var)));
1115 else if (DECL_HAS_VALUE_EXPR_P (var)
1116 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1117 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1118 or variables which were assigned a stack slot already by
1119 expand_one_stack_var_at - in the latter case DECL_ALIGN has already
1120 been adjusted to match the offset chosen for it. */
1121 align = crtl->stack_alignment_estimated;
1122 else
1123 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1125 /* If the variable alignment is very large we'll dynamically allocate
1126 it, which means that the in-frame portion is just a pointer. */
1127 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1128 align = POINTER_SIZE;
1131 if (SUPPORTS_STACK_ALIGNMENT
1132 && crtl->stack_alignment_estimated < align)
1134 /* stack_alignment_estimated shouldn't change after the stack
1135 realign decision has been made. */
1136 gcc_assert(!crtl->stack_realign_processed);
1137 crtl->stack_alignment_estimated = align;
1140 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1141 So here we only make sure stack_alignment_needed >= align. */
1142 if (crtl->stack_alignment_needed < align)
1143 crtl->stack_alignment_needed = align;
1144 if (crtl->max_used_stack_slot_alignment < align)
1145 crtl->max_used_stack_slot_alignment = align;
1147 if (TREE_CODE (origvar) == SSA_NAME)
1149 gcc_assert (TREE_CODE (var) != VAR_DECL
1150 || (!DECL_EXTERNAL (var)
1151 && !DECL_HAS_VALUE_EXPR_P (var)
1152 && !TREE_STATIC (var)
1153 && TREE_TYPE (var) != error_mark_node
1154 && !DECL_HARD_REGISTER (var)
1155 && really_expand));
1157 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1159 else if (DECL_EXTERNAL (var))
1161 else if (DECL_HAS_VALUE_EXPR_P (var))
1163 else if (TREE_STATIC (var))
1165 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1167 else if (TREE_TYPE (var) == error_mark_node)
1169 if (really_expand)
1170 expand_one_error_var (var);
1172 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1174 if (really_expand)
1175 expand_one_hard_reg_var (var);
1177 else if (use_register_for_decl (var))
1179 if (really_expand)
1180 expand_one_register_var (origvar);
1182 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1184 /* Reject variables which cover more than half of the address-space. */
1185 if (really_expand)
1187 error ("size of variable %q+D is too large", var);
1188 expand_one_error_var (var);
1191 else if (defer_stack_allocation (var, toplevel))
1192 add_stack_var (origvar);
1193 else
1195 if (really_expand)
1196 expand_one_stack_var (origvar);
1197 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1199 return 0;
1202 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1203 expanding variables. Those variables that can be put into registers
1204 are allocated pseudos; those that can't are put on the stack.
1206 TOPLEVEL is true if this is the outermost BLOCK. */
1208 static void
1209 expand_used_vars_for_block (tree block, bool toplevel)
1211 tree t;
1213 /* Expand all variables at this level. */
1214 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1215 if (TREE_USED (t)
1216 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1217 || !DECL_NONSHAREABLE (t)))
1218 expand_one_var (t, toplevel, true);
1220 /* Expand all variables at containing levels. */
1221 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1222 expand_used_vars_for_block (t, false);
1225 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1226 and clear TREE_USED on all local variables. */
1228 static void
1229 clear_tree_used (tree block)
1231 tree t;
1233 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1234 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1235 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1236 || !DECL_NONSHAREABLE (t))
1237 TREE_USED (t) = 0;
1239 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1240 clear_tree_used (t);
1243 /* Examine TYPE and determine a bit mask of the following features. */
1245 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1246 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1247 #define SPCT_HAS_ARRAY 4
1248 #define SPCT_HAS_AGGREGATE 8
1250 static unsigned int
1251 stack_protect_classify_type (tree type)
1253 unsigned int ret = 0;
1254 tree t;
1256 switch (TREE_CODE (type))
1258 case ARRAY_TYPE:
1259 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1260 if (t == char_type_node
1261 || t == signed_char_type_node
1262 || t == unsigned_char_type_node)
1264 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1265 unsigned HOST_WIDE_INT len;
1267 if (!TYPE_SIZE_UNIT (type)
1268 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1269 len = max;
1270 else
1271 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1273 if (len < max)
1274 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1275 else
1276 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1278 else
1279 ret = SPCT_HAS_ARRAY;
1280 break;
1282 case UNION_TYPE:
1283 case QUAL_UNION_TYPE:
1284 case RECORD_TYPE:
1285 ret = SPCT_HAS_AGGREGATE;
1286 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1287 if (TREE_CODE (t) == FIELD_DECL)
1288 ret |= stack_protect_classify_type (TREE_TYPE (t));
1289 break;
1291 default:
1292 break;
1295 return ret;
1298 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1299 part of the local stack frame. Remember if we ever return nonzero for
1300 any variable in this function. The return value is the phase number in
1301 which the variable should be allocated. */
1303 static int
1304 stack_protect_decl_phase (tree decl)
1306 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1307 int ret = 0;
1309 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1310 has_short_buffer = true;
1312 if (flag_stack_protect == 2)
1314 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1315 && !(bits & SPCT_HAS_AGGREGATE))
1316 ret = 1;
1317 else if (bits & SPCT_HAS_ARRAY)
1318 ret = 2;
1320 else
1321 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1323 if (ret)
1324 has_protected_decls = true;
1326 return ret;
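/* Illustrative standalone sketch, not part of cfgexpand.c: the phase choice
   above restated as a small pure function over the SPCT_* classification
   bits defined earlier in this file; PROTECT_ALL corresponds to
   flag_stack_protect == 2 (-fstack-protector-all).  Phase-1 variables are
   laid out first, closest to the guard: */

#include <stdbool.h>

static int
demo_protect_phase (unsigned bits, bool protect_all)
{
  if (!protect_all)
    /* Default -fstack-protector: only large character buffers move.  */
    return (bits & SPCT_HAS_LARGE_CHAR_ARRAY) ? 1 : 0;

  if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
      && !(bits & SPCT_HAS_AGGREGATE))
    return 1;                     /* bare character arrays */
  if (bits & SPCT_HAS_ARRAY)
    return 2;                     /* other arrays */
  return 0;                       /* everything else stays unprotected */
}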
1329 /* Two helper routines that check for phase 1 and phase 2. These are used
1330 as callbacks for expand_stack_vars. */
1332 static bool
1333 stack_protect_decl_phase_1 (tree decl)
1335 return stack_protect_decl_phase (decl) == 1;
1338 static bool
1339 stack_protect_decl_phase_2 (tree decl)
1341 return stack_protect_decl_phase (decl) == 2;
1344 /* Ensure that variables in different stack protection phases conflict
1345 so that they are not merged and share the same stack slot. */
1347 static void
1348 add_stack_protection_conflicts (void)
1350 size_t i, j, n = stack_vars_num;
1351 unsigned char *phase;
1353 phase = XNEWVEC (unsigned char, n);
1354 for (i = 0; i < n; ++i)
1355 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1357 for (i = 0; i < n; ++i)
1359 unsigned char ph_i = phase[i];
1360 for (j = i + 1; j < n; ++j)
1361 if (ph_i != phase[j])
1362 add_stack_var_conflict (i, j);
1365 XDELETEVEC (phase);
1368 /* Create a decl for the guard at the top of the stack frame. */
1370 static void
1371 create_stack_guard (void)
1373 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1374 VAR_DECL, NULL, ptr_type_node);
1375 TREE_THIS_VOLATILE (guard) = 1;
1376 TREE_USED (guard) = 1;
1377 expand_one_stack_var (guard);
1378 crtl->stack_protect_guard = guard;
1381 /* Prepare for expanding variables. */
1382 static void
1383 init_vars_expansion (void)
1385 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1386 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1388 /* A map from decl to stack partition. */
1389 decl_to_stack_part = pointer_map_create ();
1391 /* Initialize local stack smashing state. */
1392 has_protected_decls = false;
1393 has_short_buffer = false;
1396 /* Free up stack variable graph data. */
1397 static void
1398 fini_vars_expansion (void)
1400 bitmap_obstack_release (&stack_var_bitmap_obstack);
1401 if (stack_vars)
1402 XDELETEVEC (stack_vars);
1403 if (stack_vars_sorted)
1404 XDELETEVEC (stack_vars_sorted);
1405 stack_vars = NULL;
1406 stack_vars_sorted = NULL;
1407 stack_vars_alloc = stack_vars_num = 0;
1408 pointer_map_destroy (decl_to_stack_part);
1409 decl_to_stack_part = NULL;
1412 /* Make a fair guess for the size of the stack frame of the function
1413 in NODE. This doesn't have to be exact; the result is only used in
1414 the inline heuristics. So we don't want to run the full stack var
1415 packing algorithm (which is quadratic in the number of stack vars).
1416 Instead, we calculate the total size of all stack vars. This turns
1417 out to be a pretty fair estimate -- packing of stack vars doesn't
1418 happen very often. */
1420 HOST_WIDE_INT
1421 estimated_stack_frame_size (struct cgraph_node *node)
1423 HOST_WIDE_INT size = 0;
1424 size_t i;
1425 tree var;
1426 tree old_cur_fun_decl = current_function_decl;
1427 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1429 current_function_decl = node->symbol.decl;
1430 push_cfun (fn);
1432 init_vars_expansion ();
1434 FOR_EACH_LOCAL_DECL (fn, i, var)
1435 if (auto_var_in_fn_p (var, fn->decl))
1436 size += expand_one_var (var, true, false);
1438 if (stack_vars_num > 0)
1440 /* Fake sorting the stack vars for account_stack_vars (). */
1441 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1442 for (i = 0; i < stack_vars_num; ++i)
1443 stack_vars_sorted[i] = i;
1444 size += account_stack_vars ();
1447 fini_vars_expansion ();
1448 pop_cfun ();
1449 current_function_decl = old_cur_fun_decl;
1450 return size;
1453 /* Expand all variables used in the function. */
1455 static void
1456 expand_used_vars (void)
1458 tree var, outer_block = DECL_INITIAL (current_function_decl);
1459 VEC(tree,heap) *maybe_local_decls = NULL;
1460 struct pointer_map_t *ssa_name_decls;
1461 unsigned i;
1462 unsigned len;
1464 /* Compute the phase of the stack frame for this function. */
1466 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1467 int off = STARTING_FRAME_OFFSET % align;
1468 frame_phase = off ? align - off : 0;
1471 /* Set TREE_USED on all variables in the local_decls. */
1472 FOR_EACH_LOCAL_DECL (cfun, i, var)
1473 TREE_USED (var) = 1;
1474 /* Clear TREE_USED on all variables associated with a block scope. */
1475 clear_tree_used (DECL_INITIAL (current_function_decl));
1477 init_vars_expansion ();
1479 ssa_name_decls = pointer_map_create ();
1480 for (i = 0; i < SA.map->num_partitions; i++)
1482 tree var = partition_to_var (SA.map, i);
1484 gcc_assert (!virtual_operand_p (var));
1486 /* Assign decls to each SSA name partition, share decls for partitions
1487 we could have coalesced (those with the same type). */
1488 if (SSA_NAME_VAR (var) == NULL_TREE)
1490 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1491 if (!*slot)
1492 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1493 replace_ssa_name_symbol (var, (tree) *slot);
1496 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1497 expand_one_var (var, true, true);
1498 else
1500 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1501 contain the default def (representing the parm or result itself)
1502 we don't do anything here. But those which don't contain the
1503 default def (representing a temporary based on the parm/result)
1504 we need to allocate space just like for normal VAR_DECLs. */
1505 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1507 expand_one_var (var, true, true);
1508 gcc_assert (SA.partition_to_pseudo[i]);
1512 pointer_map_destroy (ssa_name_decls);
1514 /* At this point all variables on the local_decls with TREE_USED
1515 set are not associated with any block scope. Lay them out. */
1517 len = VEC_length (tree, cfun->local_decls);
1518 FOR_EACH_LOCAL_DECL (cfun, i, var)
1520 bool expand_now = false;
1522 /* Expanded above already. */
1523 if (is_gimple_reg (var))
1525 TREE_USED (var) = 0;
1526 goto next;
1528 /* We didn't set a block for static or extern because it's hard
1529 to tell the difference between a global variable (re)declared
1530 in a local scope, and one that's really declared there to
1531 begin with. And it doesn't really matter much, since we're
1532 not giving them stack space. Expand them now. */
1533 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1534 expand_now = true;
1536 /* If the variable is not associated with any block, then it
1537 was created by the optimizers, and could be live anywhere
1538 in the function. */
1539 else if (TREE_USED (var))
1540 expand_now = true;
1542 /* Finally, mark all variables on the list as used. We'll use
1543 this in a moment when we expand those associated with scopes. */
1544 TREE_USED (var) = 1;
1546 if (expand_now)
1547 expand_one_var (var, true, true);
1549 next:
1550 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1552 rtx rtl = DECL_RTL_IF_SET (var);
1554 /* Keep artificial non-ignored vars in cfun->local_decls
1555 chain until instantiate_decls. */
1556 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1557 add_local_decl (cfun, var);
1558 else if (rtl == NULL_RTX)
1559 /* If rtl isn't set yet, which can happen e.g. with
1560 -fstack-protector, retry before returning from this
1561 function. */
1562 VEC_safe_push (tree, heap, maybe_local_decls, var);
1566 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1568 +-----------------+-----------------+
1569 | ...processed... | ...duplicates...|
1570 +-----------------+-----------------+
1572 ^-- LEN points here (at the first duplicate).
1574 We just want the duplicates, as those are the artificial
1575 non-ignored vars that we want to keep until instantiate_decls.
1576 Move them down and truncate the array. */
1577 if (!VEC_empty (tree, cfun->local_decls))
1578 VEC_block_remove (tree, cfun->local_decls, 0, len);
1580 /* At this point, all variables within the block tree with TREE_USED
1581 set are actually used by the optimized function. Lay them out. */
1582 expand_used_vars_for_block (outer_block, true);
1584 if (stack_vars_num > 0)
1586 add_scope_conflicts ();
1588 /* If stack protection is enabled, we don't share space between
1589 vulnerable data and non-vulnerable data. */
1590 if (flag_stack_protect)
1591 add_stack_protection_conflicts ();
1593 /* Now that we have collected all stack variables, and have computed a
1594 minimal interference graph, attempt to save some stack space. */
1595 partition_stack_vars ();
1596 if (dump_file)
1597 dump_stack_var_partition ();
1600 /* There are several conditions under which we should create a
1601 stack guard: protect-all, alloca used, protected decls present. */
1602 if (flag_stack_protect == 2
1603 || (flag_stack_protect
1604 && (cfun->calls_alloca || has_protected_decls)))
1605 create_stack_guard ();
1607 /* Assign rtl to each variable based on these partitions. */
1608 if (stack_vars_num > 0)
1610 /* Reorder decls to be protected by iterating over the variables
1611 array multiple times, and allocating out of each phase in turn. */
1612 /* ??? We could probably integrate this into the qsort we did
1613 earlier, such that we naturally see these variables first,
1614 and thus naturally allocate things in the right order. */
1615 if (has_protected_decls)
1617 /* Phase 1 contains only character arrays. */
1618 expand_stack_vars (stack_protect_decl_phase_1);
1620 /* Phase 2 contains other kinds of arrays. */
1621 if (flag_stack_protect == 2)
1622 expand_stack_vars (stack_protect_decl_phase_2);
1625 expand_stack_vars (NULL);
1628 fini_vars_expansion ();
1630 /* If there were any artificial non-ignored vars without rtl
1631 found earlier, see if deferred stack allocation hasn't assigned
1632 rtl to them. */
1633 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1635 rtx rtl = DECL_RTL_IF_SET (var);
1637 /* Keep artificial non-ignored vars in cfun->local_decls
1638 chain until instantiate_decls. */
1639 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1640 add_local_decl (cfun, var);
1642 VEC_free (tree, heap, maybe_local_decls);
1644 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1645 if (STACK_ALIGNMENT_NEEDED)
1647 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1648 if (!FRAME_GROWS_DOWNWARD)
1649 frame_offset += align - 1;
1650 frame_offset &= -align;
1655 /* If we need to produce a detailed dump, print the tree representation
1656 for STMT to the dump file. SINCE is the last RTX after which the RTL
1657 generated for STMT should have been appended. */
1659 static void
1660 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1662 if (dump_file && (dump_flags & TDF_DETAILS))
1664 fprintf (dump_file, "\n;; ");
1665 print_gimple_stmt (dump_file, stmt, 0,
1666 TDF_SLIM | (dump_flags & TDF_LINENO));
1667 fprintf (dump_file, "\n");
1669 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1673 /* Maps the blocks that do not contain tree labels to rtx labels. */
1675 static struct pointer_map_t *lab_rtx_for_bb;
1677 /* Returns the label_rtx expression for a label starting basic block BB. */
1679 static rtx
1680 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1682 gimple_stmt_iterator gsi;
1683 tree lab;
1684 gimple lab_stmt;
1685 void **elt;
1687 if (bb->flags & BB_RTL)
1688 return block_label (bb);
1690 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1691 if (elt)
1692 return (rtx) *elt;
1694 /* Find the tree label if it is present. */
1696 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1698 lab_stmt = gsi_stmt (gsi);
1699 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1700 break;
1702 lab = gimple_label_label (lab_stmt);
1703 if (DECL_NONLOCAL (lab))
1704 break;
1706 return label_rtx (lab);
1709 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1710 *elt = gen_label_rtx ();
1711 return (rtx) *elt;
1715 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1716 of a basic block where we just expanded the conditional at the end,
1717 possibly clean up the CFG and instruction sequence. LAST is the
1718 last instruction before the just emitted jump sequence. */
1720 static void
1721 maybe_cleanup_end_of_block (edge e, rtx last)
1723 /* Special case: when jumpif decides that the condition is
1724 trivial it emits an unconditional jump (and the necessary
1725 barrier). But we still have two edges, the fallthru one is
1726 wrong. purge_dead_edges would clean this up later. Unfortunately
1727 we have to insert insns (and split edges) before
1728 find_many_sub_basic_blocks and hence before purge_dead_edges.
1729 But splitting edges might create new blocks which depend on the
1730 fact that if there are two edges there's no barrier. So the
1731 barrier would get lost and verify_flow_info would ICE. Instead
1732 of auditing all edge splitters to care for the barrier (which
1733 normally isn't there in a cleaned CFG), fix it here. */
1734 if (BARRIER_P (get_last_insn ()))
1736 rtx insn;
1737 remove_edge (e);
1738 /* Now, we have a single successor block, if we have insns to
1739 insert on the remaining edge we potentially will insert
1740 it at the end of this block (if the dest block isn't feasible)
1741 in order to avoid splitting the edge. This insertion will take
1742 place in front of the last jump. But we might have emitted
1743 multiple jumps (conditional and one unconditional) to the
1744 same destination. Inserting in front of the last one then
1745 is a problem. See PR 40021. We fix this by deleting all
1746 jumps except the last unconditional one. */
1747 insn = PREV_INSN (get_last_insn ());
1748 /* Make sure we have an unconditional jump. Otherwise we're
1749 confused. */
1750 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1751 for (insn = PREV_INSN (insn); insn != last;)
1753 insn = PREV_INSN (insn);
1754 if (JUMP_P (NEXT_INSN (insn)))
1756 if (!any_condjump_p (NEXT_INSN (insn)))
1758 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1759 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1761 delete_insn (NEXT_INSN (insn));
1767 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1768 Returns a new basic block if we've terminated the current basic
1769 block and created a new one. */
1771 static basic_block
1772 expand_gimple_cond (basic_block bb, gimple stmt)
1774 basic_block new_bb, dest;
1775 edge new_edge;
1776 edge true_edge;
1777 edge false_edge;
1778 rtx last2, last;
1779 enum tree_code code;
1780 tree op0, op1;
1782 code = gimple_cond_code (stmt);
1783 op0 = gimple_cond_lhs (stmt);
1784 op1 = gimple_cond_rhs (stmt);
1785 /* We're sometimes presented with such code:
1786 D.123_1 = x < y;
1787 if (D.123_1 != 0)
1789 This would expand to two comparisons which then later might
1790 be cleaned up by combine. But some pattern matchers like if-conversion
1791 work better when there's only one compare, so make up for this
1792 here as a special exception if TER would have made the same change. */
1793 if (gimple_cond_single_var_p (stmt)
1794 && SA.values
1795 && TREE_CODE (op0) == SSA_NAME
1796 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1798 gimple second = SSA_NAME_DEF_STMT (op0);
1799 if (gimple_code (second) == GIMPLE_ASSIGN)
1801 enum tree_code code2 = gimple_assign_rhs_code (second);
1802 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1804 code = code2;
1805 op0 = gimple_assign_rhs1 (second);
1806 op1 = gimple_assign_rhs2 (second);
1808 /* If jumps are cheap turn some more codes into
1809 jumpy sequences. */
1810 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1812 if ((code2 == BIT_AND_EXPR
1813 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1814 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1815 || code2 == TRUTH_AND_EXPR)
1817 code = TRUTH_ANDIF_EXPR;
1818 op0 = gimple_assign_rhs1 (second);
1819 op1 = gimple_assign_rhs2 (second);
1821 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1823 code = TRUTH_ORIF_EXPR;
1824 op0 = gimple_assign_rhs1 (second);
1825 op1 = gimple_assign_rhs2 (second);
1831 last2 = last = get_last_insn ();
1833 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1834 set_curr_insn_source_location (gimple_location (stmt));
1835 set_curr_insn_block (gimple_block (stmt));
1837 /* These flags have no purpose in RTL land. */
1838 true_edge->flags &= ~EDGE_TRUE_VALUE;
1839 false_edge->flags &= ~EDGE_FALSE_VALUE;
1841 /* We can either have a pure conditional jump with one fallthru edge or
1842 a two-way jump that needs to be decomposed into two basic blocks. */
1843 if (false_edge->dest == bb->next_bb)
1845 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1846 true_edge->probability);
1847 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1848 if (true_edge->goto_locus)
1850 set_curr_insn_source_location (true_edge->goto_locus);
1851 set_curr_insn_block (true_edge->goto_block);
1852 true_edge->goto_locus = curr_insn_locator ();
1854 true_edge->goto_block = NULL;
1855 false_edge->flags |= EDGE_FALLTHRU;
1856 maybe_cleanup_end_of_block (false_edge, last);
1857 return NULL;
1859 if (true_edge->dest == bb->next_bb)
1861 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1862 false_edge->probability);
1863 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1864 if (false_edge->goto_locus)
1866 set_curr_insn_source_location (false_edge->goto_locus);
1867 set_curr_insn_block (false_edge->goto_block);
1868 false_edge->goto_locus = curr_insn_locator ();
1870 false_edge->goto_block = NULL;
1871 true_edge->flags |= EDGE_FALLTHRU;
1872 maybe_cleanup_end_of_block (true_edge, last);
1873 return NULL;
1876 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1877 true_edge->probability);
1878 last = get_last_insn ();
1879 if (false_edge->goto_locus)
1881 set_curr_insn_source_location (false_edge->goto_locus);
1882 set_curr_insn_block (false_edge->goto_block);
1883 false_edge->goto_locus = curr_insn_locator ();
1885 false_edge->goto_block = NULL;
1886 emit_jump (label_rtx_for_bb (false_edge->dest));
1888 BB_END (bb) = last;
1889 if (BARRIER_P (BB_END (bb)))
1890 BB_END (bb) = PREV_INSN (BB_END (bb));
1891 update_bb_for_insn (bb);
1893 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1894 dest = false_edge->dest;
1895 redirect_edge_succ (false_edge, new_bb);
1896 false_edge->flags |= EDGE_FALLTHRU;
1897 new_bb->count = false_edge->count;
1898 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1899 if (current_loops && bb->loop_father)
1900 add_bb_to_loop (new_bb, bb->loop_father);
1901 new_edge = make_edge (new_bb, dest, 0);
1902 new_edge->probability = REG_BR_PROB_BASE;
1903 new_edge->count = new_bb->count;
1904 if (BARRIER_P (BB_END (new_bb)))
1905 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1906 update_bb_for_insn (new_bb);
1908 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1910 if (true_edge->goto_locus)
1912 set_curr_insn_source_location (true_edge->goto_locus);
1913 set_curr_insn_block (true_edge->goto_block);
1914 true_edge->goto_locus = curr_insn_locator ();
1916 true_edge->goto_block = NULL;
1918 return new_bb;
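/* Illustrative sketch (not part of cfgexpand.c): the cheap-branch rewrite
   above turns a BIT_AND_EXPR on one-bit operands (or a BIT_IOR_EXPR) used as
   a condition into TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR, so it expands as a
   short-circuit jump sequence.  For operands that are already evaluated (as
   SSA names are), both forms test the same condition; the standalone program
   below merely checks that equivalence over all boolean operand values.  */
#include <assert.h>
#include <stdbool.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        /* Bitwise forms, as in the original one-bit BIT_AND_EXPR/BIT_IOR_EXPR.  */
        bool band = (bool) a & (bool) b;
        bool bior = (bool) a | (bool) b;
        /* Short-circuit forms, as produced by the rewrite.  */
        assert (band == ((bool) a && (bool) b));
        assert (bior == ((bool) a || (bool) b));
      }
  return 0;
}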
1921 /* Mark all calls that can have a transaction restart. */
1923 static void
1924 mark_transaction_restart_calls (gimple stmt)
1926 struct tm_restart_node dummy;
1927 void **slot;
1929 if (!cfun->gimple_df->tm_restart)
1930 return;
1932 dummy.stmt = stmt;
1933 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1934 if (slot)
1936 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1937 tree list = n->label_or_list;
1938 rtx insn;
1940 for (insn = next_real_insn (get_last_insn ());
1941 !CALL_P (insn);
1942 insn = next_real_insn (insn))
1943 continue;
1945 if (TREE_CODE (list) == LABEL_DECL)
1946 add_reg_note (insn, REG_TM, label_rtx (list));
1947 else
1948 for (; list ; list = TREE_CHAIN (list))
1949 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1953 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1954 statement STMT. */
1956 static void
1957 expand_call_stmt (gimple stmt)
1959 tree exp, decl, lhs;
1960 bool builtin_p;
1961 size_t i;
1963 if (gimple_call_internal_p (stmt))
1965 expand_internal_call (stmt);
1966 return;
1969 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1971 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1972 decl = gimple_call_fndecl (stmt);
1973 builtin_p = decl && DECL_BUILT_IN (decl);
1975 /* If this is not a builtin function, the function type through which the
1976 call is made may be different from the type of the function. */
1977 if (!builtin_p)
1978 CALL_EXPR_FN (exp)
1979 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1980 CALL_EXPR_FN (exp));
1982 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1983 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1985 for (i = 0; i < gimple_call_num_args (stmt); i++)
1987 tree arg = gimple_call_arg (stmt, i);
1988 gimple def;
1989 /* TER substitutes addresses into the arguments of builtin functions so
1990 we have a chance to infer more accurate alignment information. See PR39954. */
1991 if (builtin_p
1992 && TREE_CODE (arg) == SSA_NAME
1993 && (def = get_gimple_for_ssa_name (arg))
1994 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1995 arg = gimple_assign_rhs1 (def);
1996 CALL_EXPR_ARG (exp, i) = arg;
1999 if (gimple_has_side_effects (stmt))
2000 TREE_SIDE_EFFECTS (exp) = 1;
2002 if (gimple_call_nothrow_p (stmt))
2003 TREE_NOTHROW (exp) = 1;
2005 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2006 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2007 if (decl
2008 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2009 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2010 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2011 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2012 else
2013 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2014 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2015 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2016 TREE_BLOCK (exp) = gimple_block (stmt);
2018 /* Ensure RTL is created for debug args. */
2019 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2021 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2022 unsigned int ix;
2023 tree dtemp;
2025 if (debug_args)
2026 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2028 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2029 expand_debug_expr (dtemp);
2033 lhs = gimple_call_lhs (stmt);
2034 if (lhs)
2035 expand_assignment (lhs, exp, false);
2036 else
2037 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2039 mark_transaction_restart_calls (stmt);
2042 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2043 STMT that doesn't require special handling for outgoing edges. That
2044 is, no tailcalls and no GIMPLE_COND. */
2046 static void
2047 expand_gimple_stmt_1 (gimple stmt)
2049 tree op0;
2051 set_curr_insn_source_location (gimple_location (stmt));
2052 set_curr_insn_block (gimple_block (stmt));
2054 switch (gimple_code (stmt))
2056 case GIMPLE_GOTO:
2057 op0 = gimple_goto_dest (stmt);
2058 if (TREE_CODE (op0) == LABEL_DECL)
2059 expand_goto (op0);
2060 else
2061 expand_computed_goto (op0);
2062 break;
2063 case GIMPLE_LABEL:
2064 expand_label (gimple_label_label (stmt));
2065 break;
2066 case GIMPLE_NOP:
2067 case GIMPLE_PREDICT:
2068 break;
2069 case GIMPLE_SWITCH:
2070 expand_case (stmt);
2071 break;
2072 case GIMPLE_ASM:
2073 expand_asm_stmt (stmt);
2074 break;
2075 case GIMPLE_CALL:
2076 expand_call_stmt (stmt);
2077 break;
2079 case GIMPLE_RETURN:
2080 op0 = gimple_return_retval (stmt);
2082 if (op0 && op0 != error_mark_node)
2084 tree result = DECL_RESULT (current_function_decl);
2086 /* If we are not returning the current function's RESULT_DECL,
2087 build an assignment to it. */
2088 if (op0 != result)
2090 /* I believe that a function's RESULT_DECL is unique. */
2091 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2093 /* ??? We'd like to use simply expand_assignment here,
2094 but this fails if the value is of BLKmode but the return
2095 decl is a register. expand_return has special handling
2096 for this combination, which eventually should move
2097 to common code. See comments there. Until then, let's
2098 build a modify expression :-/ */
2099 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2100 result, op0);
2103 if (!op0)
2104 expand_null_return ();
2105 else
2106 expand_return (op0);
2107 break;
2109 case GIMPLE_ASSIGN:
2111 tree lhs = gimple_assign_lhs (stmt);
2113 /* Tree expand used to fiddle with |= and &= of two bitfield
2114 COMPONENT_REFs here. This can't happen with gimple; the LHS
2115 of binary assigns must be a gimple reg. */
2117 if (TREE_CODE (lhs) != SSA_NAME
2118 || get_gimple_rhs_class (gimple_expr_code (stmt))
2119 == GIMPLE_SINGLE_RHS)
2121 tree rhs = gimple_assign_rhs1 (stmt);
2122 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2123 == GIMPLE_SINGLE_RHS);
2124 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2125 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2126 if (TREE_CLOBBER_P (rhs))
2127 /* This is a clobber marking that this LHS is going
2128 out of scope. */
2130 else
2131 expand_assignment (lhs, rhs,
2132 gimple_assign_nontemporal_move_p (stmt));
2134 else
2136 rtx target, temp;
2137 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2138 struct separate_ops ops;
2139 bool promoted = false;
2141 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2142 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2143 promoted = true;
2145 ops.code = gimple_assign_rhs_code (stmt);
2146 ops.type = TREE_TYPE (lhs);
2147 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2149 case GIMPLE_TERNARY_RHS:
2150 ops.op2 = gimple_assign_rhs3 (stmt);
2151 /* Fallthru */
2152 case GIMPLE_BINARY_RHS:
2153 ops.op1 = gimple_assign_rhs2 (stmt);
2154 /* Fallthru */
2155 case GIMPLE_UNARY_RHS:
2156 ops.op0 = gimple_assign_rhs1 (stmt);
2157 break;
2158 default:
2159 gcc_unreachable ();
2161 ops.location = gimple_location (stmt);
2163 /* If we want to use a nontemporal store, force the value into a
2164 register first. If we store into a promoted register,
2165 don't expand directly to target. */
2166 temp = nontemporal || promoted ? NULL_RTX : target;
2167 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2168 EXPAND_NORMAL);
2170 if (temp == target)
2172 else if (promoted)
2174 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2175 /* If TEMP is a VOIDmode constant, use convert_modes to make
2176 sure that we properly convert it. */
2177 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2179 temp = convert_modes (GET_MODE (target),
2180 TYPE_MODE (ops.type),
2181 temp, unsignedp);
2182 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2183 GET_MODE (target), temp, unsignedp);
2186 convert_move (SUBREG_REG (target), temp, unsignedp);
2188 else if (nontemporal && emit_storent_insn (target, temp))
2190 else
2192 temp = force_operand (temp, target);
2193 if (temp != target)
2194 emit_move_insn (target, temp);
2198 break;
2200 default:
2201 gcc_unreachable ();
2205 /* Expand one gimple statement STMT and return the last RTL instruction
2206 before any of the newly generated ones.
2208 In addition to generating the necessary RTL instructions this also
2209 sets REG_EH_REGION notes if necessary and sets the current source
2210 location for diagnostics. */
2212 static rtx
2213 expand_gimple_stmt (gimple stmt)
2215 location_t saved_location = input_location;
2216 rtx last = get_last_insn ();
2217 int lp_nr;
2219 gcc_assert (cfun);
2221 /* We need to save and restore the current source location so that errors
2222 discovered during expansion are emitted with the right location. But
2223 it would be better if the diagnostic routines used the source location
2224 embedded in the tree nodes rather than globals. */
2225 if (gimple_has_location (stmt))
2226 input_location = gimple_location (stmt);
2228 expand_gimple_stmt_1 (stmt);
2230 /* Free any temporaries used to evaluate this statement. */
2231 free_temp_slots ();
2233 input_location = saved_location;
2235 /* Mark all insns that may trap. */
2236 lp_nr = lookup_stmt_eh_lp (stmt);
2237 if (lp_nr)
2239 rtx insn;
2240 for (insn = next_real_insn (last); insn;
2241 insn = next_real_insn (insn))
2243 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2244 /* If we want exceptions for non-call insns, any
2245 may_trap_p instruction may throw. */
2246 && GET_CODE (PATTERN (insn)) != CLOBBER
2247 && GET_CODE (PATTERN (insn)) != USE
2248 && insn_could_throw_p (insn))
2249 make_reg_eh_region_note (insn, 0, lp_nr);
2253 return last;
2256 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2257 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2258 generated a tail call (something that might be denied by the ABI
2259 rules governing the call; see calls.c).
2261 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2262 can still reach the rest of BB. The case here is __builtin_sqrt,
2263 where the NaN result goes through the external function (with a
2264 tailcall) and the normal result happens via a sqrt instruction. */
2266 static basic_block
2267 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2269 rtx last2, last;
2270 edge e;
2271 edge_iterator ei;
2272 int probability;
2273 gcov_type count;
2275 last2 = last = expand_gimple_stmt (stmt);
2277 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2278 if (CALL_P (last) && SIBLING_CALL_P (last))
2279 goto found;
2281 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2283 *can_fallthru = true;
2284 return NULL;
2286 found:
2287 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2288 Any instructions emitted here are about to be deleted. */
2289 do_pending_stack_adjust ();
2291 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2292 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2293 EH or abnormal edges, we shouldn't have created a tail call in
2294 the first place. So it seems to me we should just be removing
2295 all edges here, or redirecting the existing fallthru edge to
2296 the exit block. */
2298 probability = 0;
2299 count = 0;
2301 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2303 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2305 if (e->dest != EXIT_BLOCK_PTR)
2307 e->dest->count -= e->count;
2308 e->dest->frequency -= EDGE_FREQUENCY (e);
2309 if (e->dest->count < 0)
2310 e->dest->count = 0;
2311 if (e->dest->frequency < 0)
2312 e->dest->frequency = 0;
2314 count += e->count;
2315 probability += e->probability;
2316 remove_edge (e);
2318 else
2319 ei_next (&ei);
2322 /* This is somewhat ugly: the call_expr expander often emits instructions
2323 after the sibcall (to perform the function return). These confuse the
2324 find_many_sub_basic_blocks code, so we need to get rid of them. */
2325 last = NEXT_INSN (last);
2326 gcc_assert (BARRIER_P (last));
2328 *can_fallthru = false;
2329 while (NEXT_INSN (last))
2331 /* For instance, an sqrt builtin expander expands an if with a
2332 sibcall in the then-arm and a label for the `else` arm. */
2333 if (LABEL_P (NEXT_INSN (last)))
2335 *can_fallthru = true;
2336 break;
2338 delete_insn (NEXT_INSN (last));
2341 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2342 e->probability += probability;
2343 e->count += count;
2344 BB_END (bb) = last;
2345 update_bb_for_insn (bb);
2347 if (NEXT_INSN (last))
2349 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2351 last = BB_END (bb);
2352 if (BARRIER_P (last))
2353 BB_END (bb) = PREV_INSN (last);
2356 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2358 return bb;
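/* Illustrative example (not part of cfgexpand.c) of the conditional tail
   call described in the comment before expand_gimple_tailcall above: on a
   target with a hardware sqrt instruction, a function like the one below may
   expand so that the normal result comes from the sqrt insn inline while the
   NaN/errno path sibling-calls the external sqrt, leaving a fallthru edge
   out of the call's block and hence CAN_FALLTHRU set.  */
double
tailcall_sqrt_example (double x)
{
  return __builtin_sqrt (x);
}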
2361 /* Return the difference between the floor and the truncated result of
2362 a signed division by OP1 with remainder MOD. */
2363 static rtx
2364 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2366 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2367 return gen_rtx_IF_THEN_ELSE
2368 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2369 gen_rtx_IF_THEN_ELSE
2370 (mode, gen_rtx_LT (BImode,
2371 gen_rtx_DIV (mode, op1, mod),
2372 const0_rtx),
2373 constm1_rtx, const0_rtx),
2374 const0_rtx);
2377 /* Return the difference between the ceil and the truncated result of
2378 a signed division by OP1 with remainder MOD. */
2379 static rtx
2380 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2382 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2383 return gen_rtx_IF_THEN_ELSE
2384 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2385 gen_rtx_IF_THEN_ELSE
2386 (mode, gen_rtx_GT (BImode,
2387 gen_rtx_DIV (mode, op1, mod),
2388 const0_rtx),
2389 const1_rtx, const0_rtx),
2390 const0_rtx);
2393 /* Return the difference between the ceil and the truncated result of
2394 an unsigned division by OP1 with remainder MOD. */
2395 static rtx
2396 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2398 /* (mod != 0 ? 1 : 0) */
2399 return gen_rtx_IF_THEN_ELSE
2400 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2401 const1_rtx, const0_rtx);
2404 /* Return the difference between the rounded and the truncated result
2405 of a signed division by OP1 with remainder MOD. Halfway cases are
2406 rounded away from zero, rather than to the nearest even number. */
2407 static rtx
2408 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2410 /* (abs (mod) >= abs (op1) - abs (mod)
2411 ? (op1 / mod > 0 ? 1 : -1)
2412 : 0) */
2413 return gen_rtx_IF_THEN_ELSE
2414 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2415 gen_rtx_MINUS (mode,
2416 gen_rtx_ABS (mode, op1),
2417 gen_rtx_ABS (mode, mod))),
2418 gen_rtx_IF_THEN_ELSE
2419 (mode, gen_rtx_GT (BImode,
2420 gen_rtx_DIV (mode, op1, mod),
2421 const0_rtx),
2422 const1_rtx, constm1_rtx),
2423 const0_rtx);
2426 /* Return the difference between the rounded and the truncated result
2427 of an unsigned division by OP1 with remainder MOD. Halfway cases
2428 are rounded away from zero, rather than to the nearest even
2429 number. */
2430 static rtx
2431 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2433 /* (mod >= op1 - mod ? 1 : 0) */
2434 return gen_rtx_IF_THEN_ELSE
2435 (mode, gen_rtx_GE (BImode, mod,
2436 gen_rtx_MINUS (mode, op1, mod)),
2437 const1_rtx, const0_rtx);
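/* Illustrative sketch (not part of cfgexpand.c): the *_adjust helpers above
   express floor/ceil/round division as truncated division plus a
   remainder-based correction.  The same arithmetic in plain C for signed
   operands looks as follows; the function names here are made up for the
   example.  */
#include <assert.h>
#include <stdlib.h>

/* floor (a/b): subtract 1 when there is a remainder and the operands have
   opposite signs, i.e. b / mod < 0, mirroring floor_sdiv_adjust.  */
static long
floor_div (long a, long b)
{
  long div = a / b, mod = a % b;
  return div + ((mod != 0 && b / mod < 0) ? -1 : 0);
}

/* ceil (a/b): add 1 when there is a remainder and the operands have the
   same sign, mirroring ceil_sdiv_adjust.  */
static long
ceil_div (long a, long b)
{
  long div = a / b, mod = a % b;
  return div + ((mod != 0 && b / mod > 0) ? 1 : 0);
}

/* Round to nearest, halfway cases away from zero: adjust by +/-1 when
   |mod| >= |b| - |mod|, mirroring round_sdiv_adjust.  */
static long
round_div (long a, long b)
{
  long div = a / b, mod = a % b;
  if (mod != 0 && labs (mod) >= labs (b) - labs (mod))
    div += (b / mod > 0) ? 1 : -1;
  return div;
}

int
main (void)
{
  /* -7 / 2 truncates to -3; floor is -4, ceil is -3, and rounding the
     halfway case away from zero gives -4.  */
  assert (floor_div (-7, 2) == -4);
  assert (ceil_div (-7, 2) == -3);
  assert (round_div (-7, 2) == -4);
  assert (floor_div (7, 2) == 3);
  assert (ceil_div (7, 2) == 4);
  assert (round_div (7, 2) == 4);
  return 0;
}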
2440 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2441 any rtl. */
2443 static rtx
2444 convert_debug_memory_address (enum machine_mode mode, rtx x,
2445 addr_space_t as)
2447 enum machine_mode xmode = GET_MODE (x);
2449 #ifndef POINTERS_EXTEND_UNSIGNED
2450 gcc_assert (mode == Pmode
2451 || mode == targetm.addr_space.address_mode (as));
2452 gcc_assert (xmode == mode || xmode == VOIDmode);
2453 #else
2454 rtx temp;
2456 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2458 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2459 return x;
2461 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2462 x = simplify_gen_subreg (mode, x, xmode,
2463 subreg_lowpart_offset
2464 (mode, xmode));
2465 else if (POINTERS_EXTEND_UNSIGNED > 0)
2466 x = gen_rtx_ZERO_EXTEND (mode, x);
2467 else if (!POINTERS_EXTEND_UNSIGNED)
2468 x = gen_rtx_SIGN_EXTEND (mode, x);
2469 else
2471 switch (GET_CODE (x))
2473 case SUBREG:
2474 if ((SUBREG_PROMOTED_VAR_P (x)
2475 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2476 || (GET_CODE (SUBREG_REG (x)) == PLUS
2477 && REG_P (XEXP (SUBREG_REG (x), 0))
2478 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2479 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2480 && GET_MODE (SUBREG_REG (x)) == mode)
2481 return SUBREG_REG (x);
2482 break;
2483 case LABEL_REF:
2484 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2485 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2486 return temp;
2487 case SYMBOL_REF:
2488 temp = shallow_copy_rtx (x);
2489 PUT_MODE (temp, mode);
2490 return temp;
2491 case CONST:
2492 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2493 if (temp)
2494 temp = gen_rtx_CONST (mode, temp);
2495 return temp;
2496 case PLUS:
2497 case MINUS:
2498 if (CONST_INT_P (XEXP (x, 1)))
2500 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2501 if (temp)
2502 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2504 break;
2505 default:
2506 break;
2508 /* Don't know how to express ptr_extend as an operation in debug info. */
2509 return NULL;
2511 #endif /* POINTERS_EXTEND_UNSIGNED */
2513 return x;
2516 /* Return an RTX equivalent to the value of the parameter DECL. */
2518 static rtx
2519 expand_debug_parm_decl (tree decl)
2521 rtx incoming = DECL_INCOMING_RTL (decl);
2523 if (incoming
2524 && GET_MODE (incoming) != BLKmode
2525 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2526 || (MEM_P (incoming)
2527 && REG_P (XEXP (incoming, 0))
2528 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2530 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2532 #ifdef HAVE_window_save
2533 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2534 If the target machine has an explicit window save instruction, the
2535 actual entry value is the corresponding OUTGOING_REGNO instead. */
2536 if (REG_P (incoming)
2537 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2538 incoming
2539 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2540 OUTGOING_REGNO (REGNO (incoming)), 0);
2541 else if (MEM_P (incoming))
2543 rtx reg = XEXP (incoming, 0);
2544 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2546 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2547 incoming = replace_equiv_address_nv (incoming, reg);
2550 #endif
2552 ENTRY_VALUE_EXP (rtl) = incoming;
2553 return rtl;
2556 if (incoming
2557 && GET_MODE (incoming) != BLKmode
2558 && !TREE_ADDRESSABLE (decl)
2559 && MEM_P (incoming)
2560 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2561 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2562 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2563 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2564 return incoming;
2566 return NULL_RTX;
2569 /* Return an RTX equivalent to the value of the tree expression EXP. */
2571 static rtx
2572 expand_debug_expr (tree exp)
2574 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2575 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2576 enum machine_mode inner_mode = VOIDmode;
2577 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2578 addr_space_t as;
2580 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2582 case tcc_expression:
2583 switch (TREE_CODE (exp))
2585 case COND_EXPR:
2586 case DOT_PROD_EXPR:
2587 case WIDEN_MULT_PLUS_EXPR:
2588 case WIDEN_MULT_MINUS_EXPR:
2589 case FMA_EXPR:
2590 goto ternary;
2592 case TRUTH_ANDIF_EXPR:
2593 case TRUTH_ORIF_EXPR:
2594 case TRUTH_AND_EXPR:
2595 case TRUTH_OR_EXPR:
2596 case TRUTH_XOR_EXPR:
2597 goto binary;
2599 case TRUTH_NOT_EXPR:
2600 goto unary;
2602 default:
2603 break;
2605 break;
2607 ternary:
2608 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2609 if (!op2)
2610 return NULL_RTX;
2611 /* Fall through. */
2613 binary:
2614 case tcc_binary:
2615 case tcc_comparison:
2616 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2617 if (!op1)
2618 return NULL_RTX;
2619 /* Fall through. */
2621 unary:
2622 case tcc_unary:
2623 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2624 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2625 if (!op0)
2626 return NULL_RTX;
2627 break;
2629 case tcc_type:
2630 case tcc_statement:
2631 gcc_unreachable ();
2633 case tcc_constant:
2634 case tcc_exceptional:
2635 case tcc_declaration:
2636 case tcc_reference:
2637 case tcc_vl_exp:
2638 break;
2641 switch (TREE_CODE (exp))
2643 case STRING_CST:
2644 if (!lookup_constant_def (exp))
2646 if (strlen (TREE_STRING_POINTER (exp)) + 1
2647 != (size_t) TREE_STRING_LENGTH (exp))
2648 return NULL_RTX;
2649 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2650 op0 = gen_rtx_MEM (BLKmode, op0);
2651 set_mem_attributes (op0, exp, 0);
2652 return op0;
2654 /* Fall through... */
2656 case INTEGER_CST:
2657 case REAL_CST:
2658 case FIXED_CST:
2659 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2660 return op0;
2662 case COMPLEX_CST:
2663 gcc_assert (COMPLEX_MODE_P (mode));
2664 op0 = expand_debug_expr (TREE_REALPART (exp));
2665 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2666 return gen_rtx_CONCAT (mode, op0, op1);
2668 case DEBUG_EXPR_DECL:
2669 op0 = DECL_RTL_IF_SET (exp);
2671 if (op0)
2672 return op0;
2674 op0 = gen_rtx_DEBUG_EXPR (mode);
2675 DEBUG_EXPR_TREE_DECL (op0) = exp;
2676 SET_DECL_RTL (exp, op0);
2678 return op0;
2680 case VAR_DECL:
2681 case PARM_DECL:
2682 case FUNCTION_DECL:
2683 case LABEL_DECL:
2684 case CONST_DECL:
2685 case RESULT_DECL:
2686 op0 = DECL_RTL_IF_SET (exp);
2688 /* This decl was probably optimized away. */
2689 if (!op0)
2691 if (TREE_CODE (exp) != VAR_DECL
2692 || DECL_EXTERNAL (exp)
2693 || !TREE_STATIC (exp)
2694 || !DECL_NAME (exp)
2695 || DECL_HARD_REGISTER (exp)
2696 || DECL_IN_CONSTANT_POOL (exp)
2697 || mode == VOIDmode)
2698 return NULL;
2700 op0 = make_decl_rtl_for_debug (exp);
2701 if (!MEM_P (op0)
2702 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2703 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2704 return NULL;
2706 else
2707 op0 = copy_rtx (op0);
2709 if (GET_MODE (op0) == BLKmode
2710 /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode
2711 below would ICE. While it is likely a FE bug,
2712 try to be robust here. See PR43166. */
2713 || mode == BLKmode
2714 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2716 gcc_assert (MEM_P (op0));
2717 op0 = adjust_address_nv (op0, mode, 0);
2718 return op0;
2721 /* Fall through. */
2723 adjust_mode:
2724 case PAREN_EXPR:
2725 case NOP_EXPR:
2726 case CONVERT_EXPR:
2728 inner_mode = GET_MODE (op0);
2730 if (mode == inner_mode)
2731 return op0;
2733 if (inner_mode == VOIDmode)
2735 if (TREE_CODE (exp) == SSA_NAME)
2736 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2737 else
2738 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2739 if (mode == inner_mode)
2740 return op0;
2743 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2745 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2746 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2747 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2748 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2749 else
2750 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2752 else if (FLOAT_MODE_P (mode))
2754 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2755 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2756 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2757 else
2758 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2760 else if (FLOAT_MODE_P (inner_mode))
2762 if (unsignedp)
2763 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2764 else
2765 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2767 else if (CONSTANT_P (op0)
2768 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2769 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2770 subreg_lowpart_offset (mode,
2771 inner_mode));
2772 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2773 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2774 : unsignedp)
2775 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2776 else
2777 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2779 return op0;
2782 case MEM_REF:
2783 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2785 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2786 TREE_OPERAND (exp, 0),
2787 TREE_OPERAND (exp, 1));
2788 if (newexp)
2789 return expand_debug_expr (newexp);
2791 /* FALLTHROUGH */
2792 case INDIRECT_REF:
2793 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2794 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2795 if (!op0)
2796 return NULL;
2798 if (TREE_CODE (exp) == MEM_REF)
2800 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2801 || (GET_CODE (op0) == PLUS
2802 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2803 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2804 Instead just use get_inner_reference. */
2805 goto component_ref;
2807 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2808 if (!op1 || !CONST_INT_P (op1))
2809 return NULL;
2811 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2814 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2815 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2816 else
2817 as = ADDR_SPACE_GENERIC;
2819 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2820 op0, as);
2821 if (op0 == NULL_RTX)
2822 return NULL;
2824 op0 = gen_rtx_MEM (mode, op0);
2825 set_mem_attributes (op0, exp, 0);
2826 if (TREE_CODE (exp) == MEM_REF
2827 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2828 set_mem_expr (op0, NULL_TREE);
2829 set_mem_addr_space (op0, as);
2831 return op0;
2833 case TARGET_MEM_REF:
2834 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2835 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2836 return NULL;
2838 op0 = expand_debug_expr
2839 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2840 if (!op0)
2841 return NULL;
2843 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2844 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2845 else
2846 as = ADDR_SPACE_GENERIC;
2848 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2849 op0, as);
2850 if (op0 == NULL_RTX)
2851 return NULL;
2853 op0 = gen_rtx_MEM (mode, op0);
2855 set_mem_attributes (op0, exp, 0);
2856 set_mem_addr_space (op0, as);
2858 return op0;
2860 component_ref:
2861 case ARRAY_REF:
2862 case ARRAY_RANGE_REF:
2863 case COMPONENT_REF:
2864 case BIT_FIELD_REF:
2865 case REALPART_EXPR:
2866 case IMAGPART_EXPR:
2867 case VIEW_CONVERT_EXPR:
2869 enum machine_mode mode1;
2870 HOST_WIDE_INT bitsize, bitpos;
2871 tree offset;
2872 int volatilep = 0;
2873 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2874 &mode1, &unsignedp, &volatilep, false);
2875 rtx orig_op0;
2877 if (bitsize == 0)
2878 return NULL;
2880 orig_op0 = op0 = expand_debug_expr (tem);
2882 if (!op0)
2883 return NULL;
2885 if (offset)
2887 enum machine_mode addrmode, offmode;
2889 if (!MEM_P (op0))
2890 return NULL;
2892 op0 = XEXP (op0, 0);
2893 addrmode = GET_MODE (op0);
2894 if (addrmode == VOIDmode)
2895 addrmode = Pmode;
2897 op1 = expand_debug_expr (offset);
2898 if (!op1)
2899 return NULL;
2901 offmode = GET_MODE (op1);
2902 if (offmode == VOIDmode)
2903 offmode = TYPE_MODE (TREE_TYPE (offset));
2905 if (addrmode != offmode)
2906 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2907 subreg_lowpart_offset (addrmode,
2908 offmode));
2910 /* Don't use offset_address here; we don't need a
2911 recognizable address, and we don't want to generate
2912 code. */
2913 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2914 op0, op1));
2917 if (MEM_P (op0))
2919 if (mode1 == VOIDmode)
2920 /* Bitfield. */
2921 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2922 if (bitpos >= BITS_PER_UNIT)
2924 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2925 bitpos %= BITS_PER_UNIT;
2927 else if (bitpos < 0)
2929 HOST_WIDE_INT units
2930 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2931 op0 = adjust_address_nv (op0, mode1, units);
2932 bitpos += units * BITS_PER_UNIT;
2934 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2935 op0 = adjust_address_nv (op0, mode, 0);
2936 else if (GET_MODE (op0) != mode1)
2937 op0 = adjust_address_nv (op0, mode1, 0);
2938 else
2939 op0 = copy_rtx (op0);
2940 if (op0 == orig_op0)
2941 op0 = shallow_copy_rtx (op0);
2942 set_mem_attributes (op0, exp, 0);
2945 if (bitpos == 0 && mode == GET_MODE (op0))
2946 return op0;
2948 if (bitpos < 0)
2949 return NULL;
2951 if (GET_MODE (op0) == BLKmode)
2952 return NULL;
2954 if ((bitpos % BITS_PER_UNIT) == 0
2955 && bitsize == GET_MODE_BITSIZE (mode1))
2957 enum machine_mode opmode = GET_MODE (op0);
2959 if (opmode == VOIDmode)
2960 opmode = TYPE_MODE (TREE_TYPE (tem));
2962 /* This condition may hold if we're expanding the address
2963 right past the end of an array that turned out not to
2964 be addressable (i.e., the address was only computed in
2965 debug stmts). The gen_subreg below would rightfully
2966 crash, and the address doesn't really exist, so just
2967 drop it. */
2968 if (bitpos >= GET_MODE_BITSIZE (opmode))
2969 return NULL;
2971 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2972 return simplify_gen_subreg (mode, op0, opmode,
2973 bitpos / BITS_PER_UNIT);
2976 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2977 && TYPE_UNSIGNED (TREE_TYPE (exp))
2978 ? SIGN_EXTRACT
2979 : ZERO_EXTRACT, mode,
2980 GET_MODE (op0) != VOIDmode
2981 ? GET_MODE (op0)
2982 : TYPE_MODE (TREE_TYPE (tem)),
2983 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2986 case ABS_EXPR:
2987 return simplify_gen_unary (ABS, mode, op0, mode);
2989 case NEGATE_EXPR:
2990 return simplify_gen_unary (NEG, mode, op0, mode);
2992 case BIT_NOT_EXPR:
2993 return simplify_gen_unary (NOT, mode, op0, mode);
2995 case FLOAT_EXPR:
2996 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2997 0)))
2998 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2999 inner_mode);
3001 case FIX_TRUNC_EXPR:
3002 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3003 inner_mode);
3005 case POINTER_PLUS_EXPR:
3006 /* For the rare target where pointers are not the same size as
3007 size_t, we need to check for mis-matched modes and correct
3008 the addend. */
3009 if (op0 && op1
3010 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3011 && GET_MODE (op0) != GET_MODE (op1))
3013 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3014 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3015 GET_MODE (op1));
3016 else
3017 /* We always sign-extend, regardless of the signedness of
3018 the operand, because the operand is always unsigned
3019 here even if the original C expression is signed. */
3020 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3021 GET_MODE (op1));
3023 /* Fall through. */
3024 case PLUS_EXPR:
3025 return simplify_gen_binary (PLUS, mode, op0, op1);
3027 case MINUS_EXPR:
3028 return simplify_gen_binary (MINUS, mode, op0, op1);
3030 case MULT_EXPR:
3031 return simplify_gen_binary (MULT, mode, op0, op1);
3033 case RDIV_EXPR:
3034 case TRUNC_DIV_EXPR:
3035 case EXACT_DIV_EXPR:
3036 if (unsignedp)
3037 return simplify_gen_binary (UDIV, mode, op0, op1);
3038 else
3039 return simplify_gen_binary (DIV, mode, op0, op1);
3041 case TRUNC_MOD_EXPR:
3042 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3044 case FLOOR_DIV_EXPR:
3045 if (unsignedp)
3046 return simplify_gen_binary (UDIV, mode, op0, op1);
3047 else
3049 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3050 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3051 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3052 return simplify_gen_binary (PLUS, mode, div, adj);
3055 case FLOOR_MOD_EXPR:
3056 if (unsignedp)
3057 return simplify_gen_binary (UMOD, mode, op0, op1);
3058 else
3060 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3061 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3062 adj = simplify_gen_unary (NEG, mode,
3063 simplify_gen_binary (MULT, mode, adj, op1),
3064 mode);
3065 return simplify_gen_binary (PLUS, mode, mod, adj);
3068 case CEIL_DIV_EXPR:
3069 if (unsignedp)
3071 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3072 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3073 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3074 return simplify_gen_binary (PLUS, mode, div, adj);
3076 else
3078 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3079 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3080 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3081 return simplify_gen_binary (PLUS, mode, div, adj);
3084 case CEIL_MOD_EXPR:
3085 if (unsignedp)
3087 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3088 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3089 adj = simplify_gen_unary (NEG, mode,
3090 simplify_gen_binary (MULT, mode, adj, op1),
3091 mode);
3092 return simplify_gen_binary (PLUS, mode, mod, adj);
3094 else
3096 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3097 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3098 adj = simplify_gen_unary (NEG, mode,
3099 simplify_gen_binary (MULT, mode, adj, op1),
3100 mode);
3101 return simplify_gen_binary (PLUS, mode, mod, adj);
3104 case ROUND_DIV_EXPR:
3105 if (unsignedp)
3107 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3108 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3109 rtx adj = round_udiv_adjust (mode, mod, op1);
3110 return simplify_gen_binary (PLUS, mode, div, adj);
3112 else
3114 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3115 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3116 rtx adj = round_sdiv_adjust (mode, mod, op1);
3117 return simplify_gen_binary (PLUS, mode, div, adj);
3120 case ROUND_MOD_EXPR:
3121 if (unsignedp)
3123 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3124 rtx adj = round_udiv_adjust (mode, mod, op1);
3125 adj = simplify_gen_unary (NEG, mode,
3126 simplify_gen_binary (MULT, mode, adj, op1),
3127 mode);
3128 return simplify_gen_binary (PLUS, mode, mod, adj);
3130 else
3132 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3133 rtx adj = round_sdiv_adjust (mode, mod, op1);
3134 adj = simplify_gen_unary (NEG, mode,
3135 simplify_gen_binary (MULT, mode, adj, op1),
3136 mode);
3137 return simplify_gen_binary (PLUS, mode, mod, adj);
3140 case LSHIFT_EXPR:
3141 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3143 case RSHIFT_EXPR:
3144 if (unsignedp)
3145 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3146 else
3147 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3149 case LROTATE_EXPR:
3150 return simplify_gen_binary (ROTATE, mode, op0, op1);
3152 case RROTATE_EXPR:
3153 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3155 case MIN_EXPR:
3156 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3158 case MAX_EXPR:
3159 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3161 case BIT_AND_EXPR:
3162 case TRUTH_AND_EXPR:
3163 return simplify_gen_binary (AND, mode, op0, op1);
3165 case BIT_IOR_EXPR:
3166 case TRUTH_OR_EXPR:
3167 return simplify_gen_binary (IOR, mode, op0, op1);
3169 case BIT_XOR_EXPR:
3170 case TRUTH_XOR_EXPR:
3171 return simplify_gen_binary (XOR, mode, op0, op1);
3173 case TRUTH_ANDIF_EXPR:
3174 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3176 case TRUTH_ORIF_EXPR:
3177 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3179 case TRUTH_NOT_EXPR:
3180 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3182 case LT_EXPR:
3183 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3184 op0, op1);
3186 case LE_EXPR:
3187 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3188 op0, op1);
3190 case GT_EXPR:
3191 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3192 op0, op1);
3194 case GE_EXPR:
3195 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3196 op0, op1);
3198 case EQ_EXPR:
3199 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3201 case NE_EXPR:
3202 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3204 case UNORDERED_EXPR:
3205 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3207 case ORDERED_EXPR:
3208 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3210 case UNLT_EXPR:
3211 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3213 case UNLE_EXPR:
3214 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3216 case UNGT_EXPR:
3217 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3219 case UNGE_EXPR:
3220 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3222 case UNEQ_EXPR:
3223 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3225 case LTGT_EXPR:
3226 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3228 case COND_EXPR:
3229 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3231 case COMPLEX_EXPR:
3232 gcc_assert (COMPLEX_MODE_P (mode));
3233 if (GET_MODE (op0) == VOIDmode)
3234 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3235 if (GET_MODE (op1) == VOIDmode)
3236 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3237 return gen_rtx_CONCAT (mode, op0, op1);
3239 case CONJ_EXPR:
3240 if (GET_CODE (op0) == CONCAT)
3241 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3242 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3243 XEXP (op0, 1),
3244 GET_MODE_INNER (mode)));
3245 else
3247 enum machine_mode imode = GET_MODE_INNER (mode);
3248 rtx re, im;
3250 if (MEM_P (op0))
3252 re = adjust_address_nv (op0, imode, 0);
3253 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3255 else
3257 enum machine_mode ifmode = int_mode_for_mode (mode);
3258 enum machine_mode ihmode = int_mode_for_mode (imode);
3259 rtx halfsize;
3260 if (ifmode == BLKmode || ihmode == BLKmode)
3261 return NULL;
3262 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3263 re = op0;
3264 if (mode != ifmode)
3265 re = gen_rtx_SUBREG (ifmode, re, 0);
3266 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3267 if (imode != ihmode)
3268 re = gen_rtx_SUBREG (imode, re, 0);
3269 im = copy_rtx (op0);
3270 if (mode != ifmode)
3271 im = gen_rtx_SUBREG (ifmode, im, 0);
3272 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3273 if (imode != ihmode)
3274 im = gen_rtx_SUBREG (imode, im, 0);
3276 im = gen_rtx_NEG (imode, im);
3277 return gen_rtx_CONCAT (mode, re, im);
3280 case ADDR_EXPR:
3281 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3282 if (!op0 || !MEM_P (op0))
3284 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3285 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3286 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3287 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3288 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3289 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3291 if (handled_component_p (TREE_OPERAND (exp, 0)))
3293 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3294 tree decl
3295 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3296 &bitoffset, &bitsize, &maxsize);
3297 if ((TREE_CODE (decl) == VAR_DECL
3298 || TREE_CODE (decl) == PARM_DECL
3299 || TREE_CODE (decl) == RESULT_DECL)
3300 && (!TREE_ADDRESSABLE (decl)
3301 || target_for_debug_bind (decl))
3302 && (bitoffset % BITS_PER_UNIT) == 0
3303 && bitsize > 0
3304 && bitsize == maxsize)
3306 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3307 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3311 return NULL;
3314 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3315 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3317 return op0;
3319 case VECTOR_CST:
3321 unsigned i;
3323 op0 = gen_rtx_CONCATN
3324 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3326 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3328 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3329 if (!op1)
3330 return NULL;
3331 XVECEXP (op0, 0, i) = op1;
3334 return op0;
3337 case CONSTRUCTOR:
3338 if (TREE_CLOBBER_P (exp))
3339 return NULL;
3340 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3342 unsigned i;
3343 tree val;
3345 op0 = gen_rtx_CONCATN
3346 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3348 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3350 op1 = expand_debug_expr (val);
3351 if (!op1)
3352 return NULL;
3353 XVECEXP (op0, 0, i) = op1;
3356 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3358 op1 = expand_debug_expr
3359 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3361 if (!op1)
3362 return NULL;
3364 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3365 XVECEXP (op0, 0, i) = op1;
3368 return op0;
3370 else
3371 goto flag_unsupported;
3373 case CALL_EXPR:
3374 /* ??? Maybe handle some builtins? */
3375 return NULL;
3377 case SSA_NAME:
3379 gimple g = get_gimple_for_ssa_name (exp);
3380 if (g)
3382 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3383 if (!op0)
3384 return NULL;
3386 else
3388 int part = var_to_partition (SA.map, exp);
3390 if (part == NO_PARTITION)
3392 /* If this is a reference to an incoming value of a parameter
3393 that is never used in the code, or where the incoming
3394 value is never used, use the PARM_DECL's
3395 DECL_RTL if set. */
3396 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3397 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3399 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3400 if (op0)
3401 goto adjust_mode;
3402 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3403 if (op0)
3404 goto adjust_mode;
3406 return NULL;
3409 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3411 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3413 goto adjust_mode;
3416 case ERROR_MARK:
3417 return NULL;
3419 /* Vector stuff. For most of these tree codes we don't have corresponding rtl codes. */
3420 case REALIGN_LOAD_EXPR:
3421 case REDUC_MAX_EXPR:
3422 case REDUC_MIN_EXPR:
3423 case REDUC_PLUS_EXPR:
3424 case VEC_COND_EXPR:
3425 case VEC_LSHIFT_EXPR:
3426 case VEC_PACK_FIX_TRUNC_EXPR:
3427 case VEC_PACK_SAT_EXPR:
3428 case VEC_PACK_TRUNC_EXPR:
3429 case VEC_RSHIFT_EXPR:
3430 case VEC_UNPACK_FLOAT_HI_EXPR:
3431 case VEC_UNPACK_FLOAT_LO_EXPR:
3432 case VEC_UNPACK_HI_EXPR:
3433 case VEC_UNPACK_LO_EXPR:
3434 case VEC_WIDEN_MULT_HI_EXPR:
3435 case VEC_WIDEN_MULT_LO_EXPR:
3436 case VEC_WIDEN_MULT_EVEN_EXPR:
3437 case VEC_WIDEN_MULT_ODD_EXPR:
3438 case VEC_WIDEN_LSHIFT_HI_EXPR:
3439 case VEC_WIDEN_LSHIFT_LO_EXPR:
3440 case VEC_PERM_EXPR:
3441 return NULL;
3443 /* Misc codes. */
3444 case ADDR_SPACE_CONVERT_EXPR:
3445 case FIXED_CONVERT_EXPR:
3446 case OBJ_TYPE_REF:
3447 case WITH_SIZE_EXPR:
3448 return NULL;
3450 case DOT_PROD_EXPR:
3451 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3452 && SCALAR_INT_MODE_P (mode))
3455 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3456 0)))
3457 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3458 inner_mode);
3460 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3461 1)))
3462 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3463 inner_mode);
3464 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3465 return simplify_gen_binary (PLUS, mode, op0, op2);
3467 return NULL;
3469 case WIDEN_MULT_EXPR:
3470 case WIDEN_MULT_PLUS_EXPR:
3471 case WIDEN_MULT_MINUS_EXPR:
3472 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3473 && SCALAR_INT_MODE_P (mode))
3475 inner_mode = GET_MODE (op0);
3476 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3477 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3478 else
3479 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3480 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3481 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3482 else
3483 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3484 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3485 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3486 return op0;
3487 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3488 return simplify_gen_binary (PLUS, mode, op0, op2);
3489 else
3490 return simplify_gen_binary (MINUS, mode, op2, op0);
3492 return NULL;
3494 case MULT_HIGHPART_EXPR:
3495 /* ??? Similar to the above. */
3496 return NULL;
3498 case WIDEN_SUM_EXPR:
3499 case WIDEN_LSHIFT_EXPR:
3500 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3501 && SCALAR_INT_MODE_P (mode))
3504 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3505 0)))
3506 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3507 inner_mode);
3508 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3509 ? ASHIFT : PLUS, mode, op0, op1);
3511 return NULL;
3513 case FMA_EXPR:
3514 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3516 default:
3517 flag_unsupported:
3518 #ifdef ENABLE_CHECKING
3519 debug_tree (exp);
3520 gcc_unreachable ();
3521 #else
3522 return NULL;
3523 #endif
3527 /* Return an RTX equivalent to the source bind value of the tree expression
3528 EXP. */
3530 static rtx
3531 expand_debug_source_expr (tree exp)
3533 rtx op0 = NULL_RTX;
3534 enum machine_mode mode = VOIDmode, inner_mode;
3536 switch (TREE_CODE (exp))
3538 case PARM_DECL:
3540 mode = DECL_MODE (exp);
3541 op0 = expand_debug_parm_decl (exp);
3542 if (op0)
3543 break;
3544 /* See whether this is an argument that has been completely
3545 optimized out. */
3546 if (!DECL_RTL_SET_P (exp)
3547 && !DECL_INCOMING_RTL (exp)
3548 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3550 tree aexp = exp;
3551 if (DECL_ABSTRACT_ORIGIN (exp))
3552 aexp = DECL_ABSTRACT_ORIGIN (exp);
3553 if (DECL_CONTEXT (aexp)
3554 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3556 VEC(tree, gc) **debug_args;
3557 unsigned int ix;
3558 tree ddecl;
3559 #ifdef ENABLE_CHECKING
3560 tree parm;
3561 for (parm = DECL_ARGUMENTS (current_function_decl);
3562 parm; parm = DECL_CHAIN (parm))
3563 gcc_assert (parm != exp
3564 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3565 #endif
3566 debug_args = decl_debug_args_lookup (current_function_decl);
3567 if (debug_args != NULL)
3569 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3570 ix += 2)
3571 if (ddecl == aexp)
3572 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3576 break;
3578 default:
3579 break;
3582 if (op0 == NULL_RTX)
3583 return NULL_RTX;
3585 inner_mode = GET_MODE (op0);
3586 if (mode == inner_mode)
3587 return op0;
3589 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3591 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3592 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3593 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3594 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3595 else
3596 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3598 else if (FLOAT_MODE_P (mode))
3599 gcc_unreachable ();
3600 else if (FLOAT_MODE_P (inner_mode))
3602 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3603 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3604 else
3605 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3607 else if (CONSTANT_P (op0)
3608 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3609 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3610 subreg_lowpart_offset (mode, inner_mode));
3611 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3612 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3613 else
3614 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3616 return op0;
3619 /* Expand the _LOCs in debug insns. We run this after expanding all
3620 regular insns, so that any variables referenced in the function
3621 will have their DECL_RTLs set. */
3623 static void
3624 expand_debug_locations (void)
3626 rtx insn;
3627 rtx last = get_last_insn ();
3628 int save_strict_alias = flag_strict_aliasing;
3630 /* New alias sets while setting up memory attributes cause
3631 -fcompare-debug failures, even though they don't bring about any
3632 codegen changes. */
3633 flag_strict_aliasing = 0;
3635 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3636 if (DEBUG_INSN_P (insn))
3638 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3639 rtx val;
3640 enum machine_mode mode;
3642 if (value == NULL_TREE)
3643 val = NULL_RTX;
3644 else
3646 if (INSN_VAR_LOCATION_STATUS (insn)
3647 == VAR_INIT_STATUS_UNINITIALIZED)
3648 val = expand_debug_source_expr (value);
3649 else
3650 val = expand_debug_expr (value);
3651 gcc_assert (last == get_last_insn ());
3654 if (!val)
3655 val = gen_rtx_UNKNOWN_VAR_LOC ();
3656 else
3658 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3660 gcc_assert (mode == GET_MODE (val)
3661 || (GET_MODE (val) == VOIDmode
3662 && (CONST_INT_P (val)
3663 || GET_CODE (val) == CONST_FIXED
3664 || CONST_DOUBLE_AS_INT_P (val)
3665 || GET_CODE (val) == LABEL_REF)));
3668 INSN_VAR_LOCATION_LOC (insn) = val;
3671 flag_strict_aliasing = save_strict_alias;
3674 /* Expand basic block BB from GIMPLE trees to RTL. */
3676 static basic_block
3677 expand_gimple_basic_block (basic_block bb)
3679 gimple_stmt_iterator gsi;
3680 gimple_seq stmts;
3681 gimple stmt = NULL;
3682 rtx note, last;
3683 edge e;
3684 edge_iterator ei;
3685 void **elt;
3687 if (dump_file)
3688 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3689 bb->index);
3691 /* Note that since we are now transitioning from GIMPLE to RTL, we
3692 cannot use the gsi_*_bb() routines because they expect the basic
3693 block to be in GIMPLE, instead of RTL. Therefore, we need to
3694 access the BB sequence directly. */
3695 stmts = bb_seq (bb);
3696 bb->il.gimple.seq = NULL;
3697 bb->il.gimple.phi_nodes = NULL;
3698 rtl_profile_for_bb (bb);
3699 init_rtl_bb_info (bb);
3700 bb->flags |= BB_RTL;
3702 /* Remove the RETURN_EXPR if we may fall through to the exit
3703 instead. */
3704 gsi = gsi_last (stmts);
3705 if (!gsi_end_p (gsi)
3706 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3708 gimple ret_stmt = gsi_stmt (gsi);
3710 gcc_assert (single_succ_p (bb));
3711 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3713 if (bb->next_bb == EXIT_BLOCK_PTR
3714 && !gimple_return_retval (ret_stmt))
3716 gsi_remove (&gsi, false);
3717 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3721 gsi = gsi_start (stmts);
3722 if (!gsi_end_p (gsi))
3724 stmt = gsi_stmt (gsi);
3725 if (gimple_code (stmt) != GIMPLE_LABEL)
3726 stmt = NULL;
3729 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3731 if (stmt || elt)
3733 last = get_last_insn ();
3735 if (stmt)
3737 expand_gimple_stmt (stmt);
3738 gsi_next (&gsi);
3741 if (elt)
3742 emit_label ((rtx) *elt);
3744 /* Java emits line number notes at the top of labels.
3745 ??? Make this go away once line number notes are obsoleted. */
3746 BB_HEAD (bb) = NEXT_INSN (last);
3747 if (NOTE_P (BB_HEAD (bb)))
3748 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3749 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3751 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3753 else
3754 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3756 NOTE_BASIC_BLOCK (note) = bb;
3758 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3760 basic_block new_bb;
3762 stmt = gsi_stmt (gsi);
3764 /* If this statement is a non-debug one, and we generate debug
3765 insns, then this one might be the last real use of a TERed
3766 SSA_NAME, but where there are still some debug uses further
3767 down. Expanding the current SSA name in such further debug
3768 uses by their RHS might lead to wrong debug info, as coalescing
3769 might make the operands of such RHS be placed into the same
3770 pseudo as something else. Like so:
3771 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3772 use(a_1);
3773 a_2 = ...
3774 #DEBUG ... => a_1
3775 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3776 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3777 the write to a_2 would actually have clobbered the place which
3778 formerly held a_0.
3780 So, instead of that, we recognize the situation, and generate
3781 debug temporaries at the last real use of TERed SSA names:
3782 a_1 = a_0 + 1;
3783 #DEBUG #D1 => a_1
3784 use(a_1);
3785 a_2 = ...
3786 #DEBUG ... => #D1
3788 if (MAY_HAVE_DEBUG_INSNS
3789 && SA.values
3790 && !is_gimple_debug (stmt))
3792 ssa_op_iter iter;
3793 tree op;
3794 gimple def;
3796 location_t sloc = get_curr_insn_source_location ();
3797 tree sblock = get_curr_insn_block ();
3799 /* Look for SSA names that have their last use here (TERed
3800 names always have only one real use). */
3801 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3802 if ((def = get_gimple_for_ssa_name (op)))
3804 imm_use_iterator imm_iter;
3805 use_operand_p use_p;
3806 bool have_debug_uses = false;
3808 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3810 if (gimple_debug_bind_p (USE_STMT (use_p)))
3812 have_debug_uses = true;
3813 break;
3817 if (have_debug_uses)
3819 /* OP is a TERed SSA name, DEF is its defining
3820 statement, and OP is used in further debug
3821 instructions. Generate a debug temporary, and
3822 replace all uses of OP in debug insns with that
3823 temporary. */
3824 gimple debugstmt;
3825 tree value = gimple_assign_rhs_to_tree (def);
3826 tree vexpr = make_node (DEBUG_EXPR_DECL);
3827 rtx val;
3828 enum machine_mode mode;
3830 set_curr_insn_source_location (gimple_location (def));
3831 set_curr_insn_block (gimple_block (def));
3833 DECL_ARTIFICIAL (vexpr) = 1;
3834 TREE_TYPE (vexpr) = TREE_TYPE (value);
3835 if (DECL_P (value))
3836 mode = DECL_MODE (value);
3837 else
3838 mode = TYPE_MODE (TREE_TYPE (value));
3839 DECL_MODE (vexpr) = mode;
3841 val = gen_rtx_VAR_LOCATION
3842 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3844 emit_debug_insn (val);
3846 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3848 if (!gimple_debug_bind_p (debugstmt))
3849 continue;
3851 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3852 SET_USE (use_p, vexpr);
3854 update_stmt (debugstmt);
3858 set_curr_insn_source_location (sloc);
3859 set_curr_insn_block (sblock);
3862 currently_expanding_gimple_stmt = stmt;
3864 /* Expand this statement, then evaluate the resulting RTL and
3865 fixup the CFG accordingly. */
3866 if (gimple_code (stmt) == GIMPLE_COND)
3868 new_bb = expand_gimple_cond (bb, stmt);
3869 if (new_bb)
3870 return new_bb;
3872 else if (gimple_debug_bind_p (stmt))
3874 location_t sloc = get_curr_insn_source_location ();
3875 tree sblock = get_curr_insn_block ();
3876 gimple_stmt_iterator nsi = gsi;
3878 for (;;)
3880 tree var = gimple_debug_bind_get_var (stmt);
3881 tree value;
3882 rtx val;
3883 enum machine_mode mode;
3885 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3886 && TREE_CODE (var) != LABEL_DECL
3887 && !target_for_debug_bind (var))
3888 goto delink_debug_stmt;
3890 if (gimple_debug_bind_has_value_p (stmt))
3891 value = gimple_debug_bind_get_value (stmt);
3892 else
3893 value = NULL_TREE;
3895 last = get_last_insn ();
3897 set_curr_insn_source_location (gimple_location (stmt));
3898 set_curr_insn_block (gimple_block (stmt));
3900 if (DECL_P (var))
3901 mode = DECL_MODE (var);
3902 else
3903 mode = TYPE_MODE (TREE_TYPE (var));
3905 val = gen_rtx_VAR_LOCATION
3906 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3908 emit_debug_insn (val);
3910 if (dump_file && (dump_flags & TDF_DETAILS))
3912 /* We can't dump the insn with a TREE where an RTX
3913 is expected. */
3914 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3915 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3916 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3919 delink_debug_stmt:
3920 /* In order not to generate too many debug temporaries,
3921 we delink all uses of debug statements we already expanded.
3922 Therefore debug statements between definition and real
3923 use of TERed SSA names will continue to use the SSA name,
3924 and not be replaced with debug temps. */
3925 delink_stmt_imm_use (stmt);
3927 gsi = nsi;
3928 gsi_next (&nsi);
3929 if (gsi_end_p (nsi))
3930 break;
3931 stmt = gsi_stmt (nsi);
3932 if (!gimple_debug_bind_p (stmt))
3933 break;
3936 set_curr_insn_source_location (sloc);
3937 set_curr_insn_block (sblock);
3939 else if (gimple_debug_source_bind_p (stmt))
3941 location_t sloc = get_curr_insn_source_location ();
3942 tree sblock = get_curr_insn_block ();
3943 tree var = gimple_debug_source_bind_get_var (stmt);
3944 tree value = gimple_debug_source_bind_get_value (stmt);
3945 rtx val;
3946 enum machine_mode mode;
3948 last = get_last_insn ();
3950 set_curr_insn_source_location (gimple_location (stmt));
3951 set_curr_insn_block (gimple_block (stmt));
3953 mode = DECL_MODE (var);
3955 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3956 VAR_INIT_STATUS_UNINITIALIZED);
3958 emit_debug_insn (val);
3960 if (dump_file && (dump_flags & TDF_DETAILS))
3962 /* We can't dump the insn with a TREE where an RTX
3963 is expected. */
3964 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3965 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3966 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3969 set_curr_insn_source_location (sloc);
3970 set_curr_insn_block (sblock);
3972 else
3974 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3976 bool can_fallthru;
3977 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3978 if (new_bb)
3980 if (can_fallthru)
3981 bb = new_bb;
3982 else
3983 return new_bb;
3986 else
3988 def_operand_p def_p;
3989 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3991 if (def_p != NULL)
3993 /* Ignore this stmt if it is in the list of
3994 replaceable expressions. */
3995 if (SA.values
3996 && bitmap_bit_p (SA.values,
3997 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3998 continue;
4000 last = expand_gimple_stmt (stmt);
4001 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4006 currently_expanding_gimple_stmt = NULL;
4008 /* Expand implicit goto and convert goto_locus. */
4009 FOR_EACH_EDGE (e, ei, bb->succs)
4011 if (e->goto_locus && e->goto_block)
4013 set_curr_insn_source_location (e->goto_locus);
4014 set_curr_insn_block (e->goto_block);
4015 e->goto_locus = curr_insn_locator ();
4017 e->goto_block = NULL;
4018 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4020 emit_jump (label_rtx_for_bb (e->dest));
4021 e->flags &= ~EDGE_FALLTHRU;
4025 /* Expanded RTL can create a jump in the last instruction of a block.
4026 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
4027 We need to insert a dummy move to prevent this. PR41440. */
4028 if (single_succ_p (bb)
4029 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4030 && (last = get_last_insn ())
4031 && JUMP_P (last))
4033 rtx dummy = gen_reg_rtx (SImode);
4034 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4037 do_pending_stack_adjust ();
4039 /* Find the block tail. The last insn in the block is the insn
4040 before a barrier and/or table jump insn. */
4041 last = get_last_insn ();
4042 if (BARRIER_P (last))
4043 last = PREV_INSN (last);
4044 if (JUMP_TABLE_DATA_P (last))
4045 last = PREV_INSN (PREV_INSN (last));
4046 BB_END (bb) = last;
4048 update_bb_for_insn (bb);
4050 return bb;
4054 /* Create a basic block for initialization code. */
4056 static basic_block
4057 construct_init_block (void)
4059 basic_block init_block, first_block;
4060 edge e = NULL;
4061 int flags;
4063 /* Multiple entry points not supported yet. */
4064 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4065 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4066 init_rtl_bb_info (EXIT_BLOCK_PTR);
4067 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4068 EXIT_BLOCK_PTR->flags |= BB_RTL;
4070 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4072 /* When the entry edge points to the first basic block, we don't need a jump;
4073 otherwise we have to jump to the proper target. */
4074 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4076 tree label = gimple_block_label (e->dest);
4078 emit_jump (label_rtx (label));
4079 flags = 0;
4081 else
4082 flags = EDGE_FALLTHRU;
4084 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4085 get_last_insn (),
4086 ENTRY_BLOCK_PTR);
4087 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4088 init_block->count = ENTRY_BLOCK_PTR->count;
4089 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4090 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4091 if (e)
4093 first_block = e->dest;
4094 redirect_edge_succ (e, init_block);
4095 e = make_edge (init_block, first_block, flags);
4097 else
4098 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4099 e->probability = REG_BR_PROB_BASE;
4100 e->count = ENTRY_BLOCK_PTR->count;
4102 update_bb_for_insn (init_block);
4103 return init_block;
4106 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4107 found in the block tree. */
4109 static void
4110 set_block_levels (tree block, int level)
4112 while (block)
4114 BLOCK_NUMBER (block) = level;
4115 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4116 block = BLOCK_CHAIN (block);
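/* For instance, given a lexical block tree (hypothetical layout)

     O                  the function's DECL_INITIAL
     +-- A
     |   +-- B
     +-- C

   the call set_block_levels (O, 0) assigns BLOCK_NUMBER 0 to O, 1 to A
   and C, and 2 to B: siblings share a level, children are one deeper.  */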
4120 /* Create a block containing landing pads and similar stuff. */
4122 static void
4123 construct_exit_block (void)
4125 rtx head = get_last_insn ();
4126 rtx end;
4127 basic_block exit_block;
4128 edge e, e2;
4129 unsigned ix;
4130 edge_iterator ei;
4131 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4133 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4135 /* Make sure the locus is set to the end of the function, so that
4136 epilogue line numbers and warnings are set properly. */
4137 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4138 input_location = cfun->function_end_locus;
4140 /* The following insns belong to the top scope. */
4141 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4143 /* Generate rtl for function exit. */
4144 expand_function_end ();
4146 end = get_last_insn ();
4147 if (head == end)
4148 return;
4149 /* While emitting the function end we could move the end of the last basic block.
4151 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4152 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4153 head = NEXT_INSN (head);
4154 exit_block = create_basic_block (NEXT_INSN (head), end,
4155 EXIT_BLOCK_PTR->prev_bb);
4156 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4157 exit_block->count = EXIT_BLOCK_PTR->count;
4158 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4159 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4161 ix = 0;
4162 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4164 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4165 if (!(e->flags & EDGE_ABNORMAL))
4166 redirect_edge_succ (e, exit_block);
4167 else
4168 ix++;
4171 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4172 e->probability = REG_BR_PROB_BASE;
4173 e->count = EXIT_BLOCK_PTR->count;
4174 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4175 if (e2 != e)
4177 e->count -= e2->count;
4178 exit_block->count -= e2->count;
4179 exit_block->frequency -= EDGE_FREQUENCY (e2);
4181 if (e->count < 0)
4182 e->count = 0;
4183 if (exit_block->count < 0)
4184 exit_block->count = 0;
4185 if (exit_block->frequency < 0)
4186 exit_block->frequency = 0;
4187 update_bb_for_insn (exit_block);
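/* A small worked example of the count adjustment above, with made-up
   profile numbers: if EXIT_BLOCK_PTR has count 100 and one abnormal
   predecessor with count 10 still reaches it directly, the new fallthru
   edge and exit_block are left with count 100 - 10 = 90; negative
   results from an inconsistent profile are clamped to zero.  */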
4190 /* Helper function for discover_nonconstant_array_refs.
4191 Look for ARRAY_REF nodes with non-constant indexes and mark them
4192 addressable. */
4194 static tree
4195 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4196 void *data ATTRIBUTE_UNUSED)
4198 tree t = *tp;
4200 if (IS_TYPE_OR_DECL_P (t))
4201 *walk_subtrees = 0;
4202 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4204 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4205 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4206 && (!TREE_OPERAND (t, 2)
4207 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4208 || (TREE_CODE (t) == COMPONENT_REF
4209 && (!TREE_OPERAND (t,2)
4210 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4211 || TREE_CODE (t) == BIT_FIELD_REF
4212 || TREE_CODE (t) == REALPART_EXPR
4213 || TREE_CODE (t) == IMAGPART_EXPR
4214 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4215 || CONVERT_EXPR_P (t))
4216 t = TREE_OPERAND (t, 0);
4218 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4220 t = get_base_address (t);
4221 if (t && DECL_P (t)
4222 && DECL_MODE (t) != BLKmode)
4223 TREE_ADDRESSABLE (t) = 1;
4226 *walk_subtrees = 0;
4229 return NULL_TREE;
4232 /* RTL expansion is not able to compile array references with variable
4233 offsets for arrays stored in a single register. Discover such
4234 expressions and mark the variables as addressable to avoid this
4235 scenario. */
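/* For example (hypothetical user code):

     int
     f (int i)
     {
       struct { int v[2]; } s = { { 1, 2 } };
       return s.v[i];
     }

   On targets where "s" fits in a single register the variable index
   "i" could not be expanded; marking "s" TREE_ADDRESSABLE forces it
   into memory so the reference becomes an ordinary address
   computation.  */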
4237 static void
4238 discover_nonconstant_array_refs (void)
4240 basic_block bb;
4241 gimple_stmt_iterator gsi;
4243 FOR_EACH_BB (bb)
4244 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4246 gimple stmt = gsi_stmt (gsi);
4247 if (!is_gimple_debug (stmt))
4248 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4252 /* This function sets crtl->args.internal_arg_pointer to a virtual
4253 register if DRAP is needed. Local register allocator will replace
4254 virtual_incoming_args_rtx with the virtual register. */
4256 static void
4257 expand_stack_alignment (void)
4259 rtx drap_rtx;
4260 unsigned int preferred_stack_boundary;
4262 if (! SUPPORTS_STACK_ALIGNMENT)
4263 return;
4265 if (cfun->calls_alloca
4266 || cfun->has_nonlocal_label
4267 || crtl->has_nonlocal_goto)
4268 crtl->need_drap = true;
4270 /* Call update_stack_boundary here again to update incoming stack
4271 boundary. It may set incoming stack alignment to a different
4272 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4273 use the minimum incoming stack alignment to check if it is OK
4274 to perform sibcall optimization since sibcall optimization will
4275 only align the outgoing stack to incoming stack boundary. */
4276 if (targetm.calls.update_stack_boundary)
4277 targetm.calls.update_stack_boundary ();
4279 /* The incoming stack frame has to be aligned at least at
4280 parm_stack_boundary. */
4281 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4283 /* Update crtl->stack_alignment_estimated and use it later to align
4284 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4285 exceptions since callgraph doesn't collect incoming stack alignment
4286 in this case. */
4287 if (cfun->can_throw_non_call_exceptions
4288 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4289 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4290 else
4291 preferred_stack_boundary = crtl->preferred_stack_boundary;
4292 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4293 crtl->stack_alignment_estimated = preferred_stack_boundary;
4294 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4295 crtl->stack_alignment_needed = preferred_stack_boundary;
4297 gcc_assert (crtl->stack_alignment_needed
4298 <= crtl->stack_alignment_estimated);
4300 crtl->stack_realign_needed
4301 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4302 crtl->stack_realign_tried = crtl->stack_realign_needed;
4304 crtl->stack_realign_processed = true;
4306 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4307 alignment. */
4308 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4309 drap_rtx = targetm.calls.get_drap_rtx ();
4311 /* stack_realign_drap and drap_rtx must match. */
4312 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4314 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4315 if (NULL != drap_rtx)
4317 crtl->args.internal_arg_pointer = drap_rtx;
4319 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4320 needed. */
4321 fixup_tail_calls ();
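/* As an illustration (hypothetical user code, assuming a target whose
   incoming stack boundary is below 32 bytes):

     extern void use (int *, char *);
     void
     g (int n)
     {
       int buf[4] __attribute__ ((aligned (32)));
       char *p = __builtin_alloca (n);
       use (buf, p);
     }

   The alloca call sets crtl->need_drap above, and the over-aligned
   local typically raises stack_alignment_estimated beyond
   INCOMING_STACK_BOUNDARY, so stack_realign_needed is set and the DRAP
   register returned by targetm.calls.get_drap_rtx becomes the new
   internal argument pointer.  */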
4325 /* Translate the intermediate representation contained in the CFG
4326 from GIMPLE trees to RTL.
4328 We do conversion per basic block and preserve/update the tree CFG.
4329 This implies we have to do some magic as the CFG can simultaneously
4330 consist of basic blocks containing RTL and GIMPLE trees. This can
4331 confuse the CFG hooks, so be careful not to manipulate the CFG during
4332 the expansion. */
4334 static unsigned int
4335 gimple_expand_cfg (void)
4337 basic_block bb, init_block;
4338 sbitmap blocks;
4339 edge_iterator ei;
4340 edge e;
4341 rtx var_seq;
4342 unsigned i;
4344 timevar_push (TV_OUT_OF_SSA);
4345 rewrite_out_of_ssa (&SA);
4346 timevar_pop (TV_OUT_OF_SSA);
4347 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4349 /* Make sure all values used by the optimization passes have sane
4350 defaults. */
4351 reg_renumber = 0;
4353 /* Some backends want to know that we are expanding to RTL. */
4354 currently_expanding_to_rtl = 1;
4355 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4356 free_dominance_info (CDI_DOMINATORS);
4358 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4360 insn_locators_alloc ();
4361 if (!DECL_IS_BUILTIN (current_function_decl))
4363 /* Eventually, all FEs should explicitly set function_start_locus. */
4364 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4365 set_curr_insn_source_location
4366 (DECL_SOURCE_LOCATION (current_function_decl));
4367 else
4368 set_curr_insn_source_location (cfun->function_start_locus);
4370 else
4371 set_curr_insn_source_location (UNKNOWN_LOCATION);
4372 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4373 prologue_locator = curr_insn_locator ();
4375 #ifdef INSN_SCHEDULING
4376 init_sched_attrs ();
4377 #endif
4379 /* Make sure first insn is a note even if we don't want linenums.
4380 This makes sure the first insn will never be deleted.
4381 Also, final expects a note to appear there. */
4382 emit_note (NOTE_INSN_DELETED);
4384 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4385 discover_nonconstant_array_refs ();
4387 targetm.expand_to_rtl_hook ();
4388 crtl->stack_alignment_needed = STACK_BOUNDARY;
4389 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4390 crtl->stack_alignment_estimated = 0;
4391 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4392 cfun->cfg->max_jumptable_ents = 0;
4394 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4395 of the function section at expansion time to predict the distance of calls. */
4396 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4398 /* Expand the variables recorded during gimple lowering. */
4399 timevar_push (TV_VAR_EXPAND);
4400 start_sequence ();
4402 expand_used_vars ();
4404 var_seq = get_insns ();
4405 end_sequence ();
4406 timevar_pop (TV_VAR_EXPAND);
4408 /* Honor stack protection warnings. */
4409 if (warn_stack_protect)
4411 if (cfun->calls_alloca)
4412 warning (OPT_Wstack_protector,
4413 "stack protector not protecting local variables: "
4414 "variable length buffer");
4415 if (has_short_buffer && !crtl->stack_protect_guard)
4416 warning (OPT_Wstack_protector,
4417 "stack protector not protecting function: "
4418 "all local arrays are less than %d bytes long",
4419 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
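/* For instance (hypothetical user code), compiling

     extern void use (char *);
     void f (int n) { char buf[n]; use (buf); }

   with -fstack-protector -Wstack-protector triggers the first warning
   above, since the variable-length buffer is allocated like alloca and
   cannot be covered by the guard; the second warning fires when no
   guard was added because every local array is shorter than the
   ssp-buffer-size parameter.  */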
4422 /* Set up parameters and prepare for return, for the function. */
4423 expand_function_start (current_function_decl);
4425 /* If we emitted any instructions for setting up the variables,
4426 emit them before the FUNCTION_START note. */
4427 if (var_seq)
4429 emit_insn_before (var_seq, parm_birth_insn);
4431 /* In expand_function_end we'll insert the alloca save/restore
4432 before parm_birth_insn. We've just inserted an alloca call.
4433 Adjust the pointer to match. */
4434 parm_birth_insn = var_seq;
4437 /* Now that we also have the parameter RTXs, copy them over to our
4438 partitions. */
4439 for (i = 0; i < SA.map->num_partitions; i++)
4441 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4443 if (TREE_CODE (var) != VAR_DECL
4444 && !SA.partition_to_pseudo[i])
4445 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4446 gcc_assert (SA.partition_to_pseudo[i]);
4448 /* If this decl was marked as living in multiple places, reset
4449 this now to NULL. */
4450 if (DECL_RTL_IF_SET (var) == pc_rtx)
4451 SET_DECL_RTL (var, NULL);
4453 /* Some RTL parts really want to look at DECL_RTL(x) when x
4454 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4455 SET_DECL_RTL here, making this available, but that would mean
4456 selecting one of the potentially many RTLs for one DECL. Instead
4457 of doing that we simply reset the MEM_EXPR of the RTL in question,
4458 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4459 if (!DECL_RTL_SET_P (var))
4461 if (MEM_P (SA.partition_to_pseudo[i]))
4462 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4466 /* If we have a class containing differently aligned pointers
4467 we need to merge those into the corresponding RTL pointer
4468 alignment. */
4469 for (i = 1; i < num_ssa_names; i++)
4471 tree name = ssa_name (i);
4472 int part;
4473 rtx r;
4475 if (!name
4476 /* We might have generated new SSA names in
4477 update_alias_info_with_stack_vars. They will have NULL
4478 defining statements and won't be part of the partitioning,
4479 so ignore those. */
4480 || !SSA_NAME_DEF_STMT (name))
4481 continue;
4482 part = var_to_partition (SA.map, name);
4483 if (part == NO_PARTITION)
4484 continue;
4486 /* Adjust all partition members to get the underlying decl of
4487 the representative which we might have created in expand_one_var. */
4488 if (SSA_NAME_VAR (name) == NULL_TREE)
4490 tree leader = partition_to_var (SA.map, part);
4491 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4492 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4494 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4495 continue;
4497 r = SA.partition_to_pseudo[part];
4498 if (REG_P (r))
4499 mark_reg_pointer (r, get_pointer_alignment (name));
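/* For example (made-up alignments): if p_1 with known 16-byte
   alignment and p_2 with known 4-byte alignment were coalesced into
   the same partition, both calls above mark the shared pseudo as a
   pointer, and the recorded alignment ends up no stronger than the
   weaker of the two, keeping later RTL passes conservative.  */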
4502 /* If this function is `main', emit a call to `__main'
4503 to run global initializers, etc. */
4504 if (DECL_NAME (current_function_decl)
4505 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4506 && DECL_FILE_SCOPE_P (current_function_decl))
4507 expand_main_function ();
4509 /* Initialize the stack_protect_guard field. This must happen after the
4510 call to __main (if any) so that the external decl is initialized. */
4511 if (crtl->stack_protect_guard)
4512 stack_protect_prologue ();
4514 expand_phi_nodes (&SA);
4516 /* Register rtl specific functions for cfg. */
4517 rtl_register_cfg_hooks ();
4519 init_block = construct_init_block ();
4521 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4522 remaining edges later. */
4523 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4524 e->flags &= ~EDGE_EXECUTABLE;
4526 lab_rtx_for_bb = pointer_map_create ();
4527 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4528 bb = expand_gimple_basic_block (bb);
4530 if (MAY_HAVE_DEBUG_INSNS)
4531 expand_debug_locations ();
4533 /* Free stuff we no longer need after GIMPLE optimizations. */
4534 free_dominance_info (CDI_DOMINATORS);
4535 free_dominance_info (CDI_POST_DOMINATORS);
4536 delete_tree_cfg_annotations ();
4538 timevar_push (TV_OUT_OF_SSA);
4539 finish_out_of_ssa (&SA);
4540 timevar_pop (TV_OUT_OF_SSA);
4542 timevar_push (TV_POST_EXPAND);
4543 /* We are no longer in SSA form. */
4544 cfun->gimple_df->in_ssa_p = false;
4545 if (current_loops)
4546 loops_state_clear (LOOP_CLOSED_SSA);
4548 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4549 conservatively to true until they are all profile aware. */
4550 pointer_map_destroy (lab_rtx_for_bb);
4551 free_histograms ();
4553 construct_exit_block ();
4554 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4555 insn_locators_finalize ();
4557 /* Zap the tree EH table. */
4558 set_eh_throw_stmt_table (cfun, NULL);
4560 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4561 to split edges, which edge insertions might do. */
4562 rebuild_jump_labels (get_insns ());
4564 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4566 edge e;
4567 edge_iterator ei;
4568 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4570 if (e->insns.r)
4572 rebuild_jump_labels_chain (e->insns.r);
4573 /* Avoid putting insns before parm_birth_insn. */
4574 if (e->src == ENTRY_BLOCK_PTR
4575 && single_succ_p (ENTRY_BLOCK_PTR)
4576 && parm_birth_insn)
4578 rtx insns = e->insns.r;
4579 e->insns.r = NULL_RTX;
4580 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4582 else
4583 commit_one_edge_insertion (e);
4585 else
4586 ei_next (&ei);
4590 /* We're done expanding trees to RTL. */
4591 currently_expanding_to_rtl = 0;
4593 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4595 edge e;
4596 edge_iterator ei;
4597 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4599 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4600 e->flags &= ~EDGE_EXECUTABLE;
4602 /* At the moment not all abnormal edges match the RTL
4603 representation. It is safe to remove them here as
4604 find_many_sub_basic_blocks will rediscover them.
4605 In the future we should get this fixed properly. */
4606 if ((e->flags & EDGE_ABNORMAL)
4607 && !(e->flags & EDGE_SIBCALL))
4608 remove_edge (e);
4609 else
4610 ei_next (&ei);
4614 blocks = sbitmap_alloc (last_basic_block);
4615 sbitmap_ones (blocks);
4616 find_many_sub_basic_blocks (blocks);
4617 sbitmap_free (blocks);
4618 purge_all_dead_edges ();
4620 expand_stack_alignment ();
4622 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4623 function. */
4624 if (crtl->tail_call_emit)
4625 fixup_tail_calls ();
4627 /* After initial rtl generation, call back to finish generating
4628 exception support code. We need to do this before cleaning up
4629 the CFG as the code does not expect dead landing pads. */
4630 if (cfun->eh->region_tree != NULL)
4631 finish_eh_generation ();
4633 /* Remove unreachable blocks, otherwise we cannot compute dominators
4634 which are needed for loop state verification. As a side-effect
4635 this also compacts blocks.
4636 ??? We cannot remove trivially dead insns here as for example
4637 the DRAP reg on i?86 is not magically live at this point.
4638 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4639 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4641 #ifdef ENABLE_CHECKING
4642 verify_flow_info ();
4643 #endif
4645 /* Initialize pseudos allocated for hard registers. */
4646 emit_initial_value_sets ();
4648 /* And finally unshare all RTL. */
4649 unshare_all_rtl ();
4651 /* There's no need to defer outputting this function any more; we
4652 know we want to output it. */
4653 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4655 /* Now that we're done expanding trees to RTL, we shouldn't have any
4656 more CONCATs anywhere. */
4657 generating_concat_p = 0;
4659 if (dump_file)
4661 fprintf (dump_file,
4662 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4663 /* And the pass manager will dump RTL for us. */
4666 /* If we're emitting a nested function, make sure its parent gets
4667 emitted as well. Doing otherwise confuses debug info. */
4669 tree parent;
4670 for (parent = DECL_CONTEXT (current_function_decl);
4671 parent != NULL_TREE;
4672 parent = get_containing_scope (parent))
4673 if (TREE_CODE (parent) == FUNCTION_DECL)
4674 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4677 /* We are now committed to emitting code for this function. Do any
4678 preparation, such as emitting abstract debug info for the inline
4679 before it gets mangled by optimization. */
4680 if (cgraph_function_possibly_inlined_p (current_function_decl))
4681 (*debug_hooks->outlining_inline_function) (current_function_decl);
4683 TREE_ASM_WRITTEN (current_function_decl) = 1;
4685 /* After expanding, the return labels are no longer needed. */
4686 return_label = NULL;
4687 naked_return_label = NULL;
4689 /* After expanding, the tm_restart map is no longer needed. */
4690 if (cfun->gimple_df->tm_restart)
4692 htab_delete (cfun->gimple_df->tm_restart);
4693 cfun->gimple_df->tm_restart = NULL;
4696 /* Tag the blocks with a depth number so that change_scope can find
4697 the common parent easily. */
4698 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4699 default_rtl_profile ();
4701 timevar_pop (TV_POST_EXPAND);
4703 return 0;
4706 struct rtl_opt_pass pass_expand =
4709 RTL_PASS,
4710 "expand", /* name */
4711 NULL, /* gate */
4712 gimple_expand_cfg, /* execute */
4713 NULL, /* sub */
4714 NULL, /* next */
4715 0, /* static_pass_number */
4716 TV_EXPAND, /* tv_id */
4717 PROP_ssa | PROP_gimple_leh | PROP_cfg
4718 | PROP_gimple_lcx, /* properties_required */
4719 PROP_rtl, /* properties_provided */
4720 PROP_ssa | PROP_trees, /* properties_destroyed */
4721 TODO_verify_ssa | TODO_verify_flow
4722 | TODO_verify_stmts, /* todo_flags_start */
4723 TODO_ggc_collect /* todo_flags_finish */