gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "cfgloop.h"
51 #include "regs.h" /* For reg_renumber. */
52 #include "integrate.h" /* For emit_initial_value_sets. */
53 #include "insn-attr.h" /* For INSN_SCHEDULING. */
55 /* This variable holds information helping the rewriting of SSA trees
56 into RTL. */
57 struct ssaexpand SA;
59 /* This variable holds the currently expanded gimple statement for purposes
60 of communicating the profile info to the builtin expanders. */
61 gimple currently_expanding_gimple_stmt;
63 static rtx expand_debug_expr (tree);
65 /* Return an expression tree corresponding to the RHS of GIMPLE
66 statement STMT. */
68 tree
69 gimple_assign_rhs_to_tree (gimple stmt)
71 tree t;
72 enum gimple_rhs_class grhs_class;
74 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
76 if (grhs_class == GIMPLE_TERNARY_RHS)
77 t = build3 (gimple_assign_rhs_code (stmt),
78 TREE_TYPE (gimple_assign_lhs (stmt)),
79 gimple_assign_rhs1 (stmt),
80 gimple_assign_rhs2 (stmt),
81 gimple_assign_rhs3 (stmt));
82 else if (grhs_class == GIMPLE_BINARY_RHS)
83 t = build2 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt),
86 gimple_assign_rhs2 (stmt));
87 else if (grhs_class == GIMPLE_UNARY_RHS)
88 t = build1 (gimple_assign_rhs_code (stmt),
89 TREE_TYPE (gimple_assign_lhs (stmt)),
90 gimple_assign_rhs1 (stmt));
91 else if (grhs_class == GIMPLE_SINGLE_RHS)
93 t = gimple_assign_rhs1 (stmt);
94 /* Avoid modifying this tree in place below. */
95 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
96 && gimple_location (stmt) != EXPR_LOCATION (t))
97 || (gimple_block (stmt)
98 && currently_expanding_to_rtl
99 && EXPR_P (t)
100 && gimple_block (stmt) != TREE_BLOCK (t)))
101 t = copy_node (t);
103 else
104 gcc_unreachable ();
106 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
107 SET_EXPR_LOCATION (t, gimple_location (stmt));
108 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
109 TREE_BLOCK (t) = gimple_block (stmt);
111 return t;
115 #ifndef STACK_ALIGNMENT_NEEDED
116 #define STACK_ALIGNMENT_NEEDED 1
117 #endif
119 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
121 /* Associate declaration T with storage space X. If T is not an
122 SSA name this is exactly SET_DECL_RTL, otherwise make the
123 partition of T associated with X. */
124 static inline void
125 set_rtl (tree t, rtx x)
127 if (TREE_CODE (t) == SSA_NAME)
129 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
130 if (x && !MEM_P (x))
131 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
132 /* For the benefit of debug information at -O0 (where vartracking
133 doesn't run) record the place also in the base DECL if it's
134 a normal variable (not a parameter). */
135 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
137 tree var = SSA_NAME_VAR (t);
138 /* If we don't yet have something recorded, just record it now. */
139 if (!DECL_RTL_SET_P (var))
140 SET_DECL_RTL (var, x);
141 /* If we have it set already to "multiple places" don't
142 change this. */
143 else if (DECL_RTL (var) == pc_rtx)
145 /* If we have something recorded and it's not the same place
146 as we want to record now, we have multiple partitions for the
147 same base variable, with different places. We can't just
148 randomly choose one, hence we have to say that we don't know.
149 This only happens with optimization, and there var-tracking
150 will figure out the right thing. */
151 else if (DECL_RTL (var) != x)
152 SET_DECL_RTL (var, pc_rtx);
155 else
156 SET_DECL_RTL (t, x);
159 /* This structure holds data relevant to one variable that will be
160 placed in a stack slot. */
161 struct stack_var
163 /* The variable. */
164 tree decl;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
168 HOST_WIDE_INT size;
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment for this partition. */
172 unsigned int alignb;
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
178 size_t next;
181 /* The indices of conflicting stack variables. */
181 bitmap conflicts;
184 #define EOC ((size_t)-1)
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
190 static struct pointer_map_t *decl_to_stack_part;
192 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
193 is non-decreasing. */
194 static size_t *stack_vars_sorted;
196 /* The phase of the stack frame. This is the known misalignment of
197 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
198 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
199 static int frame_phase;
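/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 bits
   (16 bytes) and STARTING_FRAME_OFFSET is 8, expand_used_vars computes
   off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, i.e. (frame_offset + 8)
   is kept a multiple of 16 bytes as slots are carved out.  (Illustrative
   numbers; both macros are target-defined.)  */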
201 /* Used during expand_used_vars to remember if we saw any decls for
202 which we'd like to enable stack smashing protection. */
203 static bool has_protected_decls;
205 /* Used during expand_used_vars. Remember if we saw a character buffer
206 smaller than our cutoff threshold. Used for -Wstack-protector. */
207 static bool has_short_buffer;
209 /* Compute the byte alignment to use for DECL. Ignore alignment
210 we can't honor given the expected alignment of the stack boundary. */
212 static unsigned int
213 align_local_variable (tree decl)
215 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
216 DECL_ALIGN (decl) = align;
217 return align / BITS_PER_UNIT;
220 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
221 Return the frame offset. */
223 static HOST_WIDE_INT
224 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
226 HOST_WIDE_INT offset, new_frame_offset;
228 new_frame_offset = frame_offset;
229 if (FRAME_GROWS_DOWNWARD)
231 new_frame_offset -= size + frame_phase;
232 new_frame_offset &= -align;
233 new_frame_offset += frame_phase;
234 offset = new_frame_offset;
236 else
238 new_frame_offset -= frame_phase;
239 new_frame_offset += align - 1;
240 new_frame_offset &= -align;
241 new_frame_offset += frame_phase;
242 offset = new_frame_offset;
243 new_frame_offset += size;
245 frame_offset = new_frame_offset;
247 if (frame_offset_overflow (frame_offset, cfun->decl))
248 frame_offset = offset = 0;
250 return offset;
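/* Worked example of the arithmetic above (illustrative values): with
   FRAME_GROWS_DOWNWARD, frame_phase == 0, frame_offset == -20,
   size == 12 and align == 8:
     new_frame_offset = -20 - 12 = -32;  -32 & -8 = -32;
   so the variable is placed at offset -32 and frame_offset becomes -32.
   In the upward-growing case the offset is instead rounded up to the
   next multiple of ALIGN before SIZE is added.  */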
253 /* Accumulate DECL into STACK_VARS. */
255 static void
256 add_stack_var (tree decl)
258 struct stack_var *v;
260 if (stack_vars_num >= stack_vars_alloc)
262 if (stack_vars_alloc)
263 stack_vars_alloc = stack_vars_alloc * 3 / 2;
264 else
265 stack_vars_alloc = 32;
266 stack_vars
267 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
269 if (!decl_to_stack_part)
270 decl_to_stack_part = pointer_map_create ();
272 v = &stack_vars[stack_vars_num];
273 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
275 v->decl = decl;
276 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
277 /* Ensure that all variables have size, so that &a != &b for any two
278 variables that are simultaneously live. */
279 if (v->size == 0)
280 v->size = 1;
281 v->alignb = align_local_variable (SSAVAR (decl));
282 /* An alignment of zero can mightily confuse us later. */
283 gcc_assert (v->alignb != 0);
285 /* All variables are initially in their own partition. */
286 v->representative = stack_vars_num;
287 v->next = EOC;
289 /* All variables initially conflict with no other. */
290 v->conflicts = NULL;
292 /* Ensure that this decl doesn't get put onto the list twice. */
293 set_rtl (decl, pc_rtx);
295 stack_vars_num++;
298 /* Make the decls associated with luids X and Y conflict. */
300 static void
301 add_stack_var_conflict (size_t x, size_t y)
303 struct stack_var *a = &stack_vars[x];
304 struct stack_var *b = &stack_vars[y];
305 if (!a->conflicts)
306 a->conflicts = BITMAP_ALLOC (NULL);
307 if (!b->conflicts)
308 b->conflicts = BITMAP_ALLOC (NULL);
309 bitmap_set_bit (a->conflicts, y);
310 bitmap_set_bit (b->conflicts, x);
313 /* Check whether the decls associated with luids X and Y conflict. */
315 static bool
316 stack_var_conflict_p (size_t x, size_t y)
318 struct stack_var *a = &stack_vars[x];
319 struct stack_var *b = &stack_vars[y];
320 if (x == y)
321 return false;
322 /* Partitions containing an SSA name result from gimple registers
323 with things like unsupported modes. They are top-level and
324 hence conflict with everything else. */
325 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
326 return true;
328 if (!a->conflicts || !b->conflicts)
329 return false;
330 return bitmap_bit_p (a->conflicts, y);
333 /* Returns true if TYPE is or contains a union type. */
335 static bool
336 aggregate_contains_union_type (tree type)
338 tree field;
340 if (TREE_CODE (type) == UNION_TYPE
341 || TREE_CODE (type) == QUAL_UNION_TYPE)
342 return true;
343 if (TREE_CODE (type) == ARRAY_TYPE)
344 return aggregate_contains_union_type (TREE_TYPE (type));
345 if (TREE_CODE (type) != RECORD_TYPE)
346 return false;
348 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
349 if (TREE_CODE (field) == FIELD_DECL)
350 if (aggregate_contains_union_type (TREE_TYPE (field)))
351 return true;
353 return false;
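/* For instance, given
     struct S { int i; union U { short s; int j; } u; };
   aggregate_contains_union_type returns true for S (via the FIELD_DECL
   for U) but false for a plain struct { int i; double d; }.  */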
356 /* A subroutine of expand_used_vars. If two variables X and Y have alias
357 sets that do not conflict, then do add a conflict for these variables
358 in the interference graph. We also need to make sure to add conflicts
359 for structures containing unions. Otherwise RTL alias analysis comes
360 along and, due to type-based aliasing rules, decides that for two
361 overlapping union temporaries { short s; int i; } accesses to the same
362 memory through different types may not alias, and happily reorders
363 stores across the life-time boundaries of the temporaries (see PR25654). */
365 static void
366 add_alias_set_conflicts (void)
368 size_t i, j, n = stack_vars_num;
370 for (i = 0; i < n; ++i)
372 tree type_i = TREE_TYPE (stack_vars[i].decl);
373 bool aggr_i = AGGREGATE_TYPE_P (type_i);
374 bool contains_union;
376 contains_union = aggregate_contains_union_type (type_i);
377 for (j = 0; j < i; ++j)
379 tree type_j = TREE_TYPE (stack_vars[j].decl);
380 bool aggr_j = AGGREGATE_TYPE_P (type_j);
381 if (aggr_i != aggr_j
382 /* Either the objects conflict by means of type based
383 aliasing rules, or we need to add a conflict. */
384 || !objects_must_conflict_p (type_i, type_j)
385 /* In case the types do not conflict ensure that access
386 to elements will conflict. In case of unions we have
387 to be careful as type based aliasing rules may say
388 access to the same memory does not conflict. So play
389 safe and add a conflict in this case when
390 -fstrict-aliasing is used. */
391 || (contains_union && flag_strict_aliasing))
392 add_stack_var_conflict (i, j);
397 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
398 by add_stack_var, enter its partition number into bitmap DATA. */
400 static bool
401 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
403 bitmap active = (bitmap)data;
404 op = get_base_address (op);
405 if (op
406 && DECL_P (op)
407 && DECL_RTL_IF_SET (op) == pc_rtx)
409 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
410 if (v)
411 bitmap_set_bit (active, *v);
413 return false;
416 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched by
417 add_stack_var, record conflicts between it and all currently active other
418 partitions from bitmap DATA. */
420 static bool
421 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
423 bitmap active = (bitmap)data;
424 op = get_base_address (op);
425 if (op
426 && DECL_P (op)
427 && DECL_RTL_IF_SET (op) == pc_rtx)
429 size_t *v =
430 (size_t *) pointer_map_contains (decl_to_stack_part, op);
431 if (v && bitmap_set_bit (active, *v))
433 size_t num = *v;
434 bitmap_iterator bi;
435 unsigned i;
436 gcc_assert (num < stack_vars_num);
437 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
438 add_stack_var_conflict (num, i);
441 return false;
444 /* Helper routine for add_scope_conflicts, calculating the active partitions
445 at the end of BB, leaving the result in WORK. We're called to generate
446 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
447 liveness. */
449 static void
450 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
452 edge e;
453 edge_iterator ei;
454 gimple_stmt_iterator gsi;
455 bool (*visit)(gimple, tree, void *);
457 bitmap_clear (work);
458 FOR_EACH_EDGE (e, ei, bb->preds)
459 bitmap_ior_into (work, (bitmap)e->src->aux);
461 visit = visit_op;
463 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
465 gimple stmt = gsi_stmt (gsi);
466 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
468 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
470 gimple stmt = gsi_stmt (gsi);
472 if (gimple_clobber_p (stmt))
474 tree lhs = gimple_assign_lhs (stmt);
475 size_t *v;
476 /* Nested function lowering might introduce LHSs
477 that are COMPONENT_REFs. */
478 if (TREE_CODE (lhs) != VAR_DECL)
479 continue;
480 if (DECL_RTL_IF_SET (lhs) == pc_rtx
481 && (v = (size_t *)
482 pointer_map_contains (decl_to_stack_part, lhs)))
483 bitmap_clear_bit (work, *v);
485 else if (!is_gimple_debug (stmt))
487 if (for_conflict
488 && visit == visit_op)
490 /* If this is the first real instruction in this BB we need
491 to add conflicts for everything live at this point now.
492 Unlike classical liveness for named objects we can't
493 rely on seeing a def/use of the names we're interested in.
494 There might merely be indirect loads/stores. We'd not add any
495 conflicts for such partitions. */
496 bitmap_iterator bi;
497 unsigned i;
498 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
500 unsigned j;
501 bitmap_iterator bj;
502 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
503 add_stack_var_conflict (i, j);
505 visit = visit_conflict;
507 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
512 /* Generate stack partition conflicts between all partitions that are
513 simultaneously live. */
515 static void
516 add_scope_conflicts (void)
518 basic_block bb;
519 bool changed;
520 bitmap work = BITMAP_ALLOC (NULL);
522 /* We approximate the live range of a stack variable by taking the first
523 mention of its name as starting point(s), and by the end-of-scope
524 death clobber added by gimplify as ending point(s) of the range.
525 This over-approximates in the case where we, for instance, moved an
526 address-taken operation upward without also moving a dereference of it
527 upward. But it is conservatively correct, as a variable can never hold
528 values before its name has been mentioned at least once.
530 We then do a mostly classical bitmap liveness algorithm. */
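/* As an illustration, for a variable x the tracked range is roughly

       ... x first mentioned ...   <- x becomes live here
       ...
       x = {CLOBBER};              <- end-of-scope clobber, x dies here

   and x's partition only gains conflicts with partitions that are live
   somewhere in between.  (The clobber line is shown roughly in its
   gimple dump form.)  */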
532 FOR_ALL_BB (bb)
533 bb->aux = BITMAP_ALLOC (NULL);
535 changed = true;
536 while (changed)
538 changed = false;
539 FOR_EACH_BB (bb)
541 bitmap active = (bitmap)bb->aux;
542 add_scope_conflicts_1 (bb, work, false);
543 if (bitmap_ior_into (active, work))
544 changed = true;
548 FOR_EACH_BB (bb)
549 add_scope_conflicts_1 (bb, work, true);
551 BITMAP_FREE (work);
552 FOR_ALL_BB (bb)
553 BITMAP_FREE (bb->aux);
556 /* A subroutine of partition_stack_vars. A comparison function for qsort,
557 sorting an array of indices by the properties of the object. */
559 static int
560 stack_var_cmp (const void *a, const void *b)
562 size_t ia = *(const size_t *)a;
563 size_t ib = *(const size_t *)b;
564 unsigned int aligna = stack_vars[ia].alignb;
565 unsigned int alignb = stack_vars[ib].alignb;
566 HOST_WIDE_INT sizea = stack_vars[ia].size;
567 HOST_WIDE_INT sizeb = stack_vars[ib].size;
568 tree decla = stack_vars[ia].decl;
569 tree declb = stack_vars[ib].decl;
570 bool largea, largeb;
571 unsigned int uida, uidb;
573 /* Primary compare on "large" alignment. Large comes first. */
574 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
575 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
576 if (largea != largeb)
577 return (int)largeb - (int)largea;
579 /* Secondary compare on size, decreasing. */
580 if (sizea > sizeb)
581 return -1;
582 if (sizea < sizeb)
583 return 1;
585 /* Tertiary compare on true alignment, decreasing. */
586 if (aligna < alignb)
587 return -1;
588 if (aligna > alignb)
589 return 1;
591 /* Final compare on ID for sort stability, increasing.
592 Two SSA names are compared by their version, SSA names come before
593 non-SSA names, and two normal decls are compared by their DECL_UID. */
594 if (TREE_CODE (decla) == SSA_NAME)
596 if (TREE_CODE (declb) == SSA_NAME)
597 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
598 else
599 return -1;
601 else if (TREE_CODE (declb) == SSA_NAME)
602 return 1;
603 else
604 uida = DECL_UID (decla), uidb = DECL_UID (declb);
605 if (uida < uidb)
606 return 1;
607 if (uida > uidb)
608 return -1;
609 return 0;
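/* Example ordering (illustrative numbers, assuming
   MAX_SUPPORTED_STACK_ALIGNMENT is 256 bits): three variables
   { a: 16 bytes, align 64 bytes; b: 64 bytes, align 8; c: 8 bytes,
   align 8 } sort as a (large alignment first), then b, then c
   (decreasing size among the small-aligned ones).  */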
613 /* If the points-to solution *PT points to variables that are in a partition
614 together with other variables add all partition members to the pointed-to
615 variables bitmap. */
617 static void
618 add_partitioned_vars_to_ptset (struct pt_solution *pt,
619 struct pointer_map_t *decls_to_partitions,
620 struct pointer_set_t *visited, bitmap temp)
622 bitmap_iterator bi;
623 unsigned i;
624 bitmap *part;
626 if (pt->anything
627 || pt->vars == NULL
628 /* The pointed-to vars bitmap is shared, it is enough to
629 visit it once. */
630 || pointer_set_insert(visited, pt->vars))
631 return;
633 bitmap_clear (temp);
635 /* By using a temporary bitmap to store all members of the partitions
636 we have to add we make sure to visit each of the partitions only
637 once. */
638 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
639 if ((!temp
640 || !bitmap_bit_p (temp, i))
641 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
642 (void *)(size_t) i)))
643 bitmap_ior_into (temp, *part);
644 if (!bitmap_empty_p (temp))
645 bitmap_ior_into (pt->vars, temp);
648 /* Update points-to sets based on partition info, so we can use them on RTL.
649 The bitmaps representing stack partitions will be saved until expand,
650 where partitioned decls used as bases in memory expressions will be
651 rewritten. */
653 static void
654 update_alias_info_with_stack_vars (void)
656 struct pointer_map_t *decls_to_partitions = NULL;
657 size_t i, j;
658 tree var = NULL_TREE;
660 for (i = 0; i < stack_vars_num; i++)
662 bitmap part = NULL;
663 tree name;
664 struct ptr_info_def *pi;
666 /* Not interested in partitions with a single variable. */
667 if (stack_vars[i].representative != i
668 || stack_vars[i].next == EOC)
669 continue;
671 if (!decls_to_partitions)
673 decls_to_partitions = pointer_map_create ();
674 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
677 /* Create an SSA_NAME that points to the partition for use
678 as base during alias-oracle queries on RTL for bases that
679 have been partitioned. */
680 if (var == NULL_TREE)
681 var = create_tmp_var (ptr_type_node, NULL);
682 name = make_ssa_name (var, NULL);
684 /* Create bitmaps representing partitions. They will be used for
685 points-to sets later, so use GGC alloc. */
686 part = BITMAP_GGC_ALLOC ();
687 for (j = i; j != EOC; j = stack_vars[j].next)
689 tree decl = stack_vars[j].decl;
690 unsigned int uid = DECL_PT_UID (decl);
691 /* We should never end up partitioning SSA names (though they
692 may end up on the stack). Neither should we allocate stack
693 space to something that is unused and thus unreferenced, except
694 for -O0 where we are preserving even unreferenced variables. */
695 gcc_assert (DECL_P (decl)
696 && (!optimize
697 || referenced_var_lookup (cfun, DECL_UID (decl))));
698 bitmap_set_bit (part, uid);
699 *((bitmap *) pointer_map_insert (decls_to_partitions,
700 (void *)(size_t) uid)) = part;
701 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
702 decl)) = name;
705 /* Make the SSA name point to all partition members. */
706 pi = get_ptr_info (name);
707 pt_solution_set (&pi->pt, part, false);
710 /* Make all points-to sets that contain one member of a partition
711 contain all members of the partition. */
712 if (decls_to_partitions)
714 unsigned i;
715 struct pointer_set_t *visited = pointer_set_create ();
716 bitmap temp = BITMAP_ALLOC (NULL);
718 for (i = 1; i < num_ssa_names; i++)
720 tree name = ssa_name (i);
721 struct ptr_info_def *pi;
723 if (name
724 && POINTER_TYPE_P (TREE_TYPE (name))
725 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
726 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
727 visited, temp);
730 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
731 decls_to_partitions, visited, temp);
733 pointer_set_destroy (visited);
734 pointer_map_destroy (decls_to_partitions);
735 BITMAP_FREE (temp);
739 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
740 partitioning algorithm. Partitions A and B are known to be non-conflicting.
741 Merge them into a single partition A. */
743 static void
744 union_stack_vars (size_t a, size_t b)
746 struct stack_var *vb = &stack_vars[b];
747 bitmap_iterator bi;
748 unsigned u;
750 gcc_assert (stack_vars[b].next == EOC);
751 /* Add B to A's partition. */
752 stack_vars[b].next = stack_vars[a].next;
753 stack_vars[b].representative = a;
754 stack_vars[a].next = b;
756 /* Update the required alignment of partition A to account for B. */
757 if (stack_vars[a].alignb < stack_vars[b].alignb)
758 stack_vars[a].alignb = stack_vars[b].alignb;
760 /* Update the interference graph and merge the conflicts. */
761 if (vb->conflicts)
763 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
764 add_stack_var_conflict (a, stack_vars[u].representative);
765 BITMAP_FREE (vb->conflicts);
769 /* A subroutine of expand_used_vars. Binpack the variables into
770 partitions constrained by the interference graph. The overall
771 algorithm used is as follows:
773 Sort the objects by size in descending order.
774 For each object A {
775 S = size(A)
776 O = 0
777 loop {
778 Look for the largest non-conflicting object B with size <= S.
779 UNION (A, B)
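E.g. with objects A (32 bytes), B (16 bytes) and C (16 bytes) where
only A and B conflict, the loop unions A with C and leaves B in its
own partition; A's partition then occupies a single 32-byte slot
shared by A and C.  (Illustrative sizes.)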
784 static void
785 partition_stack_vars (void)
787 size_t si, sj, n = stack_vars_num;
789 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
790 for (si = 0; si < n; ++si)
791 stack_vars_sorted[si] = si;
793 if (n == 1)
794 return;
796 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
798 for (si = 0; si < n; ++si)
800 size_t i = stack_vars_sorted[si];
801 unsigned int ialign = stack_vars[i].alignb;
803 /* Ignore objects that aren't partition representatives. If we
804 see a var that is not a partition representative, it must
805 have been merged earlier. */
806 if (stack_vars[i].representative != i)
807 continue;
809 for (sj = si + 1; sj < n; ++sj)
811 size_t j = stack_vars_sorted[sj];
812 unsigned int jalign = stack_vars[j].alignb;
814 /* Ignore objects that aren't partition representatives. */
815 if (stack_vars[j].representative != j)
816 continue;
818 /* Ignore conflicting objects. */
819 if (stack_var_conflict_p (i, j))
820 continue;
822 /* Do not mix objects of "small" (supported) alignment
823 and "large" (unsupported) alignment. */
824 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
825 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
826 continue;
828 /* UNION the objects; J joins I's partition. */
829 union_stack_vars (i, j);
833 update_alias_info_with_stack_vars ();
836 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
838 static void
839 dump_stack_var_partition (void)
841 size_t si, i, j, n = stack_vars_num;
843 for (si = 0; si < n; ++si)
845 i = stack_vars_sorted[si];
847 /* Skip variables that aren't partition representatives, for now. */
848 if (stack_vars[i].representative != i)
849 continue;
851 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
852 " align %u\n", (unsigned long) i, stack_vars[i].size,
853 stack_vars[i].alignb);
855 for (j = i; j != EOC; j = stack_vars[j].next)
857 fputc ('\t', dump_file);
858 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
860 fputc ('\n', dump_file);
864 /* Assign rtl to DECL at BASE + OFFSET. */
866 static void
867 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
868 HOST_WIDE_INT offset)
870 unsigned align;
871 rtx x;
873 /* If this fails, we've overflowed the stack frame. Error nicely? */
874 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
876 x = plus_constant (Pmode, base, offset);
877 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
879 if (TREE_CODE (decl) != SSA_NAME)
881 /* Set the alignment we actually gave this decl if it isn't an SSA name.
882 If it is, we generate stack slots only accidentally, so it isn't as
883 important; we'll simply use the alignment that is already set. */
884 if (base == virtual_stack_vars_rtx)
885 offset -= frame_phase;
886 align = offset & -offset;
887 align *= BITS_PER_UNIT;
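/* offset & -offset isolates the lowest set bit: e.g. an offset of 24
   (binary 11000) yields 8, i.e. 64-bit alignment after the scaling
   above.  */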
888 if (align == 0 || align > base_align)
889 align = base_align;
891 /* One would think that we could assert that we're not decreasing
892 alignment here, but (at least) the i386 port does exactly this
893 via the MINIMUM_ALIGNMENT hook. */
895 DECL_ALIGN (decl) = align;
896 DECL_USER_ALIGN (decl) = 0;
899 set_mem_attributes (x, SSAVAR (decl), true);
900 set_rtl (decl, x);
903 /* A subroutine of expand_used_vars. Give each partition representative
904 a unique location within the stack frame. Update each partition member
905 with that location. */
907 static void
908 expand_stack_vars (bool (*pred) (tree))
910 size_t si, i, j, n = stack_vars_num;
911 HOST_WIDE_INT large_size = 0, large_alloc = 0;
912 rtx large_base = NULL;
913 unsigned large_align = 0;
914 tree decl;
916 /* Determine if there are any variables requiring "large" alignment.
917 Since these are dynamically allocated, we only process these if
918 no predicate is involved. */
919 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
920 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
922 /* Find the total size of these variables. */
923 for (si = 0; si < n; ++si)
925 unsigned alignb;
927 i = stack_vars_sorted[si];
928 alignb = stack_vars[i].alignb;
930 /* Stop when we get to the first decl with "small" alignment. */
931 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
932 break;
934 /* Skip variables that aren't partition representatives. */
935 if (stack_vars[i].representative != i)
936 continue;
938 /* Skip variables that have already had rtl assigned. See also
939 add_stack_var where we perpetrate this pc_rtx hack. */
940 decl = stack_vars[i].decl;
941 if ((TREE_CODE (decl) == SSA_NAME
942 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
943 : DECL_RTL (decl)) != pc_rtx)
944 continue;
946 large_size += alignb - 1;
947 large_size &= -(HOST_WIDE_INT)alignb;
948 large_size += stack_vars[i].size;
951 /* If there were any, allocate space. */
952 if (large_size > 0)
953 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
954 large_align, true);
957 for (si = 0; si < n; ++si)
959 rtx base;
960 unsigned base_align, alignb;
961 HOST_WIDE_INT offset;
963 i = stack_vars_sorted[si];
965 /* Skip variables that aren't partition representatives, for now. */
966 if (stack_vars[i].representative != i)
967 continue;
969 /* Skip variables that have already had rtl assigned. See also
970 add_stack_var where we perpetrate this pc_rtx hack. */
971 decl = stack_vars[i].decl;
972 if ((TREE_CODE (decl) == SSA_NAME
973 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
974 : DECL_RTL (decl)) != pc_rtx)
975 continue;
977 /* Check the predicate to see whether this variable should be
978 allocated in this pass. */
979 if (pred && !pred (decl))
980 continue;
982 alignb = stack_vars[i].alignb;
983 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
985 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
986 base = virtual_stack_vars_rtx;
987 base_align = crtl->max_used_stack_slot_alignment;
989 else
991 /* Large alignment is only processed in the last pass. */
992 if (pred)
993 continue;
994 gcc_assert (large_base != NULL);
996 large_alloc += alignb - 1;
997 large_alloc &= -(HOST_WIDE_INT)alignb;
998 offset = large_alloc;
999 large_alloc += stack_vars[i].size;
1001 base = large_base;
1002 base_align = large_align;
1005 /* Create rtl for each variable based on their location within the
1006 partition. */
1007 for (j = i; j != EOC; j = stack_vars[j].next)
1009 expand_one_stack_var_at (stack_vars[j].decl,
1010 base, base_align,
1011 offset);
1015 gcc_assert (large_alloc == large_size);
1018 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1019 static HOST_WIDE_INT
1020 account_stack_vars (void)
1022 size_t si, j, i, n = stack_vars_num;
1023 HOST_WIDE_INT size = 0;
1025 for (si = 0; si < n; ++si)
1027 i = stack_vars_sorted[si];
1029 /* Skip variables that aren't partition representatives, for now. */
1030 if (stack_vars[i].representative != i)
1031 continue;
1033 size += stack_vars[i].size;
1034 for (j = i; j != EOC; j = stack_vars[j].next)
1035 set_rtl (stack_vars[j].decl, NULL);
1037 return size;
1040 /* A subroutine of expand_one_var. Called to immediately assign rtl
1041 to a variable to be allocated in the stack frame. */
1043 static void
1044 expand_one_stack_var (tree var)
1046 HOST_WIDE_INT size, offset;
1047 unsigned byte_align;
1049 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1050 byte_align = align_local_variable (SSAVAR (var));
1052 /* We handle highly aligned variables in expand_stack_vars. */
1053 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1055 offset = alloc_stack_frame_space (size, byte_align);
1057 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1058 crtl->max_used_stack_slot_alignment, offset);
1061 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1062 that will reside in a hard register. */
1064 static void
1065 expand_one_hard_reg_var (tree var)
1067 rest_of_decl_compilation (var, 0, 0);
1070 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1071 that will reside in a pseudo register. */
1073 static void
1074 expand_one_register_var (tree var)
1076 tree decl = SSAVAR (var);
1077 tree type = TREE_TYPE (decl);
1078 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1079 rtx x = gen_reg_rtx (reg_mode);
1081 set_rtl (var, x);
1083 /* Note if the object is a user variable. */
1084 if (!DECL_ARTIFICIAL (decl))
1085 mark_user_reg (x);
1087 if (POINTER_TYPE_P (type))
1088 mark_reg_pointer (x, get_pointer_alignment (var));
1091 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1092 has some associated error, e.g. its type is error-mark. We just need
1093 to pick something that won't crash the rest of the compiler. */
1095 static void
1096 expand_one_error_var (tree var)
1098 enum machine_mode mode = DECL_MODE (var);
1099 rtx x;
1101 if (mode == BLKmode)
1102 x = gen_rtx_MEM (BLKmode, const0_rtx);
1103 else if (mode == VOIDmode)
1104 x = const0_rtx;
1105 else
1106 x = gen_reg_rtx (mode);
1108 SET_DECL_RTL (var, x);
1111 /* A subroutine of expand_one_var. VAR is a variable that will be
1112 allocated to the local stack frame. Return true if we wish to
1113 add VAR to STACK_VARS so that it will be coalesced with other
1114 variables. Return false to allocate VAR immediately.
1116 This function is used to reduce the number of variables considered
1117 for coalescing, which reduces the size of the quadratic problem. */
1119 static bool
1120 defer_stack_allocation (tree var, bool toplevel)
1122 /* If stack protection is enabled, *all* stack variables must be deferred,
1123 so that we can re-order the strings to the top of the frame. */
1124 if (flag_stack_protect)
1125 return true;
1127 /* We handle "large" alignment via dynamic allocation. We want to handle
1128 this extra complication in only one place, so defer them. */
1129 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1130 return true;
1132 /* Variables in the outermost scope automatically conflict with
1133 every other variable. The only reason to want to defer them
1134 at all is that, after sorting, we can more efficiently pack
1135 small variables in the stack frame. Continue to defer at -O2. */
1136 if (toplevel && optimize < 2)
1137 return false;
1139 /* Without optimization, *most* variables are allocated from the
1140 stack, which makes the quadratic problem large exactly when we
1141 want compilation to proceed as quickly as possible. On the
1142 other hand, we don't want the function's stack frame size to
1143 get completely out of hand. So we avoid adding scalars and
1144 "small" aggregates to the list at all. */
1145 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1146 return false;
1148 return true;
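/* E.g. at -O0, with stack protection off, a block-scope 4-byte scalar
   is allocated immediately (below the 32-byte cutoff above), while a
   64-byte array is still deferred so it can be packed together with
   other large locals.  */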
1151 /* A subroutine of expand_used_vars. Expand one variable according to
1152 its flavor. Variables to be placed on the stack are not actually
1153 expanded yet, merely recorded.
1154 When REALLY_EXPAND is false, only add stack values to be allocated.
1155 Return the stack usage this variable is supposed to take.
1158 static HOST_WIDE_INT
1159 expand_one_var (tree var, bool toplevel, bool really_expand)
1161 unsigned int align = BITS_PER_UNIT;
1162 tree origvar = var;
1164 var = SSAVAR (var);
1166 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1168 /* Because we don't know if VAR will be in register or on stack,
1169 we conservatively assume it will be on stack even if VAR is
1170 eventually put into register after RA pass. For non-automatic
1171 variables, which won't be on stack, we collect alignment of
1172 type and ignore user specified alignment. */
1173 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1174 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1175 TYPE_MODE (TREE_TYPE (var)),
1176 TYPE_ALIGN (TREE_TYPE (var)));
1177 else if (DECL_HAS_VALUE_EXPR_P (var)
1178 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1179 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1180 or variables which were assigned a stack slot already by
1181 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1182 changed from the offset chosen to it. */
1183 align = crtl->stack_alignment_estimated;
1184 else
1185 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1187 /* If the variable alignment is very large we'll dynamically allocate
1188 it, which means that in-frame portion is just a pointer. */
1189 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1190 align = POINTER_SIZE;
1193 if (SUPPORTS_STACK_ALIGNMENT
1194 && crtl->stack_alignment_estimated < align)
1196 /* stack_alignment_estimated shouldn't change after stack
1197 realign decision has been made. */
1198 gcc_assert(!crtl->stack_realign_processed);
1199 crtl->stack_alignment_estimated = align;
1202 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1203 So here we only make sure stack_alignment_needed >= align. */
1204 if (crtl->stack_alignment_needed < align)
1205 crtl->stack_alignment_needed = align;
1206 if (crtl->max_used_stack_slot_alignment < align)
1207 crtl->max_used_stack_slot_alignment = align;
1209 if (TREE_CODE (origvar) == SSA_NAME)
1211 gcc_assert (TREE_CODE (var) != VAR_DECL
1212 || (!DECL_EXTERNAL (var)
1213 && !DECL_HAS_VALUE_EXPR_P (var)
1214 && !TREE_STATIC (var)
1215 && TREE_TYPE (var) != error_mark_node
1216 && !DECL_HARD_REGISTER (var)
1217 && really_expand));
1219 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1221 else if (DECL_EXTERNAL (var))
1223 else if (DECL_HAS_VALUE_EXPR_P (var))
1225 else if (TREE_STATIC (var))
1227 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1229 else if (TREE_TYPE (var) == error_mark_node)
1231 if (really_expand)
1232 expand_one_error_var (var);
1234 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1236 if (really_expand)
1237 expand_one_hard_reg_var (var);
1239 else if (use_register_for_decl (var))
1241 if (really_expand)
1242 expand_one_register_var (origvar);
1244 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1246 /* Reject variables which cover more than half of the address-space. */
1247 if (really_expand)
1249 error ("size of variable %q+D is too large", var);
1250 expand_one_error_var (var);
1253 else if (defer_stack_allocation (var, toplevel))
1254 add_stack_var (origvar);
1255 else
1257 if (really_expand)
1258 expand_one_stack_var (origvar);
1259 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1261 return 0;
1264 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1265 expanding variables. Those variables that can be put into registers
1266 are allocated pseudos; those that can't are put on the stack.
1268 TOPLEVEL is true if this is the outermost BLOCK. */
1270 static void
1271 expand_used_vars_for_block (tree block, bool toplevel)
1273 tree t;
1275 /* Expand all variables at this level. */
1276 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1277 if (TREE_USED (t)
1278 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1279 || !DECL_NONSHAREABLE (t)))
1280 expand_one_var (t, toplevel, true);
1282 /* Expand all variables at containing levels. */
1283 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1284 expand_used_vars_for_block (t, false);
1287 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1288 and clear TREE_USED on all local variables. */
1290 static void
1291 clear_tree_used (tree block)
1293 tree t;
1295 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1296 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1297 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1298 || !DECL_NONSHAREABLE (t))
1299 TREE_USED (t) = 0;
1301 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1302 clear_tree_used (t);
1305 /* Examine TYPE and determine a bit mask of the following features. */
1307 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1308 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1309 #define SPCT_HAS_ARRAY 4
1310 #define SPCT_HAS_AGGREGATE 8
1312 static unsigned int
1313 stack_protect_classify_type (tree type)
1315 unsigned int ret = 0;
1316 tree t;
1318 switch (TREE_CODE (type))
1320 case ARRAY_TYPE:
1321 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1322 if (t == char_type_node
1323 || t == signed_char_type_node
1324 || t == unsigned_char_type_node)
1326 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1327 unsigned HOST_WIDE_INT len;
1329 if (!TYPE_SIZE_UNIT (type)
1330 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1331 len = max;
1332 else
1333 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1335 if (len < max)
1336 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1337 else
1338 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1340 else
1341 ret = SPCT_HAS_ARRAY;
1342 break;
1344 case UNION_TYPE:
1345 case QUAL_UNION_TYPE:
1346 case RECORD_TYPE:
1347 ret = SPCT_HAS_AGGREGATE;
1348 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1349 if (TREE_CODE (t) == FIELD_DECL)
1350 ret |= stack_protect_classify_type (TREE_TYPE (t));
1351 break;
1353 default:
1354 break;
1357 return ret;
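/* For example, with an ssp-buffer-size of 8 (the usual default for
   --param ssp-buffer-size), char buf[4] classifies as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, char buf[64] as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and int arr[4] as just
   SPCT_HAS_ARRAY.  */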
1360 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1361 part of the local stack frame. Remember if we ever return nonzero for
1362 any variable in this function. The return value is the phase number in
1363 which the variable should be allocated. */
1365 static int
1366 stack_protect_decl_phase (tree decl)
1368 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1369 int ret = 0;
1371 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1372 has_short_buffer = true;
1374 if (flag_stack_protect == 2)
1376 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1377 && !(bits & SPCT_HAS_AGGREGATE))
1378 ret = 1;
1379 else if (bits & SPCT_HAS_ARRAY)
1380 ret = 2;
1382 else
1383 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1385 if (ret)
1386 has_protected_decls = true;
1388 return ret;
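/* Continuing the example above: with -fstack-protector-all
   (i.e. flag_stack_protect == 2) char buf[4] is placed in phase 1,
   int arr[4] in phase 2 and a plain int in phase 0 (not segregated),
   whereas with plain -fstack-protector only the large char array gets
   phase 1.  */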
1391 /* Two helper routines that check for phase 1 and phase 2. These are used
1392 as callbacks for expand_stack_vars. */
1394 static bool
1395 stack_protect_decl_phase_1 (tree decl)
1397 return stack_protect_decl_phase (decl) == 1;
1400 static bool
1401 stack_protect_decl_phase_2 (tree decl)
1403 return stack_protect_decl_phase (decl) == 2;
1406 /* Ensure that variables in different stack protection phases conflict
1407 so that they are not merged and share the same stack slot. */
1409 static void
1410 add_stack_protection_conflicts (void)
1412 size_t i, j, n = stack_vars_num;
1413 unsigned char *phase;
1415 phase = XNEWVEC (unsigned char, n);
1416 for (i = 0; i < n; ++i)
1417 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1419 for (i = 0; i < n; ++i)
1421 unsigned char ph_i = phase[i];
1422 for (j = 0; j < i; ++j)
1423 if (ph_i != phase[j])
1424 add_stack_var_conflict (i, j);
1427 XDELETEVEC (phase);
1430 /* Create a decl for the guard at the top of the stack frame. */
1432 static void
1433 create_stack_guard (void)
1435 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1436 VAR_DECL, NULL, ptr_type_node);
1437 TREE_THIS_VOLATILE (guard) = 1;
1438 TREE_USED (guard) = 1;
1439 expand_one_stack_var (guard);
1440 crtl->stack_protect_guard = guard;
1443 /* Prepare for expanding variables. */
1444 static void
1445 init_vars_expansion (void)
1447 tree t;
1448 unsigned ix;
1449 /* Set TREE_USED on all variables in the local_decls. */
1450 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1451 TREE_USED (t) = 1;
1453 /* Clear TREE_USED on all variables associated with a block scope. */
1454 clear_tree_used (DECL_INITIAL (current_function_decl));
1456 /* Initialize local stack smashing state. */
1457 has_protected_decls = false;
1458 has_short_buffer = false;
1461 /* Free up stack variable graph data. */
1462 static void
1463 fini_vars_expansion (void)
1465 size_t i, n = stack_vars_num;
1466 for (i = 0; i < n; i++)
1467 BITMAP_FREE (stack_vars[i].conflicts);
1468 XDELETEVEC (stack_vars);
1469 XDELETEVEC (stack_vars_sorted);
1470 stack_vars = NULL;
1471 stack_vars_alloc = stack_vars_num = 0;
1472 pointer_map_destroy (decl_to_stack_part);
1473 decl_to_stack_part = NULL;
1476 /* Make a fair guess for the size of the stack frame of the function
1477 in NODE. This doesn't have to be exact, the result is only used in
1478 the inline heuristics. So we don't want to run the full stack var
1479 packing algorithm (which is quadratic in the number of stack vars).
1480 Instead, we calculate the total size of all stack vars. This turns
1481 out to be a pretty fair estimate -- packing of stack vars doesn't
1482 happen very often. */
1484 HOST_WIDE_INT
1485 estimated_stack_frame_size (struct cgraph_node *node)
1487 HOST_WIDE_INT size = 0;
1488 size_t i;
1489 tree var;
1490 tree old_cur_fun_decl = current_function_decl;
1491 referenced_var_iterator rvi;
1492 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1494 current_function_decl = node->symbol.decl;
1495 push_cfun (fn);
1497 gcc_checking_assert (gimple_referenced_vars (fn));
1498 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1499 size += expand_one_var (var, true, false);
1501 if (stack_vars_num > 0)
1503 /* Fake sorting the stack vars for account_stack_vars (). */
1504 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1505 for (i = 0; i < stack_vars_num; ++i)
1506 stack_vars_sorted[i] = i;
1507 size += account_stack_vars ();
1508 fini_vars_expansion ();
1510 pop_cfun ();
1511 current_function_decl = old_cur_fun_decl;
1512 return size;
1515 /* Expand all variables used in the function. */
1517 static void
1518 expand_used_vars (void)
1520 tree var, outer_block = DECL_INITIAL (current_function_decl);
1521 VEC(tree,heap) *maybe_local_decls = NULL;
1522 unsigned i;
1523 unsigned len;
1525 /* Compute the phase of the stack frame for this function. */
1527 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1528 int off = STARTING_FRAME_OFFSET % align;
1529 frame_phase = off ? align - off : 0;
1532 init_vars_expansion ();
1534 for (i = 0; i < SA.map->num_partitions; i++)
1536 tree var = partition_to_var (SA.map, i);
1538 gcc_assert (is_gimple_reg (var));
1539 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1540 expand_one_var (var, true, true);
1541 else
1543 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1544 contain the default def (representing the parm or result itself)
1545 we don't do anything here. But those which don't contain the
1546 default def (representing a temporary based on the parm/result)
1547 we need to allocate space just like for normal VAR_DECLs. */
1548 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1550 expand_one_var (var, true, true);
1551 gcc_assert (SA.partition_to_pseudo[i]);
1556 /* At this point all variables on the local_decls with TREE_USED
1557 set are not associated with any block scope. Lay them out. */
1559 len = VEC_length (tree, cfun->local_decls);
1560 FOR_EACH_LOCAL_DECL (cfun, i, var)
1562 bool expand_now = false;
1564 /* Expanded above already. */
1565 if (is_gimple_reg (var))
1567 TREE_USED (var) = 0;
1568 goto next;
1570 /* We didn't set a block for static or extern because it's hard
1571 to tell the difference between a global variable (re)declared
1572 in a local scope, and one that's really declared there to
1573 begin with. And it doesn't really matter much, since we're
1574 not giving them stack space. Expand them now. */
1575 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1576 expand_now = true;
1578 /* If the variable is not associated with any block, then it
1579 was created by the optimizers, and could be live anywhere
1580 in the function. */
1581 else if (TREE_USED (var))
1582 expand_now = true;
1584 /* Finally, mark all variables on the list as used. We'll use
1585 this in a moment when we expand those associated with scopes. */
1586 TREE_USED (var) = 1;
1588 if (expand_now)
1589 expand_one_var (var, true, true);
1591 next:
1592 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1594 rtx rtl = DECL_RTL_IF_SET (var);
1596 /* Keep artificial non-ignored vars in cfun->local_decls
1597 chain until instantiate_decls. */
1598 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1599 add_local_decl (cfun, var);
1600 else if (rtl == NULL_RTX)
1601 /* If rtl isn't set yet, which can happen e.g. with
1602 -fstack-protector, retry before returning from this
1603 function. */
1604 VEC_safe_push (tree, heap, maybe_local_decls, var);
1608 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1610      +-----------------+-----------------+
1611      | ...processed... | ...duplicates...|
1612      +-----------------+-----------------+
1613                        ^
1614                        +-- LEN points here.
1616 We just want the duplicates, as those are the artificial
1617 non-ignored vars that we want to keep until instantiate_decls.
1618 Move them down and truncate the array. */
1619 if (!VEC_empty (tree, cfun->local_decls))
1620 VEC_block_remove (tree, cfun->local_decls, 0, len);
1622 /* At this point, all variables within the block tree with TREE_USED
1623 set are actually used by the optimized function. Lay them out. */
1624 expand_used_vars_for_block (outer_block, true);
1626 if (stack_vars_num > 0)
1628 add_scope_conflicts ();
1629 /* Due to the way alias sets work, no variables with non-conflicting
1630 alias sets may be assigned the same address. Add conflicts to
1631 reflect this. */
1632 add_alias_set_conflicts ();
1634 /* If stack protection is enabled, we don't share space between
1635 vulnerable data and non-vulnerable data. */
1636 if (flag_stack_protect)
1637 add_stack_protection_conflicts ();
1639 /* Now that we have collected all stack variables, and have computed a
1640 minimal interference graph, attempt to save some stack space. */
1641 partition_stack_vars ();
1642 if (dump_file)
1643 dump_stack_var_partition ();
1646 /* There are several conditions under which we should create a
1647 stack guard: protect-all, alloca used, protected decls present. */
1648 if (flag_stack_protect == 2
1649 || (flag_stack_protect
1650 && (cfun->calls_alloca || has_protected_decls)))
1651 create_stack_guard ();
1653 /* Assign rtl to each variable based on these partitions. */
1654 if (stack_vars_num > 0)
1656 /* Reorder decls to be protected by iterating over the variables
1657 array multiple times, and allocating out of each phase in turn. */
1658 /* ??? We could probably integrate this into the qsort we did
1659 earlier, such that we naturally see these variables first,
1660 and thus naturally allocate things in the right order. */
1661 if (has_protected_decls)
1663 /* Phase 1 contains only character arrays. */
1664 expand_stack_vars (stack_protect_decl_phase_1);
1666 /* Phase 2 contains other kinds of arrays. */
1667 if (flag_stack_protect == 2)
1668 expand_stack_vars (stack_protect_decl_phase_2);
1671 expand_stack_vars (NULL);
1673 fini_vars_expansion ();
1676 /* If there were any artificial non-ignored vars without rtl
1677 found earlier, see if deferred stack allocation hasn't assigned
1678 rtl to them. */
1679 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1681 rtx rtl = DECL_RTL_IF_SET (var);
1683 /* Keep artificial non-ignored vars in cfun->local_decls
1684 chain until instantiate_decls. */
1685 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1686 add_local_decl (cfun, var);
1688 VEC_free (tree, heap, maybe_local_decls);
1690 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1691 if (STACK_ALIGNMENT_NEEDED)
1693 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1694 if (!FRAME_GROWS_DOWNWARD)
1695 frame_offset += align - 1;
1696 frame_offset &= -align;
1701 /* If we need to produce a detailed dump, print the tree representation
1702 for STMT to the dump file. SINCE is the last RTX after which the RTL
1703 generated for STMT should have been appended. */
1705 static void
1706 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1708 if (dump_file && (dump_flags & TDF_DETAILS))
1710 fprintf (dump_file, "\n;; ");
1711 print_gimple_stmt (dump_file, stmt, 0,
1712 TDF_SLIM | (dump_flags & TDF_LINENO));
1713 fprintf (dump_file, "\n");
1715 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1719 /* Maps the blocks that do not contain tree labels to rtx labels. */
1721 static struct pointer_map_t *lab_rtx_for_bb;
1723 /* Returns the label_rtx expression for a label starting basic block BB. */
1725 static rtx
1726 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1728 gimple_stmt_iterator gsi;
1729 tree lab;
1730 gimple lab_stmt;
1731 void **elt;
1733 if (bb->flags & BB_RTL)
1734 return block_label (bb);
1736 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1737 if (elt)
1738 return (rtx) *elt;
1740 /* Find the tree label if it is present. */
1742 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1744 lab_stmt = gsi_stmt (gsi);
1745 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1746 break;
1748 lab = gimple_label_label (lab_stmt);
1749 if (DECL_NONLOCAL (lab))
1750 break;
1752 return label_rtx (lab);
1755 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1756 *elt = gen_label_rtx ();
1757 return (rtx) *elt;
1761 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1762 of a basic block where we just expanded the conditional at the end,
1763 possibly clean up the CFG and instruction sequence. LAST is the
1764 last instruction before the just emitted jump sequence. */
1766 static void
1767 maybe_cleanup_end_of_block (edge e, rtx last)
1769 /* Special case: when jumpif decides that the condition is
1770 trivial it emits an unconditional jump (and the necessary
1771 barrier). But we still have two edges, the fallthru one is
1772 wrong. purge_dead_edges would clean this up later. Unfortunately
1773 we have to insert insns (and split edges) before
1774 find_many_sub_basic_blocks and hence before purge_dead_edges.
1775 But splitting edges might create new blocks which depend on the
1776 fact that if there are two edges there's no barrier. So the
1777 barrier would get lost and verify_flow_info would ICE. Instead
1778 of auditing all edge splitters to care for the barrier (which
1779 normally isn't there in a cleaned CFG), fix it here. */
1780 if (BARRIER_P (get_last_insn ()))
1782 rtx insn;
1783 remove_edge (e);
1784 /* Now, we have a single successor block, if we have insns to
1785 insert on the remaining edge we potentially will insert
1786 it at the end of this block (if the dest block isn't feasible)
1787 in order to avoid splitting the edge. This insertion will take
1788 place in front of the last jump. But we might have emitted
1789 multiple jumps (conditional and one unconditional) to the
1790 same destination. Inserting in front of the last one then
1791 is a problem. See PR 40021. We fix this by deleting all
1792 jumps except the last unconditional one. */
1793 insn = PREV_INSN (get_last_insn ());
1794 /* Make sure we have an unconditional jump. Otherwise we're
1795 confused. */
1796 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1797 for (insn = PREV_INSN (insn); insn != last;)
1799 insn = PREV_INSN (insn);
1800 if (JUMP_P (NEXT_INSN (insn)))
1802 if (!any_condjump_p (NEXT_INSN (insn)))
1804 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1805 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1807 delete_insn (NEXT_INSN (insn));
1813 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1814 Returns a new basic block if we've terminated the current basic
1815 block and created a new one. */
1817 static basic_block
1818 expand_gimple_cond (basic_block bb, gimple stmt)
1820 basic_block new_bb, dest;
1821 edge new_edge;
1822 edge true_edge;
1823 edge false_edge;
1824 rtx last2, last;
1825 enum tree_code code;
1826 tree op0, op1;
1828 code = gimple_cond_code (stmt);
1829 op0 = gimple_cond_lhs (stmt);
1830 op1 = gimple_cond_rhs (stmt);
1831 /* We're sometimes presented with such code:
1832 D.123_1 = x < y;
1833 if (D.123_1 != 0)
1835 This would expand to two comparisons which then later might
1836 be cleaned up by combine. But some pattern matchers like if-conversion
1837 work better when there's only one compare, so make up for this
1838 here as a special exception if TER would have made the same change. */
1839 if (gimple_cond_single_var_p (stmt)
1840 && SA.values
1841 && TREE_CODE (op0) == SSA_NAME
1842 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1844 gimple second = SSA_NAME_DEF_STMT (op0);
1845 if (gimple_code (second) == GIMPLE_ASSIGN)
1847 enum tree_code code2 = gimple_assign_rhs_code (second);
1848 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1850 code = code2;
1851 op0 = gimple_assign_rhs1 (second);
1852 op1 = gimple_assign_rhs2 (second);
1854 /* If jumps are cheap turn some more codes into
1855 jumpy sequences. */
1856 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1858 if ((code2 == BIT_AND_EXPR
1859 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1860 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1861 || code2 == TRUTH_AND_EXPR)
1863 code = TRUTH_ANDIF_EXPR;
1864 op0 = gimple_assign_rhs1 (second);
1865 op1 = gimple_assign_rhs2 (second);
1867 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1869 code = TRUTH_ORIF_EXPR;
1870 op0 = gimple_assign_rhs1 (second);
1871 op1 = gimple_assign_rhs2 (second);
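/* For illustration (hypothetical GIMPLE, cheap-branch target): with 1-bit
   operands, 'tmp_1 = a_2 & b_3; if (tmp_1 != 0)' is treated like the
   short-circuit 'if (a_2 && b_3)', i.e. expanded as two conditional jumps
   instead of materializing the AND result first; BIT_IOR_EXPR and
   TRUTH_OR_EXPR are handled analogously via TRUTH_ORIF_EXPR.  */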
1877 last2 = last = get_last_insn ();
1879 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1880 set_curr_insn_source_location (gimple_location (stmt));
1881 set_curr_insn_block (gimple_block (stmt));
1883 /* These flags have no purpose in RTL land. */
1884 true_edge->flags &= ~EDGE_TRUE_VALUE;
1885 false_edge->flags &= ~EDGE_FALSE_VALUE;
1887 /* We can either have a pure conditional jump with one fallthru edge or
1888 a two-way jump that needs to be decomposed into two basic blocks. */
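/* An assumed summary of the three cases handled below: if the false edge
   falls through to the next block, a single 'if (cond) jump <true-dest>'
   suffices; symmetrically, if the true edge falls through, the condition
   is inverted; otherwise the block is split, and an unconditional jump to
   the false destination is emitted after the conditional jump.  */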
1889 if (false_edge->dest == bb->next_bb)
1891 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1892 true_edge->probability);
1893 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1894 if (true_edge->goto_locus)
1896 set_curr_insn_source_location (true_edge->goto_locus);
1897 set_curr_insn_block (true_edge->goto_block);
1898 true_edge->goto_locus = curr_insn_locator ();
1900 true_edge->goto_block = NULL;
1901 false_edge->flags |= EDGE_FALLTHRU;
1902 maybe_cleanup_end_of_block (false_edge, last);
1903 return NULL;
1905 if (true_edge->dest == bb->next_bb)
1907 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1908 false_edge->probability);
1909 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1910 if (false_edge->goto_locus)
1912 set_curr_insn_source_location (false_edge->goto_locus);
1913 set_curr_insn_block (false_edge->goto_block);
1914 false_edge->goto_locus = curr_insn_locator ();
1916 false_edge->goto_block = NULL;
1917 true_edge->flags |= EDGE_FALLTHRU;
1918 maybe_cleanup_end_of_block (true_edge, last);
1919 return NULL;
1922 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1923 true_edge->probability);
1924 last = get_last_insn ();
1925 if (false_edge->goto_locus)
1927 set_curr_insn_source_location (false_edge->goto_locus);
1928 set_curr_insn_block (false_edge->goto_block);
1929 false_edge->goto_locus = curr_insn_locator ();
1931 false_edge->goto_block = NULL;
1932 emit_jump (label_rtx_for_bb (false_edge->dest));
1934 BB_END (bb) = last;
1935 if (BARRIER_P (BB_END (bb)))
1936 BB_END (bb) = PREV_INSN (BB_END (bb));
1937 update_bb_for_insn (bb);
1939 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1940 dest = false_edge->dest;
1941 redirect_edge_succ (false_edge, new_bb);
1942 false_edge->flags |= EDGE_FALLTHRU;
1943 new_bb->count = false_edge->count;
1944 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1945 if (current_loops && bb->loop_father)
1946 add_bb_to_loop (new_bb, bb->loop_father);
1947 new_edge = make_edge (new_bb, dest, 0);
1948 new_edge->probability = REG_BR_PROB_BASE;
1949 new_edge->count = new_bb->count;
1950 if (BARRIER_P (BB_END (new_bb)))
1951 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1952 update_bb_for_insn (new_bb);
1954 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1956 if (true_edge->goto_locus)
1958 set_curr_insn_source_location (true_edge->goto_locus);
1959 set_curr_insn_block (true_edge->goto_block);
1960 true_edge->goto_locus = curr_insn_locator ();
1962 true_edge->goto_block = NULL;
1964 return new_bb;
1967 /* Mark all calls that can have a transaction restart. */
1969 static void
1970 mark_transaction_restart_calls (gimple stmt)
1972 struct tm_restart_node dummy;
1973 void **slot;
1975 if (!cfun->gimple_df->tm_restart)
1976 return;
1978 dummy.stmt = stmt;
1979 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1980 if (slot)
1982 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1983 tree list = n->label_or_list;
1984 rtx insn;
1986 for (insn = next_real_insn (get_last_insn ());
1987 !CALL_P (insn);
1988 insn = next_real_insn (insn))
1989 continue;
1991 if (TREE_CODE (list) == LABEL_DECL)
1992 add_reg_note (insn, REG_TM, label_rtx (list));
1993 else
1994 for (; list ; list = TREE_CHAIN (list))
1995 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1999 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2000 statement STMT. */
2002 static void
2003 expand_call_stmt (gimple stmt)
2005 tree exp, decl, lhs;
2006 bool builtin_p;
2007 size_t i;
2009 if (gimple_call_internal_p (stmt))
2011 expand_internal_call (stmt);
2012 return;
2015 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2017 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2018 decl = gimple_call_fndecl (stmt);
2019 builtin_p = decl && DECL_BUILT_IN (decl);
2021 /* If this is not a builtin function, the function type through which the
2022 call is made may be different from the type of the function. */
2023 if (!builtin_p)
2024 CALL_EXPR_FN (exp)
2025 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2026 CALL_EXPR_FN (exp));
2028 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2029 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2031 for (i = 0; i < gimple_call_num_args (stmt); i++)
2033 tree arg = gimple_call_arg (stmt, i);
2034 gimple def;
2035 /* TER forwards addresses into arguments of builtin functions so we have a
2036 chance to infer more correct alignment information. See PR39954. */
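/* Hypothetical example (names made up): given 'p_1 = &a[4];
   __builtin_memcpy (p_1, q_2, n_3);', the TERed ADDR_EXPR '&a[4]' is
   substituted back as the call argument so the expander can derive the
   alignment of 'a'.  */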
2037 if (builtin_p
2038 && TREE_CODE (arg) == SSA_NAME
2039 && (def = get_gimple_for_ssa_name (arg))
2040 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2041 arg = gimple_assign_rhs1 (def);
2042 CALL_EXPR_ARG (exp, i) = arg;
2045 if (gimple_has_side_effects (stmt))
2046 TREE_SIDE_EFFECTS (exp) = 1;
2048 if (gimple_call_nothrow_p (stmt))
2049 TREE_NOTHROW (exp) = 1;
2051 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2052 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2053 if (decl
2054 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2055 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2056 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2057 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2058 else
2059 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2060 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2061 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2062 TREE_BLOCK (exp) = gimple_block (stmt);
2064 /* Ensure RTL is created for debug args. */
2065 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2067 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2068 unsigned int ix;
2069 tree dtemp;
2071 if (debug_args)
2072 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2074 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2075 expand_debug_expr (dtemp);
2079 lhs = gimple_call_lhs (stmt);
2080 if (lhs)
2081 expand_assignment (lhs, exp, false);
2082 else
2083 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2085 mark_transaction_restart_calls (stmt);
2088 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2089 STMT that doesn't require special handling for outgoing edges. That
2090 is, no tailcalls and no GIMPLE_COND. */
2092 static void
2093 expand_gimple_stmt_1 (gimple stmt)
2095 tree op0;
2097 set_curr_insn_source_location (gimple_location (stmt));
2098 set_curr_insn_block (gimple_block (stmt));
2100 switch (gimple_code (stmt))
2102 case GIMPLE_GOTO:
2103 op0 = gimple_goto_dest (stmt);
2104 if (TREE_CODE (op0) == LABEL_DECL)
2105 expand_goto (op0);
2106 else
2107 expand_computed_goto (op0);
2108 break;
2109 case GIMPLE_LABEL:
2110 expand_label (gimple_label_label (stmt));
2111 break;
2112 case GIMPLE_NOP:
2113 case GIMPLE_PREDICT:
2114 break;
2115 case GIMPLE_SWITCH:
2116 expand_case (stmt);
2117 break;
2118 case GIMPLE_ASM:
2119 expand_asm_stmt (stmt);
2120 break;
2121 case GIMPLE_CALL:
2122 expand_call_stmt (stmt);
2123 break;
2125 case GIMPLE_RETURN:
2126 op0 = gimple_return_retval (stmt);
2128 if (op0 && op0 != error_mark_node)
2130 tree result = DECL_RESULT (current_function_decl);
2132 /* If we are not returning the current function's RESULT_DECL,
2133 build an assignment to it. */
2134 if (op0 != result)
2136 /* I believe that a function's RESULT_DECL is unique. */
2137 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2139 /* ??? We'd like to simply use expand_assignment here,
2140 but this fails if the value is of BLKmode but the return
2141 decl is a register. expand_return has special handling
2142 for this combination, which eventually should move
2143 to common code. See comments there. Until then, let's
2144 build a modify expression :-/ */
2145 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2146 result, op0);
2149 if (!op0)
2150 expand_null_return ();
2151 else
2152 expand_return (op0);
2153 break;
2155 case GIMPLE_ASSIGN:
2157 tree lhs = gimple_assign_lhs (stmt);
2159 /* Tree expand used to fiddle with |= and &= of two bitfield
2160 COMPONENT_REFs here. This can't happen with gimple; the LHS
2161 of binary assigns must be a gimple reg. */
2163 if (TREE_CODE (lhs) != SSA_NAME
2164 || get_gimple_rhs_class (gimple_expr_code (stmt))
2165 == GIMPLE_SINGLE_RHS)
2167 tree rhs = gimple_assign_rhs1 (stmt);
2168 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2169 == GIMPLE_SINGLE_RHS);
2170 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2171 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2172 if (TREE_CLOBBER_P (rhs))
2173 /* This is a clobber to mark the going out of scope for
2174 this LHS. */
2175 ;
2176 else
2177 expand_assignment (lhs, rhs,
2178 gimple_assign_nontemporal_move_p (stmt));
2180 else
2182 rtx target, temp;
2183 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2184 struct separate_ops ops;
2185 bool promoted = false;
2187 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2188 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2189 promoted = true;
2191 ops.code = gimple_assign_rhs_code (stmt);
2192 ops.type = TREE_TYPE (lhs);
2193 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2195 case GIMPLE_TERNARY_RHS:
2196 ops.op2 = gimple_assign_rhs3 (stmt);
2197 /* Fallthru */
2198 case GIMPLE_BINARY_RHS:
2199 ops.op1 = gimple_assign_rhs2 (stmt);
2200 /* Fallthru */
2201 case GIMPLE_UNARY_RHS:
2202 ops.op0 = gimple_assign_rhs1 (stmt);
2203 break;
2204 default:
2205 gcc_unreachable ();
2207 ops.location = gimple_location (stmt);
2209 /* If we want to use a nontemporal store, force the value into a
2210 register first. If we store into a promoted register,
2211 don't directly expand to target. */
2212 temp = nontemporal || promoted ? NULL_RTX : target;
2213 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2214 EXPAND_NORMAL);
2216 if (temp == target)
2217 ;
2218 else if (promoted)
2220 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2221 /* If TEMP is a VOIDmode constant, use convert_modes to make
2222 sure that we properly convert it. */
2223 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2225 temp = convert_modes (GET_MODE (target),
2226 TYPE_MODE (ops.type),
2227 temp, unsignedp);
2228 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2229 GET_MODE (target), temp, unsignedp);
2232 convert_move (SUBREG_REG (target), temp, unsignedp);
2234 else if (nontemporal && emit_storent_insn (target, temp))
2235 ;
2236 else
2238 temp = force_operand (temp, target);
2239 if (temp != target)
2240 emit_move_insn (target, temp);
2244 break;
2246 default:
2247 gcc_unreachable ();
2251 /* Expand one gimple statement STMT and return the last RTL instruction
2252 before any of the newly generated ones.
2254 In addition to generating the necessary RTL instructions this also
2255 sets REG_EH_REGION notes if necessary and sets the current source
2256 location for diagnostics. */
2258 static rtx
2259 expand_gimple_stmt (gimple stmt)
2261 location_t saved_location = input_location;
2262 rtx last = get_last_insn ();
2263 int lp_nr;
2265 gcc_assert (cfun);
2267 /* We need to save and restore the current source location so that errors
2268 discovered during expansion are emitted with the right location. But
2269 it would be better if the diagnostic routines used the source location
2270 embedded in the tree nodes rather than globals. */
2271 if (gimple_has_location (stmt))
2272 input_location = gimple_location (stmt);
2274 expand_gimple_stmt_1 (stmt);
2276 /* Free any temporaries used to evaluate this statement. */
2277 free_temp_slots ();
2279 input_location = saved_location;
2281 /* Mark all insns that may trap. */
2282 lp_nr = lookup_stmt_eh_lp (stmt);
2283 if (lp_nr)
2285 rtx insn;
2286 for (insn = next_real_insn (last); insn;
2287 insn = next_real_insn (insn))
2289 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2290 /* If we want exceptions for non-call insns, any
2291 may_trap_p instruction may throw. */
2292 && GET_CODE (PATTERN (insn)) != CLOBBER
2293 && GET_CODE (PATTERN (insn)) != USE
2294 && insn_could_throw_p (insn))
2295 make_reg_eh_region_note (insn, 0, lp_nr);
2299 return last;
2302 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2303 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2304 generated a tail call (something that might be denied by the ABI
2305 rules governing the call; see calls.c).
2307 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2308 can still reach the rest of BB. The case here is __builtin_sqrt,
2309 where the NaN result goes through the external function (with a
2310 tailcall) and the normal result happens via a sqrt instruction. */
2312 static basic_block
2313 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2315 rtx last2, last;
2316 edge e;
2317 edge_iterator ei;
2318 int probability;
2319 gcov_type count;
2321 last2 = last = expand_gimple_stmt (stmt);
2323 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2324 if (CALL_P (last) && SIBLING_CALL_P (last))
2325 goto found;
2327 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2329 *can_fallthru = true;
2330 return NULL;
2332 found:
2333 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2334 Any instructions emitted here are about to be deleted. */
2335 do_pending_stack_adjust ();
2337 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2338 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2339 EH or abnormal edges, we shouldn't have created a tail call in
2340 the first place. So it seems to me we should just be removing
2341 all edges here, or redirecting the existing fallthru edge to
2342 the exit block. */
2344 probability = 0;
2345 count = 0;
2347 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2349 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2351 if (e->dest != EXIT_BLOCK_PTR)
2353 e->dest->count -= e->count;
2354 e->dest->frequency -= EDGE_FREQUENCY (e);
2355 if (e->dest->count < 0)
2356 e->dest->count = 0;
2357 if (e->dest->frequency < 0)
2358 e->dest->frequency = 0;
2360 count += e->count;
2361 probability += e->probability;
2362 remove_edge (e);
2364 else
2365 ei_next (&ei);
2368 /* This is somewhat ugly: the call_expr expander often emits instructions
2369 after the sibcall (to perform the function return). These confuse the
2370 find_many_sub_basic_blocks code, so we need to get rid of them. */
2371 last = NEXT_INSN (last);
2372 gcc_assert (BARRIER_P (last));
2374 *can_fallthru = false;
2375 while (NEXT_INSN (last))
2377 /* For instance, the sqrt builtin expander expands an if with a
2378 sibcall in the then-branch and a label for the else-branch. */
2379 if (LABEL_P (NEXT_INSN (last)))
2381 *can_fallthru = true;
2382 break;
2384 delete_insn (NEXT_INSN (last));
2387 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2388 e->probability += probability;
2389 e->count += count;
2390 BB_END (bb) = last;
2391 update_bb_for_insn (bb);
2393 if (NEXT_INSN (last))
2395 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2397 last = BB_END (bb);
2398 if (BARRIER_P (last))
2399 BB_END (bb) = PREV_INSN (last);
2402 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2404 return bb;
2407 /* Return the difference between the floor and the truncated result of
2408 a signed division by OP1 with remainder MOD. */
2409 static rtx
2410 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2412 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
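/* Worked example (illustrative): -7 / 2 truncates to -3 with MOD == -1;
   floor (-7 / 2) is -4.  OP1 / MOD == 2 / -1 < 0, so the expression
   below yields -1, the required correction.  */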
2413 return gen_rtx_IF_THEN_ELSE
2414 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2415 gen_rtx_IF_THEN_ELSE
2416 (mode, gen_rtx_LT (BImode,
2417 gen_rtx_DIV (mode, op1, mod),
2418 const0_rtx),
2419 constm1_rtx, const0_rtx),
2420 const0_rtx);
2423 /* Return the difference between the ceil and the truncated result of
2424 a signed division by OP1 with remainder MOD. */
2425 static rtx
2426 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2428 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
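/* Worked example (illustrative): 7 / 2 truncates to 3 with MOD == 1;
   ceil (7 / 2) is 4.  OP1 / MOD == 2 / 1 > 0, so the expression below
   yields the required +1; for -7 / 2 the quotient OP1 / MOD is negative
   and the adjustment is 0.  */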
2429 return gen_rtx_IF_THEN_ELSE
2430 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2431 gen_rtx_IF_THEN_ELSE
2432 (mode, gen_rtx_GT (BImode,
2433 gen_rtx_DIV (mode, op1, mod),
2434 const0_rtx),
2435 const1_rtx, const0_rtx),
2436 const0_rtx);
2439 /* Return the difference between the ceil and the truncated result of
2440 an unsigned division by OP1 with remainder MOD. */
2441 static rtx
2442 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2444 /* (mod != 0 ? 1 : 0) */
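/* E.g. (illustrative) 7u / 2u truncates to 3 with MOD == 1, and the
   ceiling is 4, so any nonzero remainder contributes +1.  */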
2445 return gen_rtx_IF_THEN_ELSE
2446 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2447 const1_rtx, const0_rtx);
2450 /* Return the difference between the rounded and the truncated result
2451 of a signed division by OP1 with remainder MOD. Halfway cases are
2452 rounded away from zero, rather than to the nearest even number. */
2453 static rtx
2454 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2456 /* (abs (mod) >= abs (op1) - abs (mod)
2457 ? (op1 / mod > 0 ? 1 : -1)
2458 : 0) */
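/* Worked example (illustrative): 7 / 2 truncates to 3 with MOD == 1;
   abs (MOD) == 1 >= abs (OP1) - abs (MOD) == 1, so this is a halfway
   case, and OP1 / MOD > 0 selects +1: 3.5 rounds away from zero to 4.  */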
2459 return gen_rtx_IF_THEN_ELSE
2460 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2461 gen_rtx_MINUS (mode,
2462 gen_rtx_ABS (mode, op1),
2463 gen_rtx_ABS (mode, mod))),
2464 gen_rtx_IF_THEN_ELSE
2465 (mode, gen_rtx_GT (BImode,
2466 gen_rtx_DIV (mode, op1, mod),
2467 const0_rtx),
2468 const1_rtx, constm1_rtx),
2469 const0_rtx);
2472 /* Return the difference between the rounded and the truncated result
2473 of an unsigned division by OP1 with remainder MOD. Halfway cases
2474 are rounded away from zero, rather than to the nearest even
2475 number. */
2476 static rtx
2477 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2479 /* (mod >= op1 - mod ? 1 : 0) */
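/* E.g. (illustrative) 7u / 2u: MOD == 1 >= OP1 - MOD == 1, so the
   adjustment is +1 and 3.5 rounds up to 4; for 9u / 4u, MOD == 1 <
   OP1 - MOD == 3 and the truncated result 2 is kept.  */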
2480 return gen_rtx_IF_THEN_ELSE
2481 (mode, gen_rtx_GE (BImode, mod,
2482 gen_rtx_MINUS (mode, op1, mod)),
2483 const1_rtx, const0_rtx);
2486 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2487 any rtl. */
2489 static rtx
2490 convert_debug_memory_address (enum machine_mode mode, rtx x,
2491 addr_space_t as)
2493 enum machine_mode xmode = GET_MODE (x);
2495 #ifndef POINTERS_EXTEND_UNSIGNED
2496 gcc_assert (mode == Pmode
2497 || mode == targetm.addr_space.address_mode (as));
2498 gcc_assert (xmode == mode || xmode == VOIDmode);
2499 #else
2500 rtx temp;
2502 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2504 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2505 return x;
2507 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2508 x = simplify_gen_subreg (mode, x, xmode,
2509 subreg_lowpart_offset
2510 (mode, xmode));
2511 else if (POINTERS_EXTEND_UNSIGNED > 0)
2512 x = gen_rtx_ZERO_EXTEND (mode, x);
2513 else if (!POINTERS_EXTEND_UNSIGNED)
2514 x = gen_rtx_SIGN_EXTEND (mode, x);
2515 else
2517 switch (GET_CODE (x))
2519 case SUBREG:
2520 if ((SUBREG_PROMOTED_VAR_P (x)
2521 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2522 || (GET_CODE (SUBREG_REG (x)) == PLUS
2523 && REG_P (XEXP (SUBREG_REG (x), 0))
2524 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2525 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2526 && GET_MODE (SUBREG_REG (x)) == mode)
2527 return SUBREG_REG (x);
2528 break;
2529 case LABEL_REF:
2530 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2531 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2532 return temp;
2533 case SYMBOL_REF:
2534 temp = shallow_copy_rtx (x);
2535 PUT_MODE (temp, mode);
2536 return temp;
2537 case CONST:
2538 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2539 if (temp)
2540 temp = gen_rtx_CONST (mode, temp);
2541 return temp;
2542 case PLUS:
2543 case MINUS:
2544 if (CONST_INT_P (XEXP (x, 1)))
2546 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2547 if (temp)
2548 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2550 break;
2551 default:
2552 break;
2554 /* Don't know how to express ptr_extend as an operation in debug info. */
2555 return NULL;
2557 #endif /* POINTERS_EXTEND_UNSIGNED */
2559 return x;
2562 /* Return an RTX equivalent to the value of the parameter DECL. */
2564 static rtx
2565 expand_debug_parm_decl (tree decl)
2567 rtx incoming = DECL_INCOMING_RTL (decl);
2569 if (incoming
2570 && GET_MODE (incoming) != BLKmode
2571 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2572 || (MEM_P (incoming)
2573 && REG_P (XEXP (incoming, 0))
2574 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2576 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2578 #ifdef HAVE_window_save
2579 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2580 If the target machine has an explicit window save instruction, the
2581 actual entry value is the corresponding OUTGOING_REGNO instead. */
2582 if (REG_P (incoming)
2583 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2584 incoming
2585 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2586 OUTGOING_REGNO (REGNO (incoming)), 0);
2587 else if (MEM_P (incoming))
2589 rtx reg = XEXP (incoming, 0);
2590 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2592 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2593 incoming = replace_equiv_address_nv (incoming, reg);
2596 #endif
2598 ENTRY_VALUE_EXP (rtl) = incoming;
2599 return rtl;
2602 if (incoming
2603 && GET_MODE (incoming) != BLKmode
2604 && !TREE_ADDRESSABLE (decl)
2605 && MEM_P (incoming)
2606 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2607 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2608 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2609 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2610 return incoming;
2612 return NULL_RTX;
2615 /* Return an RTX equivalent to the value of the tree expression EXP. */
2617 static rtx
2618 expand_debug_expr (tree exp)
2620 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2621 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2622 enum machine_mode inner_mode = VOIDmode;
2623 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2624 addr_space_t as;
2626 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2628 case tcc_expression:
2629 switch (TREE_CODE (exp))
2631 case COND_EXPR:
2632 case DOT_PROD_EXPR:
2633 case WIDEN_MULT_PLUS_EXPR:
2634 case WIDEN_MULT_MINUS_EXPR:
2635 case FMA_EXPR:
2636 goto ternary;
2638 case TRUTH_ANDIF_EXPR:
2639 case TRUTH_ORIF_EXPR:
2640 case TRUTH_AND_EXPR:
2641 case TRUTH_OR_EXPR:
2642 case TRUTH_XOR_EXPR:
2643 goto binary;
2645 case TRUTH_NOT_EXPR:
2646 goto unary;
2648 default:
2649 break;
2651 break;
2653 ternary:
2654 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2655 if (!op2)
2656 return NULL_RTX;
2657 /* Fall through. */
2659 binary:
2660 case tcc_binary:
2661 case tcc_comparison:
2662 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2663 if (!op1)
2664 return NULL_RTX;
2665 /* Fall through. */
2667 unary:
2668 case tcc_unary:
2669 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2670 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2671 if (!op0)
2672 return NULL_RTX;
2673 break;
2675 case tcc_type:
2676 case tcc_statement:
2677 gcc_unreachable ();
2679 case tcc_constant:
2680 case tcc_exceptional:
2681 case tcc_declaration:
2682 case tcc_reference:
2683 case tcc_vl_exp:
2684 break;
2687 switch (TREE_CODE (exp))
2689 case STRING_CST:
2690 if (!lookup_constant_def (exp))
2692 if (strlen (TREE_STRING_POINTER (exp)) + 1
2693 != (size_t) TREE_STRING_LENGTH (exp))
2694 return NULL_RTX;
2695 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2696 op0 = gen_rtx_MEM (BLKmode, op0);
2697 set_mem_attributes (op0, exp, 0);
2698 return op0;
2700 /* Fall through... */
2702 case INTEGER_CST:
2703 case REAL_CST:
2704 case FIXED_CST:
2705 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2706 return op0;
2708 case COMPLEX_CST:
2709 gcc_assert (COMPLEX_MODE_P (mode));
2710 op0 = expand_debug_expr (TREE_REALPART (exp));
2711 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2712 return gen_rtx_CONCAT (mode, op0, op1);
2714 case DEBUG_EXPR_DECL:
2715 op0 = DECL_RTL_IF_SET (exp);
2717 if (op0)
2718 return op0;
2720 op0 = gen_rtx_DEBUG_EXPR (mode);
2721 DEBUG_EXPR_TREE_DECL (op0) = exp;
2722 SET_DECL_RTL (exp, op0);
2724 return op0;
2726 case VAR_DECL:
2727 case PARM_DECL:
2728 case FUNCTION_DECL:
2729 case LABEL_DECL:
2730 case CONST_DECL:
2731 case RESULT_DECL:
2732 op0 = DECL_RTL_IF_SET (exp);
2734 /* This decl was probably optimized away. */
2735 if (!op0)
2737 if (TREE_CODE (exp) != VAR_DECL
2738 || DECL_EXTERNAL (exp)
2739 || !TREE_STATIC (exp)
2740 || !DECL_NAME (exp)
2741 || DECL_HARD_REGISTER (exp)
2742 || DECL_IN_CONSTANT_POOL (exp)
2743 || mode == VOIDmode)
2744 return NULL;
2746 op0 = make_decl_rtl_for_debug (exp);
2747 if (!MEM_P (op0)
2748 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2749 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2750 return NULL;
2752 else
2753 op0 = copy_rtx (op0);
2755 if (GET_MODE (op0) == BLKmode
2756 /* If op0 is not BLKmode, but mode is, adjust_mode
2757 below would ICE. While it is likely a FE bug,
2758 try to be robust here. See PR43166. */
2759 || mode == BLKmode
2760 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2762 gcc_assert (MEM_P (op0));
2763 op0 = adjust_address_nv (op0, mode, 0);
2764 return op0;
2767 /* Fall through. */
2769 adjust_mode:
2770 case PAREN_EXPR:
2771 case NOP_EXPR:
2772 case CONVERT_EXPR:
2774 inner_mode = GET_MODE (op0);
2776 if (mode == inner_mode)
2777 return op0;
2779 if (inner_mode == VOIDmode)
2781 if (TREE_CODE (exp) == SSA_NAME)
2782 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2783 else
2784 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2785 if (mode == inner_mode)
2786 return op0;
2789 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2791 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2792 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2793 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2794 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2795 else
2796 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2798 else if (FLOAT_MODE_P (mode))
2800 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2801 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2802 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2803 else
2804 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2806 else if (FLOAT_MODE_P (inner_mode))
2808 if (unsignedp)
2809 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2810 else
2811 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2813 else if (CONSTANT_P (op0)
2814 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2815 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2816 subreg_lowpart_offset (mode,
2817 inner_mode));
2818 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2819 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2820 : unsignedp)
2821 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2822 else
2823 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2825 return op0;
2828 case MEM_REF:
2829 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2831 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2832 TREE_OPERAND (exp, 0),
2833 TREE_OPERAND (exp, 1));
2834 if (newexp)
2835 return expand_debug_expr (newexp);
2837 /* FALLTHROUGH */
2838 case INDIRECT_REF:
2839 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2840 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2841 if (!op0)
2842 return NULL;
2844 if (TREE_CODE (exp) == MEM_REF)
2846 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2847 || (GET_CODE (op0) == PLUS
2848 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2849 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2850 Instead just use get_inner_reference. */
2851 goto component_ref;
2853 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2854 if (!op1 || !CONST_INT_P (op1))
2855 return NULL;
2857 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2860 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2861 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2862 else
2863 as = ADDR_SPACE_GENERIC;
2865 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2866 op0, as);
2867 if (op0 == NULL_RTX)
2868 return NULL;
2870 op0 = gen_rtx_MEM (mode, op0);
2871 set_mem_attributes (op0, exp, 0);
2872 if (TREE_CODE (exp) == MEM_REF
2873 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2874 set_mem_expr (op0, NULL_TREE);
2875 set_mem_addr_space (op0, as);
2877 return op0;
2879 case TARGET_MEM_REF:
2880 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2881 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2882 return NULL;
2884 op0 = expand_debug_expr
2885 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2886 if (!op0)
2887 return NULL;
2889 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2890 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2891 else
2892 as = ADDR_SPACE_GENERIC;
2894 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2895 op0, as);
2896 if (op0 == NULL_RTX)
2897 return NULL;
2899 op0 = gen_rtx_MEM (mode, op0);
2901 set_mem_attributes (op0, exp, 0);
2902 set_mem_addr_space (op0, as);
2904 return op0;
2906 component_ref:
2907 case ARRAY_REF:
2908 case ARRAY_RANGE_REF:
2909 case COMPONENT_REF:
2910 case BIT_FIELD_REF:
2911 case REALPART_EXPR:
2912 case IMAGPART_EXPR:
2913 case VIEW_CONVERT_EXPR:
2915 enum machine_mode mode1;
2916 HOST_WIDE_INT bitsize, bitpos;
2917 tree offset;
2918 int volatilep = 0;
2919 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2920 &mode1, &unsignedp, &volatilep, false);
2921 rtx orig_op0;
2923 if (bitsize == 0)
2924 return NULL;
2926 orig_op0 = op0 = expand_debug_expr (tem);
2928 if (!op0)
2929 return NULL;
2931 if (offset)
2933 enum machine_mode addrmode, offmode;
2935 if (!MEM_P (op0))
2936 return NULL;
2938 op0 = XEXP (op0, 0);
2939 addrmode = GET_MODE (op0);
2940 if (addrmode == VOIDmode)
2941 addrmode = Pmode;
2943 op1 = expand_debug_expr (offset);
2944 if (!op1)
2945 return NULL;
2947 offmode = GET_MODE (op1);
2948 if (offmode == VOIDmode)
2949 offmode = TYPE_MODE (TREE_TYPE (offset));
2951 if (addrmode != offmode)
2952 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2953 subreg_lowpart_offset (addrmode,
2954 offmode));
2956 /* Don't use offset_address here, we don't need a
2957 recognizable address, and we don't want to generate
2958 code. */
2959 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2960 op0, op1));
2963 if (MEM_P (op0))
2965 if (mode1 == VOIDmode)
2966 /* Bitfield. */
2967 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2968 if (bitpos >= BITS_PER_UNIT)
2970 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2971 bitpos %= BITS_PER_UNIT;
2973 else if (bitpos < 0)
2975 HOST_WIDE_INT units
2976 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2977 op0 = adjust_address_nv (op0, mode1, units);
2978 bitpos += units * BITS_PER_UNIT;
2980 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2981 op0 = adjust_address_nv (op0, mode, 0);
2982 else if (GET_MODE (op0) != mode1)
2983 op0 = adjust_address_nv (op0, mode1, 0);
2984 else
2985 op0 = copy_rtx (op0);
2986 if (op0 == orig_op0)
2987 op0 = shallow_copy_rtx (op0);
2988 set_mem_attributes (op0, exp, 0);
2991 if (bitpos == 0 && mode == GET_MODE (op0))
2992 return op0;
2994 if (bitpos < 0)
2995 return NULL;
2997 if (GET_MODE (op0) == BLKmode)
2998 return NULL;
3000 if ((bitpos % BITS_PER_UNIT) == 0
3001 && bitsize == GET_MODE_BITSIZE (mode1))
3003 enum machine_mode opmode = GET_MODE (op0);
3005 if (opmode == VOIDmode)
3006 opmode = TYPE_MODE (TREE_TYPE (tem));
3008 /* This condition may hold if we're expanding the address
3009 right past the end of an array that turned out not to
3010 be addressable (i.e., the address was only computed in
3011 debug stmts). The gen_subreg below would rightfully
3012 crash, and the address doesn't really exist, so just
3013 drop it. */
3014 if (bitpos >= GET_MODE_BITSIZE (opmode))
3015 return NULL;
3017 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3018 return simplify_gen_subreg (mode, op0, opmode,
3019 bitpos / BITS_PER_UNIT);
3022 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3023 && TYPE_UNSIGNED (TREE_TYPE (exp))
3024 ? SIGN_EXTRACT
3025 : ZERO_EXTRACT, mode,
3026 GET_MODE (op0) != VOIDmode
3027 ? GET_MODE (op0)
3028 : TYPE_MODE (TREE_TYPE (tem)),
3029 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3032 case ABS_EXPR:
3033 return simplify_gen_unary (ABS, mode, op0, mode);
3035 case NEGATE_EXPR:
3036 return simplify_gen_unary (NEG, mode, op0, mode);
3038 case BIT_NOT_EXPR:
3039 return simplify_gen_unary (NOT, mode, op0, mode);
3041 case FLOAT_EXPR:
3042 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3043 0)))
3044 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3045 inner_mode);
3047 case FIX_TRUNC_EXPR:
3048 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3049 inner_mode);
3051 case POINTER_PLUS_EXPR:
3052 /* For the rare target where pointers are not the same size as
3053 size_t, we need to check for mismatched modes and correct
3054 the addend. */
3055 if (op0 && op1
3056 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3057 && GET_MODE (op0) != GET_MODE (op1))
3059 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3060 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3061 GET_MODE (op1));
3062 else
3063 /* We always sign-extend, regardless of the signedness of
3064 the operand, because the operand is always unsigned
3065 here even if the original C expression is signed. */
3066 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3067 GET_MODE (op1));
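/* Hypothetical case: on a target with 64-bit pointers but a 32-bit
   sizetype, the 32-bit addend OP1 is extended here so that the PLUS
   generated below is formed with matching modes.  */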
3069 /* Fall through. */
3070 case PLUS_EXPR:
3071 return simplify_gen_binary (PLUS, mode, op0, op1);
3073 case MINUS_EXPR:
3074 return simplify_gen_binary (MINUS, mode, op0, op1);
3076 case MULT_EXPR:
3077 return simplify_gen_binary (MULT, mode, op0, op1);
3079 case RDIV_EXPR:
3080 case TRUNC_DIV_EXPR:
3081 case EXACT_DIV_EXPR:
3082 if (unsignedp)
3083 return simplify_gen_binary (UDIV, mode, op0, op1);
3084 else
3085 return simplify_gen_binary (DIV, mode, op0, op1);
3087 case TRUNC_MOD_EXPR:
3088 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3090 case FLOOR_DIV_EXPR:
3091 if (unsignedp)
3092 return simplify_gen_binary (UDIV, mode, op0, op1);
3093 else
3095 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3096 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3097 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3098 return simplify_gen_binary (PLUS, mode, div, adj);
3101 case FLOOR_MOD_EXPR:
3102 if (unsignedp)
3103 return simplify_gen_binary (UMOD, mode, op0, op1);
3104 else
3106 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3107 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3108 adj = simplify_gen_unary (NEG, mode,
3109 simplify_gen_binary (MULT, mode, adj, op1),
3110 mode);
3111 return simplify_gen_binary (PLUS, mode, mod, adj);
3114 case CEIL_DIV_EXPR:
3115 if (unsignedp)
3117 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3118 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3119 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3120 return simplify_gen_binary (PLUS, mode, div, adj);
3122 else
3124 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3125 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3126 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3127 return simplify_gen_binary (PLUS, mode, div, adj);
3130 case CEIL_MOD_EXPR:
3131 if (unsignedp)
3133 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3134 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3135 adj = simplify_gen_unary (NEG, mode,
3136 simplify_gen_binary (MULT, mode, adj, op1),
3137 mode);
3138 return simplify_gen_binary (PLUS, mode, mod, adj);
3140 else
3142 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3143 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3144 adj = simplify_gen_unary (NEG, mode,
3145 simplify_gen_binary (MULT, mode, adj, op1),
3146 mode);
3147 return simplify_gen_binary (PLUS, mode, mod, adj);
3150 case ROUND_DIV_EXPR:
3151 if (unsignedp)
3153 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3154 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3155 rtx adj = round_udiv_adjust (mode, mod, op1);
3156 return simplify_gen_binary (PLUS, mode, div, adj);
3158 else
3160 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3161 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3162 rtx adj = round_sdiv_adjust (mode, mod, op1);
3163 return simplify_gen_binary (PLUS, mode, div, adj);
3166 case ROUND_MOD_EXPR:
3167 if (unsignedp)
3169 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3170 rtx adj = round_udiv_adjust (mode, mod, op1);
3171 adj = simplify_gen_unary (NEG, mode,
3172 simplify_gen_binary (MULT, mode, adj, op1),
3173 mode);
3174 return simplify_gen_binary (PLUS, mode, mod, adj);
3176 else
3178 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3179 rtx adj = round_sdiv_adjust (mode, mod, op1);
3180 adj = simplify_gen_unary (NEG, mode,
3181 simplify_gen_binary (MULT, mode, adj, op1),
3182 mode);
3183 return simplify_gen_binary (PLUS, mode, mod, adj);
3186 case LSHIFT_EXPR:
3187 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3189 case RSHIFT_EXPR:
3190 if (unsignedp)
3191 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3192 else
3193 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3195 case LROTATE_EXPR:
3196 return simplify_gen_binary (ROTATE, mode, op0, op1);
3198 case RROTATE_EXPR:
3199 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3201 case MIN_EXPR:
3202 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3204 case MAX_EXPR:
3205 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3207 case BIT_AND_EXPR:
3208 case TRUTH_AND_EXPR:
3209 return simplify_gen_binary (AND, mode, op0, op1);
3211 case BIT_IOR_EXPR:
3212 case TRUTH_OR_EXPR:
3213 return simplify_gen_binary (IOR, mode, op0, op1);
3215 case BIT_XOR_EXPR:
3216 case TRUTH_XOR_EXPR:
3217 return simplify_gen_binary (XOR, mode, op0, op1);
3219 case TRUTH_ANDIF_EXPR:
3220 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3222 case TRUTH_ORIF_EXPR:
3223 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3225 case TRUTH_NOT_EXPR:
3226 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3228 case LT_EXPR:
3229 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3230 op0, op1);
3232 case LE_EXPR:
3233 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3234 op0, op1);
3236 case GT_EXPR:
3237 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3238 op0, op1);
3240 case GE_EXPR:
3241 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3242 op0, op1);
3244 case EQ_EXPR:
3245 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3247 case NE_EXPR:
3248 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3250 case UNORDERED_EXPR:
3251 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3253 case ORDERED_EXPR:
3254 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3256 case UNLT_EXPR:
3257 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3259 case UNLE_EXPR:
3260 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3262 case UNGT_EXPR:
3263 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3265 case UNGE_EXPR:
3266 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3268 case UNEQ_EXPR:
3269 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3271 case LTGT_EXPR:
3272 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3274 case COND_EXPR:
3275 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3277 case COMPLEX_EXPR:
3278 gcc_assert (COMPLEX_MODE_P (mode));
3279 if (GET_MODE (op0) == VOIDmode)
3280 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3281 if (GET_MODE (op1) == VOIDmode)
3282 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3283 return gen_rtx_CONCAT (mode, op0, op1);
3285 case CONJ_EXPR:
3286 if (GET_CODE (op0) == CONCAT)
3287 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3288 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3289 XEXP (op0, 1),
3290 GET_MODE_INNER (mode)));
3291 else
3293 enum machine_mode imode = GET_MODE_INNER (mode);
3294 rtx re, im;
3296 if (MEM_P (op0))
3298 re = adjust_address_nv (op0, imode, 0);
3299 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3301 else
3303 enum machine_mode ifmode = int_mode_for_mode (mode);
3304 enum machine_mode ihmode = int_mode_for_mode (imode);
3305 rtx halfsize;
3306 if (ifmode == BLKmode || ihmode == BLKmode)
3307 return NULL;
3308 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3309 re = op0;
3310 if (mode != ifmode)
3311 re = gen_rtx_SUBREG (ifmode, re, 0);
3312 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3313 if (imode != ihmode)
3314 re = gen_rtx_SUBREG (imode, re, 0);
3315 im = copy_rtx (op0);
3316 if (mode != ifmode)
3317 im = gen_rtx_SUBREG (ifmode, im, 0);
3318 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3319 if (imode != ihmode)
3320 im = gen_rtx_SUBREG (imode, im, 0);
3322 im = gen_rtx_NEG (imode, im);
3323 return gen_rtx_CONCAT (mode, re, im);
3326 case ADDR_EXPR:
3327 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3328 if (!op0 || !MEM_P (op0))
3330 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3331 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3332 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3333 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3334 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3335 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3337 if (handled_component_p (TREE_OPERAND (exp, 0)))
3339 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3340 tree decl
3341 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3342 &bitoffset, &bitsize, &maxsize);
3343 if ((TREE_CODE (decl) == VAR_DECL
3344 || TREE_CODE (decl) == PARM_DECL
3345 || TREE_CODE (decl) == RESULT_DECL)
3346 && (!TREE_ADDRESSABLE (decl)
3347 || target_for_debug_bind (decl))
3348 && (bitoffset % BITS_PER_UNIT) == 0
3349 && bitsize > 0
3350 && bitsize == maxsize)
3352 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3353 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3357 return NULL;
3360 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3361 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3363 return op0;
3365 case VECTOR_CST:
3367 unsigned i;
3369 op0 = gen_rtx_CONCATN
3370 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3372 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3374 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3375 if (!op1)
3376 return NULL;
3377 XVECEXP (op0, 0, i) = op1;
3380 return op0;
3383 case CONSTRUCTOR:
3384 if (TREE_CLOBBER_P (exp))
3385 return NULL;
3386 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3388 unsigned i;
3389 tree val;
3391 op0 = gen_rtx_CONCATN
3392 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3394 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3396 op1 = expand_debug_expr (val);
3397 if (!op1)
3398 return NULL;
3399 XVECEXP (op0, 0, i) = op1;
3402 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3404 op1 = expand_debug_expr
3405 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3407 if (!op1)
3408 return NULL;
3410 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3411 XVECEXP (op0, 0, i) = op1;
3414 return op0;
3416 else
3417 goto flag_unsupported;
3419 case CALL_EXPR:
3420 /* ??? Maybe handle some builtins? */
3421 return NULL;
3423 case SSA_NAME:
3425 gimple g = get_gimple_for_ssa_name (exp);
3426 if (g)
3428 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3429 if (!op0)
3430 return NULL;
3432 else
3434 int part = var_to_partition (SA.map, exp);
3436 if (part == NO_PARTITION)
3438 /* If this is a reference to the incoming value of a parameter
3439 that is never used in the code, or whose incoming value is
3440 never used in the code, use the PARM_DECL's
3441 DECL_RTL if set. */
3442 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3443 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3445 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3446 if (op0)
3447 goto adjust_mode;
3448 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3449 if (op0)
3450 goto adjust_mode;
3452 return NULL;
3455 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3457 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3459 goto adjust_mode;
3462 case ERROR_MARK:
3463 return NULL;
3465 /* Vector stuff. For most of the codes we don't have rtl codes. */
3466 case REALIGN_LOAD_EXPR:
3467 case REDUC_MAX_EXPR:
3468 case REDUC_MIN_EXPR:
3469 case REDUC_PLUS_EXPR:
3470 case VEC_COND_EXPR:
3471 case VEC_LSHIFT_EXPR:
3472 case VEC_PACK_FIX_TRUNC_EXPR:
3473 case VEC_PACK_SAT_EXPR:
3474 case VEC_PACK_TRUNC_EXPR:
3475 case VEC_RSHIFT_EXPR:
3476 case VEC_UNPACK_FLOAT_HI_EXPR:
3477 case VEC_UNPACK_FLOAT_LO_EXPR:
3478 case VEC_UNPACK_HI_EXPR:
3479 case VEC_UNPACK_LO_EXPR:
3480 case VEC_WIDEN_MULT_HI_EXPR:
3481 case VEC_WIDEN_MULT_LO_EXPR:
3482 case VEC_WIDEN_LSHIFT_HI_EXPR:
3483 case VEC_WIDEN_LSHIFT_LO_EXPR:
3484 case VEC_PERM_EXPR:
3485 return NULL;
3487 /* Misc codes. */
3488 case ADDR_SPACE_CONVERT_EXPR:
3489 case FIXED_CONVERT_EXPR:
3490 case OBJ_TYPE_REF:
3491 case WITH_SIZE_EXPR:
3492 return NULL;
3494 case DOT_PROD_EXPR:
3495 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3496 && SCALAR_INT_MODE_P (mode))
3498 op0
3499 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3500 0)))
3501 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3502 inner_mode);
3503 op1
3504 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3505 1)))
3506 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3507 inner_mode);
3508 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3509 return simplify_gen_binary (PLUS, mode, op0, op2);
3511 return NULL;
3513 case WIDEN_MULT_EXPR:
3514 case WIDEN_MULT_PLUS_EXPR:
3515 case WIDEN_MULT_MINUS_EXPR:
3516 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3517 && SCALAR_INT_MODE_P (mode))
3519 inner_mode = GET_MODE (op0);
3520 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3521 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3522 else
3523 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3524 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3525 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3526 else
3527 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3528 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3529 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3530 return op0;
3531 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3532 return simplify_gen_binary (PLUS, mode, op0, op2);
3533 else
3534 return simplify_gen_binary (MINUS, mode, op2, op0);
3536 return NULL;
3538 case WIDEN_SUM_EXPR:
3539 case WIDEN_LSHIFT_EXPR:
3540 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3541 && SCALAR_INT_MODE_P (mode))
3543 op0
3544 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3545 0)))
3546 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3547 inner_mode);
3548 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3549 ? ASHIFT : PLUS, mode, op0, op1);
3551 return NULL;
3553 case FMA_EXPR:
3554 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3556 default:
3557 flag_unsupported:
3558 #ifdef ENABLE_CHECKING
3559 debug_tree (exp);
3560 gcc_unreachable ();
3561 #else
3562 return NULL;
3563 #endif
3567 /* Return an RTX equivalent to the source bind value of the tree expression
3568 EXP. */
3570 static rtx
3571 expand_debug_source_expr (tree exp)
3573 rtx op0 = NULL_RTX;
3574 enum machine_mode mode = VOIDmode, inner_mode;
3576 switch (TREE_CODE (exp))
3578 case PARM_DECL:
3580 mode = DECL_MODE (exp);
3581 op0 = expand_debug_parm_decl (exp);
3582 if (op0)
3583 break;
3584 /* See if this isn't an argument that has been completely
3585 optimized out. */
3586 if (!DECL_RTL_SET_P (exp)
3587 && !DECL_INCOMING_RTL (exp)
3588 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3590 tree aexp = exp;
3591 if (DECL_ABSTRACT_ORIGIN (exp))
3592 aexp = DECL_ABSTRACT_ORIGIN (exp);
3593 if (DECL_CONTEXT (aexp)
3594 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3596 VEC(tree, gc) **debug_args;
3597 unsigned int ix;
3598 tree ddecl;
3599 #ifdef ENABLE_CHECKING
3600 tree parm;
3601 for (parm = DECL_ARGUMENTS (current_function_decl);
3602 parm; parm = DECL_CHAIN (parm))
3603 gcc_assert (parm != exp
3604 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3605 #endif
3606 debug_args = decl_debug_args_lookup (current_function_decl);
3607 if (debug_args != NULL)
3609 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3610 ix += 2)
3611 if (ddecl == aexp)
3612 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3616 break;
3618 default:
3619 break;
3622 if (op0 == NULL_RTX)
3623 return NULL_RTX;
3625 inner_mode = GET_MODE (op0);
3626 if (mode == inner_mode)
3627 return op0;
3629 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3631 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3632 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3633 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3634 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3635 else
3636 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3638 else if (FLOAT_MODE_P (mode))
3639 gcc_unreachable ();
3640 else if (FLOAT_MODE_P (inner_mode))
3642 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3643 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3644 else
3645 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3647 else if (CONSTANT_P (op0)
3648 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3649 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3650 subreg_lowpart_offset (mode, inner_mode));
3651 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3652 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3653 else
3654 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3656 return op0;
3659 /* Expand the _LOCs in debug insns. We run this after expanding all
3660 regular insns, so that any variables referenced in the function
3661 will have their DECL_RTLs set. */
3663 static void
3664 expand_debug_locations (void)
3666 rtx insn;
3667 rtx last = get_last_insn ();
3668 int save_strict_alias = flag_strict_aliasing;
3670 /* New alias sets while setting up memory attributes cause
3671 -fcompare-debug failures, even though they don't bring about any
3672 codegen changes. */
3673 flag_strict_aliasing = 0;
3675 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3676 if (DEBUG_INSN_P (insn))
3678 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3679 rtx val;
3680 enum machine_mode mode;
3682 if (value == NULL_TREE)
3683 val = NULL_RTX;
3684 else
3686 if (INSN_VAR_LOCATION_STATUS (insn)
3687 == VAR_INIT_STATUS_UNINITIALIZED)
3688 val = expand_debug_source_expr (value);
3689 else
3690 val = expand_debug_expr (value);
3691 gcc_assert (last == get_last_insn ());
3694 if (!val)
3695 val = gen_rtx_UNKNOWN_VAR_LOC ();
3696 else
3698 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3700 gcc_assert (mode == GET_MODE (val)
3701 || (GET_MODE (val) == VOIDmode
3702 && (CONST_INT_P (val)
3703 || GET_CODE (val) == CONST_FIXED
3704 || GET_CODE (val) == CONST_DOUBLE
3705 || GET_CODE (val) == LABEL_REF)));
3708 INSN_VAR_LOCATION_LOC (insn) = val;
3711 flag_strict_aliasing = save_strict_alias;
3714 /* Expand basic block BB from GIMPLE trees to RTL. */
3716 static basic_block
3717 expand_gimple_basic_block (basic_block bb)
3719 gimple_stmt_iterator gsi;
3720 gimple_seq stmts;
3721 gimple stmt = NULL;
3722 rtx note, last;
3723 edge e;
3724 edge_iterator ei;
3725 void **elt;
3727 if (dump_file)
3728 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3729 bb->index);
3731 /* Note that since we are now transitioning from GIMPLE to RTL, we
3732 cannot use the gsi_*_bb() routines because they expect the basic
3733 block to be in GIMPLE, instead of RTL. Therefore, we need to
3734 access the BB sequence directly. */
3735 stmts = bb_seq (bb);
3736 bb->il.gimple.seq = NULL;
3737 bb->il.gimple.phi_nodes = NULL;
3738 rtl_profile_for_bb (bb);
3739 init_rtl_bb_info (bb);
3740 bb->flags |= BB_RTL;
3742 /* Remove the RETURN_EXPR if we may fall through to the exit
3743 instead. */
3744 gsi = gsi_last (stmts);
3745 if (!gsi_end_p (gsi)
3746 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3748 gimple ret_stmt = gsi_stmt (gsi);
3750 gcc_assert (single_succ_p (bb));
3751 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3753 if (bb->next_bb == EXIT_BLOCK_PTR
3754 && !gimple_return_retval (ret_stmt))
3756 gsi_remove (&gsi, false);
3757 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3761 gsi = gsi_start (stmts);
3762 if (!gsi_end_p (gsi))
3764 stmt = gsi_stmt (gsi);
3765 if (gimple_code (stmt) != GIMPLE_LABEL)
3766 stmt = NULL;
3769 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3771 if (stmt || elt)
3773 last = get_last_insn ();
3775 if (stmt)
3777 expand_gimple_stmt (stmt);
3778 gsi_next (&gsi);
3781 if (elt)
3782 emit_label ((rtx) *elt);
3784 /* Java emits line number notes at the top of labels.
3785 ??? Make this go away once line number notes are obsoleted. */
3786 BB_HEAD (bb) = NEXT_INSN (last);
3787 if (NOTE_P (BB_HEAD (bb)))
3788 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3789 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3791 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3793 else
3794 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3796 NOTE_BASIC_BLOCK (note) = bb;
3798 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3800 basic_block new_bb;
3802 stmt = gsi_stmt (gsi);
3804 /* If this statement is a non-debug one, and we generate debug
3805 insns, then this one might be the last real use of a TERed
3806 SSA_NAME, but where there are still some debug uses further
3807 down. Expanding the current SSA name in such further debug
3808 uses by their RHS might lead to wrong debug info, as coalescing
3809 might make the operands of such RHS be placed into the same
3810 pseudo as something else. Like so:
3811 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3812 use(a_1);
3813 a_2 = ...
3814 #DEBUG ... => a_1
3815 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3816 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3817 the write to a_2 would actually have clobbered the place which
3818 formerly held a_0.
3820 So, instead of that, we recognize the situation, and generate
3821 debug temporaries at the last real use of TERed SSA names:
3822 a_1 = a_0 + 1;
3823 #DEBUG #D1 => a_1
3824 use(a_1);
3825 a_2 = ...
3826 #DEBUG ... => #D1
3828 if (MAY_HAVE_DEBUG_INSNS
3829 && SA.values
3830 && !is_gimple_debug (stmt))
3832 ssa_op_iter iter;
3833 tree op;
3834 gimple def;
3836 location_t sloc = get_curr_insn_source_location ();
3837 tree sblock = get_curr_insn_block ();
3839 /* Look for SSA names that have their last use here (TERed
3840 names always have only one real use). */
3841 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3842 if ((def = get_gimple_for_ssa_name (op)))
3844 imm_use_iterator imm_iter;
3845 use_operand_p use_p;
3846 bool have_debug_uses = false;
3848 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3850 if (gimple_debug_bind_p (USE_STMT (use_p)))
3852 have_debug_uses = true;
3853 break;
3857 if (have_debug_uses)
3859 /* OP is a TERed SSA name, with DEF its defining
3860 statement, and where OP is used in further debug
3861 instructions. Generate a debug temporary, and
3862 replace all uses of OP in debug insns with that
3863 temporary. */
3864 gimple debugstmt;
3865 tree value = gimple_assign_rhs_to_tree (def);
3866 tree vexpr = make_node (DEBUG_EXPR_DECL);
3867 rtx val;
3868 enum machine_mode mode;
3870 set_curr_insn_source_location (gimple_location (def));
3871 set_curr_insn_block (gimple_block (def));
3873 DECL_ARTIFICIAL (vexpr) = 1;
3874 TREE_TYPE (vexpr) = TREE_TYPE (value);
3875 if (DECL_P (value))
3876 mode = DECL_MODE (value);
3877 else
3878 mode = TYPE_MODE (TREE_TYPE (value));
3879 DECL_MODE (vexpr) = mode;
3881 val = gen_rtx_VAR_LOCATION
3882 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3884 emit_debug_insn (val);
3886 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3888 if (!gimple_debug_bind_p (debugstmt))
3889 continue;
3891 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3892 SET_USE (use_p, vexpr);
3894 update_stmt (debugstmt);
3898 set_curr_insn_source_location (sloc);
3899 set_curr_insn_block (sblock);
3902 currently_expanding_gimple_stmt = stmt;
3904 /* Expand this statement, then evaluate the resulting RTL and
3905 fixup the CFG accordingly. */
3906 if (gimple_code (stmt) == GIMPLE_COND)
3908 new_bb = expand_gimple_cond (bb, stmt);
3909 if (new_bb)
3910 return new_bb;
3912 else if (gimple_debug_bind_p (stmt))
3914 location_t sloc = get_curr_insn_source_location ();
3915 tree sblock = get_curr_insn_block ();
3916 gimple_stmt_iterator nsi = gsi;
3918 for (;;)
3920 tree var = gimple_debug_bind_get_var (stmt);
3921 tree value;
3922 rtx val;
3923 enum machine_mode mode;
3925 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3926 && TREE_CODE (var) != LABEL_DECL
3927 && !target_for_debug_bind (var))
3928 goto delink_debug_stmt;
3930 if (gimple_debug_bind_has_value_p (stmt))
3931 value = gimple_debug_bind_get_value (stmt);
3932 else
3933 value = NULL_TREE;
3935 last = get_last_insn ();
3937 set_curr_insn_source_location (gimple_location (stmt));
3938 set_curr_insn_block (gimple_block (stmt));
3940 if (DECL_P (var))
3941 mode = DECL_MODE (var);
3942 else
3943 mode = TYPE_MODE (TREE_TYPE (var));
3945 val = gen_rtx_VAR_LOCATION
3946 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3948 emit_debug_insn (val);
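/* Note that VALUE is still a tree at this point; it is stashed in the
   VAR_LOCATION via the cast above and only turned into a real RTL
   location later by expand_debug_locations, which runs after all basic
   blocks have been expanded. */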
3950 if (dump_file && (dump_flags & TDF_DETAILS))
3952 /* We can't dump the insn with a TREE where an RTX
3953 is expected. */
3954 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3955 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3956 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3959 delink_debug_stmt:
3960 /* In order not to generate too many debug temporaries,
3961 we delink all uses of debug statements we already expanded.
3962 Therefore debug statements between definition and real
3963 use of TERed SSA names will continue to use the SSA name,
3964 and not be replaced with debug temps. */
3965 delink_stmt_imm_use (stmt);
3967 gsi = nsi;
3968 gsi_next (&nsi);
3969 if (gsi_end_p (nsi))
3970 break;
3971 stmt = gsi_stmt (nsi);
3972 if (!gimple_debug_bind_p (stmt))
3973 break;
3976 set_curr_insn_source_location (sloc);
3977 set_curr_insn_block (sblock);
3979 else if (gimple_debug_source_bind_p (stmt))
3981 location_t sloc = get_curr_insn_source_location ();
3982 tree sblock = get_curr_insn_block ();
3983 tree var = gimple_debug_source_bind_get_var (stmt);
3984 tree value = gimple_debug_source_bind_get_value (stmt);
3985 rtx val;
3986 enum machine_mode mode;
3988 last = get_last_insn ();
3990 set_curr_insn_source_location (gimple_location (stmt));
3991 set_curr_insn_block (gimple_block (stmt));
3993 mode = DECL_MODE (var);
3995 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3996 VAR_INIT_STATUS_UNINITIALIZED);
3998 emit_debug_insn (val);
4000 if (dump_file && (dump_flags & TDF_DETAILS))
4002 /* We can't dump the insn with a TREE where an RTX
4003 is expected. */
4004 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4005 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4006 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4009 set_curr_insn_source_location (sloc);
4010 set_curr_insn_block (sblock);
4012 else
4014 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4016 bool can_fallthru;
4017 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4018 if (new_bb)
4020 if (can_fallthru)
4021 bb = new_bb;
4022 else
4023 return new_bb;
4026 else
4028 def_operand_p def_p;
4029 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4031 if (def_p != NULL)
4033 /* Ignore this stmt if it is in the list of
4034 replaceable expressions. */
4035 if (SA.values
4036 && bitmap_bit_p (SA.values,
4037 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4038 continue;
4040 last = expand_gimple_stmt (stmt);
4041 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4046 currently_expanding_gimple_stmt = NULL;
4048 /* Expand implicit goto and convert goto_locus. */
4049 FOR_EACH_EDGE (e, ei, bb->succs)
4051 if (e->goto_locus && e->goto_block)
4053 set_curr_insn_source_location (e->goto_locus);
4054 set_curr_insn_block (e->goto_block);
4055 e->goto_locus = curr_insn_locator ();
4057 e->goto_block = NULL;
4058 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4060 emit_jump (label_rtx_for_bb (e->dest));
4061 e->flags &= ~EDGE_FALLTHRU;
4065 /* Expanded RTL can create a jump in the last instruction of the block.
4066 This might later be assumed to be a jump to the successor and break edge insertion.
4067 We need to insert a dummy move to prevent this. PR41440. */
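/* The dummy below is a (set (reg) (reg)) self-move: a complete non-jump
   insn, so once it is emitted the last insn of the block is no longer a
   JUMP_INSN and edge insertions on the fallthru edge stay safe. */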
4068 if (single_succ_p (bb)
4069 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4070 && (last = get_last_insn ())
4071 && JUMP_P (last))
4073 rtx dummy = gen_reg_rtx (SImode);
4074 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4077 do_pending_stack_adjust ();
4079 /* Find the block tail. The last insn in the block is the insn
4080 before a barrier and/or table jump insn. */
4081 last = get_last_insn ();
4082 if (BARRIER_P (last))
4083 last = PREV_INSN (last);
4084 if (JUMP_TABLE_DATA_P (last))
4085 last = PREV_INSN (PREV_INSN (last));
4086 BB_END (bb) = last;
4088 update_bb_for_insn (bb);
4090 return bb;
4094 /* Create a basic block for initialization code. */
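/* That is, gather all RTL emitted so far (by expand_used_vars,
   expand_function_start, the optional __main call and the stack
   protector prologue) into a new block placed right after
   ENTRY_BLOCK_PTR, and redirect the entry edge to it. */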
4096 static basic_block
4097 construct_init_block (void)
4099 basic_block init_block, first_block;
4100 edge e = NULL;
4101 int flags;
4103 /* Multiple entry points not supported yet. */
4104 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4105 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4106 init_rtl_bb_info (EXIT_BLOCK_PTR);
4107 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4108 EXIT_BLOCK_PTR->flags |= BB_RTL;
4110 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4112 /* When the entry edge points to the first basic block, we don't need a jump;
4113 otherwise we have to jump to the proper target. */
4114 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4116 tree label = gimple_block_label (e->dest);
4118 emit_jump (label_rtx (label));
4119 flags = 0;
4121 else
4122 flags = EDGE_FALLTHRU;
4124 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4125 get_last_insn (),
4126 ENTRY_BLOCK_PTR);
4127 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4128 init_block->count = ENTRY_BLOCK_PTR->count;
4129 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4130 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4131 if (e)
4133 first_block = e->dest;
4134 redirect_edge_succ (e, init_block);
4135 e = make_edge (init_block, first_block, flags);
4137 else
4138 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4139 e->probability = REG_BR_PROB_BASE;
4140 e->count = ENTRY_BLOCK_PTR->count;
4142 update_bb_for_insn (init_block);
4143 return init_block;
4146 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4147 found in the block tree. */
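/* For example, DECL_INITIAL of the function gets level 0, a scope
   nested directly inside it gets level 1, and so on down the
   BLOCK_SUBBLOCKS chain; sibling scopes reached through BLOCK_CHAIN
   all get the same level. */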
4149 static void
4150 set_block_levels (tree block, int level)
4152 while (block)
4154 BLOCK_NUMBER (block) = level;
4155 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4156 block = BLOCK_CHAIN (block);
4160 /* Create a block containing landing pads and similar stuff. */
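/* Everything emitted by expand_function_end below (the return sequence
   and related epilogue bits) is collected into a new basic block placed
   just before EXIT_BLOCK_PTR, and the non-abnormal predecessors of the
   exit block are redirected to it. */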
4162 static void
4163 construct_exit_block (void)
4165 rtx head = get_last_insn ();
4166 rtx end;
4167 basic_block exit_block;
4168 edge e, e2;
4169 unsigned ix;
4170 edge_iterator ei;
4171 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4173 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4175 /* Make sure the locus is set to the end of the function, so that
4176 epilogue line numbers and warnings are set properly. */
4177 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4178 input_location = cfun->function_end_locus;
4180 /* The following insns belong to the top scope. */
4181 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4183 /* Generate rtl for function exit. */
4184 expand_function_end ();
4186 end = get_last_insn ();
4187 if (head == end)
4188 return;
4189 /* While emitting the function end we could move the end of the last basic block. */
4191 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4192 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4193 head = NEXT_INSN (head);
4194 exit_block = create_basic_block (NEXT_INSN (head), end,
4195 EXIT_BLOCK_PTR->prev_bb);
4196 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4197 exit_block->count = EXIT_BLOCK_PTR->count;
4198 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4199 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4201 ix = 0;
4202 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4204 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4205 if (!(e->flags & EDGE_ABNORMAL))
4206 redirect_edge_succ (e, exit_block);
4207 else
4208 ix++;
4211 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4212 e->probability = REG_BR_PROB_BASE;
4213 e->count = EXIT_BLOCK_PTR->count;
4214 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4215 if (e2 != e)
4217 e->count -= e2->count;
4218 exit_block->count -= e2->count;
4219 exit_block->frequency -= EDGE_FREQUENCY (e2);
4221 if (e->count < 0)
4222 e->count = 0;
4223 if (exit_block->count < 0)
4224 exit_block->count = 0;
4225 if (exit_block->frequency < 0)
4226 exit_block->frequency = 0;
4227 update_bb_for_insn (exit_block);
4230 /* Helper function for discover_nonconstant_array_refs.
4231 Look for ARRAY_REF nodes with non-constant indexes and mark their
4232 base variables addressable. */
4234 static tree
4235 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4236 void *data ATTRIBUTE_UNUSED)
4238 tree t = *tp;
4240 if (IS_TYPE_OR_DECL_P (t))
4241 *walk_subtrees = 0;
4242 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4244 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4245 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4246 && (!TREE_OPERAND (t, 2)
4247 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4248 || (TREE_CODE (t) == COMPONENT_REF
4249 && (!TREE_OPERAND (t,2)
4250 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4251 || TREE_CODE (t) == BIT_FIELD_REF
4252 || TREE_CODE (t) == REALPART_EXPR
4253 || TREE_CODE (t) == IMAGPART_EXPR
4254 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4255 || CONVERT_EXPR_P (t))
4256 t = TREE_OPERAND (t, 0);
4258 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4260 t = get_base_address (t);
4261 if (t && DECL_P (t)
4262 && DECL_MODE (t) != BLKmode)
4263 TREE_ADDRESSABLE (t) = 1;
4266 *walk_subtrees = 0;
4269 return NULL_TREE;
4272 /* RTL expansion is not able to compile array references with variable
4273 offsets for arrays stored in a single register. Discover such
4274 expressions and mark variables as addressable to avoid this
4275 scenario. */
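/* For instance, given something like

     struct { int v[2]; } s;
     ... = s.v[i];

   S is small enough that it might be given a non-BLKmode (register)
   mode, but the variable index I can only be expanded as a memory
   address calculation, so S must be marked TREE_ADDRESSABLE and kept
   in memory. */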
4277 static void
4278 discover_nonconstant_array_refs (void)
4280 basic_block bb;
4281 gimple_stmt_iterator gsi;
4283 FOR_EACH_BB (bb)
4284 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4286 gimple stmt = gsi_stmt (gsi);
4287 if (!is_gimple_debug (stmt))
4288 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4292 /* This function sets crtl->args.internal_arg_pointer to a virtual
4293 register if DRAP is needed. Local register allocator will replace
4294 virtual_incoming_args_rtx with the virtual register. */
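/* The DRAP (dynamic realign argument pointer) is a register that keeps
   pointing at the incoming argument area while the stack pointer itself
   gets realigned, e.g. when a local variable needs a bigger alignment
   than the incoming stack boundary guarantees. */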
4296 static void
4297 expand_stack_alignment (void)
4299 rtx drap_rtx;
4300 unsigned int preferred_stack_boundary;
4302 if (! SUPPORTS_STACK_ALIGNMENT)
4303 return;
4305 if (cfun->calls_alloca
4306 || cfun->has_nonlocal_label
4307 || crtl->has_nonlocal_goto)
4308 crtl->need_drap = true;
4310 /* Call update_stack_boundary here again to update the incoming stack
4311 boundary. It may set the incoming stack alignment to a different
4312 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4313 use the minimum incoming stack alignment to check if it is OK
4314 to perform sibcall optimization, since sibcall optimization will
4315 only align the outgoing stack to the incoming stack boundary. */
4316 if (targetm.calls.update_stack_boundary)
4317 targetm.calls.update_stack_boundary ();
4319 /* The incoming stack frame has to be aligned at least at
4320 parm_stack_boundary. */
4321 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4323 /* Update crtl->stack_alignment_estimated and use it later to align
4324 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4325 exceptions since callgraph doesn't collect incoming stack alignment
4326 in this case. */
4327 if (cfun->can_throw_non_call_exceptions
4328 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4329 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4330 else
4331 preferred_stack_boundary = crtl->preferred_stack_boundary;
4332 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4333 crtl->stack_alignment_estimated = preferred_stack_boundary;
4334 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4335 crtl->stack_alignment_needed = preferred_stack_boundary;
4337 gcc_assert (crtl->stack_alignment_needed
4338 <= crtl->stack_alignment_estimated);
4340 crtl->stack_realign_needed
4341 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4342 crtl->stack_realign_tried = crtl->stack_realign_needed;
4344 crtl->stack_realign_processed = true;
4346 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4347 alignment. */
4348 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4349 drap_rtx = targetm.calls.get_drap_rtx ();
4351 /* stack_realign_drap and drap_rtx must match. */
4352 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4354 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4355 if (NULL != drap_rtx)
4357 crtl->args.internal_arg_pointer = drap_rtx;
4359 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4360 needed. */
4361 fixup_tail_calls ();
4365 /* Translate the intermediate representation contained in the CFG
4366 from GIMPLE trees to RTL.
4368 We do conversion per basic block and preserve/update the tree CFG.
4369 This implies we have to do some magic as the CFG can simultaneously
4370 consist of basic blocks containing RTL and GIMPLE trees. This can
4371 confuse the CFG hooks, so be careful not to manipulate the CFG during
4372 the expansion. */
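/* Roughly, the steps below are: leave SSA form (rewrite_out_of_ssa),
   expand the used variables and the prologue-time setup done by
   expand_function_start, expand every basic block from GIMPLE to RTL
   (with the init block built before and the exit block after), commit
   pending edge insertions, split the expanded blocks into
   sub-basic-blocks, finish EH generation and clean up the CFG. */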
4374 static unsigned int
4375 gimple_expand_cfg (void)
4377 basic_block bb, init_block;
4378 sbitmap blocks;
4379 edge_iterator ei;
4380 edge e;
4381 rtx var_seq;
4382 unsigned i;
4384 timevar_push (TV_OUT_OF_SSA);
4385 rewrite_out_of_ssa (&SA);
4386 timevar_pop (TV_OUT_OF_SSA);
4387 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4388 sizeof (rtx));
4390 /* Make sure all values used by the optimization passes have sane
4391 defaults. */
4392 reg_renumber = 0;
4394 /* Some backends want to know that we are expanding to RTL. */
4395 currently_expanding_to_rtl = 1;
4396 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4397 free_dominance_info (CDI_DOMINATORS);
4399 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4401 insn_locators_alloc ();
4402 if (!DECL_IS_BUILTIN (current_function_decl))
4404 /* Eventually, all FEs should explicitly set function_start_locus. */
4405 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4406 set_curr_insn_source_location
4407 (DECL_SOURCE_LOCATION (current_function_decl));
4408 else
4409 set_curr_insn_source_location (cfun->function_start_locus);
4411 else
4412 set_curr_insn_source_location (UNKNOWN_LOCATION);
4413 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4414 prologue_locator = curr_insn_locator ();
4416 #ifdef INSN_SCHEDULING
4417 init_sched_attrs ();
4418 #endif
4420 /* Make sure the first insn is a note, even if we don't want line numbers.
4421 This makes sure the first insn will never be deleted.
4422 Also, final expects a note to appear there. */
4423 emit_note (NOTE_INSN_DELETED);
4425 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4426 discover_nonconstant_array_refs ();
4428 targetm.expand_to_rtl_hook ();
4429 crtl->stack_alignment_needed = STACK_BOUNDARY;
4430 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4431 crtl->stack_alignment_estimated = 0;
4432 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4433 cfun->cfg->max_jumptable_ents = 0;
4435 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4436 of the function section at expansion time to predict the distance of calls. */
4437 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4439 /* Expand the variables recorded during gimple lowering. */
4440 timevar_push (TV_VAR_EXPAND);
4441 start_sequence ();
4443 expand_used_vars ();
4445 var_seq = get_insns ();
4446 end_sequence ();
4447 timevar_pop (TV_VAR_EXPAND);
4449 /* Honor stack protection warnings. */
4450 if (warn_stack_protect)
4452 if (cfun->calls_alloca)
4453 warning (OPT_Wstack_protector,
4454 "stack protector not protecting local variables: "
4455 "variable length buffer");
4456 if (has_short_buffer && !crtl->stack_protect_guard)
4457 warning (OPT_Wstack_protector,
4458 "stack protector not protecting function: "
4459 "all local arrays are less than %d bytes long",
4460 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4463 /* Set up parameters and prepare for return, for the function. */
4464 expand_function_start (current_function_decl);
4466 /* If we emitted any instructions for setting up the variables,
4467 emit them before the FUNCTION_START note. */
4468 if (var_seq)
4470 emit_insn_before (var_seq, parm_birth_insn);
4472 /* In expand_function_end we'll insert the alloca save/restore
4473 before parm_birth_insn. We've just inserted an alloca call.
4474 Adjust the pointer to match. */
4475 parm_birth_insn = var_seq;
4478 /* Now that we also have the parameter RTXs, copy them over to our
4479 partitions. */
4480 for (i = 0; i < SA.map->num_partitions; i++)
4482 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4484 if (TREE_CODE (var) != VAR_DECL
4485 && !SA.partition_to_pseudo[i])
4486 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4487 gcc_assert (SA.partition_to_pseudo[i]);
4489 /* If this decl was marked as living in multiple places, reset
4490 this now to NULL. */
4491 if (DECL_RTL_IF_SET (var) == pc_rtx)
4492 SET_DECL_RTL (var, NULL);
4494 /* Some RTL parts really want to look at DECL_RTL(x) when x
4495 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4496 SET_DECL_RTL here to make this available, but that would mean
4497 selecting one of the potentially many RTLs for one DECL. Instead
4498 of doing that we simply reset the MEM_EXPR of the RTL in question,
4499 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4500 if (!DECL_RTL_SET_P (var))
4502 if (MEM_P (SA.partition_to_pseudo[i]))
4503 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4507 /* If we have a class containing differently aligned pointers
4508 we need to merge those into the corresponding RTL pointer
4509 alignment. */
4510 for (i = 1; i < num_ssa_names; i++)
4512 tree name = ssa_name (i);
4513 int part;
4514 rtx r;
4516 if (!name
4517 || !POINTER_TYPE_P (TREE_TYPE (name))
4518 /* We might have generated new SSA names in
4519 update_alias_info_with_stack_vars. They will have a NULL
4520 defining statement and won't be part of the partitioning,
4521 so ignore those. */
4522 || !SSA_NAME_DEF_STMT (name))
4523 continue;
4524 part = var_to_partition (SA.map, name);
4525 if (part == NO_PARTITION)
4526 continue;
4527 r = SA.partition_to_pseudo[part];
4528 if (REG_P (r))
4529 mark_reg_pointer (r, get_pointer_alignment (name));
4532 /* If this function is `main', emit a call to `__main'
4533 to run global initializers, etc. */
4534 if (DECL_NAME (current_function_decl)
4535 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4536 && DECL_FILE_SCOPE_P (current_function_decl))
4537 expand_main_function ();
4539 /* Initialize the stack_protect_guard field. This must happen after the
4540 call to __main (if any) so that the external decl is initialized. */
4541 if (crtl->stack_protect_guard)
4542 stack_protect_prologue ();
4544 expand_phi_nodes (&SA);
4546 /* Register rtl specific functions for cfg. */
4547 rtl_register_cfg_hooks ();
4549 init_block = construct_init_block ();
4551 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4552 remaining edges later. */
4553 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4554 e->flags &= ~EDGE_EXECUTABLE;
4556 lab_rtx_for_bb = pointer_map_create ();
4557 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4558 bb = expand_gimple_basic_block (bb);
4560 if (MAY_HAVE_DEBUG_INSNS)
4561 expand_debug_locations ();
4563 /* Free stuff we no longer need after GIMPLE optimizations. */
4564 free_dominance_info (CDI_DOMINATORS);
4565 free_dominance_info (CDI_POST_DOMINATORS);
4566 delete_tree_cfg_annotations ();
4568 timevar_push (TV_OUT_OF_SSA);
4569 finish_out_of_ssa (&SA);
4570 timevar_pop (TV_OUT_OF_SSA);
4572 timevar_push (TV_POST_EXPAND);
4573 /* We are no longer in SSA form. */
4574 cfun->gimple_df->in_ssa_p = false;
4575 if (current_loops)
4576 loops_state_clear (LOOP_CLOSED_SSA);
4578 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4579 conservatively to true until they are all profile aware. */
4580 pointer_map_destroy (lab_rtx_for_bb);
4581 free_histograms ();
4583 construct_exit_block ();
4584 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4585 insn_locators_finalize ();
4587 /* Zap the tree EH table. */
4588 set_eh_throw_stmt_table (cfun, NULL);
4590 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4591 to split edges, which edge insertions might do. */
4592 rebuild_jump_labels (get_insns ());
4594 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4596 edge e;
4597 edge_iterator ei;
4598 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4600 if (e->insns.r)
4602 rebuild_jump_labels_chain (e->insns.r);
4603 /* Avoid putting insns before parm_birth_insn. */
4604 if (e->src == ENTRY_BLOCK_PTR
4605 && single_succ_p (ENTRY_BLOCK_PTR)
4606 && parm_birth_insn)
4608 rtx insns = e->insns.r;
4609 e->insns.r = NULL_RTX;
4610 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4612 else
4613 commit_one_edge_insertion (e);
4615 else
4616 ei_next (&ei);
4620 /* We're done expanding trees to RTL. */
4621 currently_expanding_to_rtl = 0;
4623 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4625 edge e;
4626 edge_iterator ei;
4627 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4629 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4630 e->flags &= ~EDGE_EXECUTABLE;
4632 /* At the moment not all abnormal edges match the RTL
4633 representation. It is safe to remove them here as
4634 find_many_sub_basic_blocks will rediscover them.
4635 In the future we should get this fixed properly. */
4636 if ((e->flags & EDGE_ABNORMAL)
4637 && !(e->flags & EDGE_SIBCALL))
4638 remove_edge (e);
4639 else
4640 ei_next (&ei);
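/* Expansion may have emitted additional jumps, labels and barriers in
   the middle of what used to be a single GIMPLE basic block, so scan
   every block and let find_many_sub_basic_blocks split them where
   needed. */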
4644 blocks = sbitmap_alloc (last_basic_block);
4645 sbitmap_ones (blocks);
4646 find_many_sub_basic_blocks (blocks);
4647 sbitmap_free (blocks);
4648 purge_all_dead_edges ();
4650 expand_stack_alignment ();
4652 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4653 function. */
4654 if (crtl->tail_call_emit)
4655 fixup_tail_calls ();
4657 /* After initial rtl generation, call back to finish generating
4658 exception support code. We need to do this before cleaning up
4659 the CFG as the code does not expect dead landing pads. */
4660 if (cfun->eh->region_tree != NULL)
4661 finish_eh_generation ();
4663 /* Remove unreachable blocks, otherwise we cannot compute dominators
4664 which are needed for loop state verification. As a side-effect
4665 this also compacts blocks.
4666 ??? We cannot remove trivially dead insns here as for example
4667 the DRAP reg on i?86 is not magically live at this point.
4668 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4669 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4671 #ifdef ENABLE_CHECKING
4672 verify_flow_info ();
4673 #endif
4675 /* Initialize pseudos allocated for hard registers. */
4676 emit_initial_value_sets ();
4678 /* And finally unshare all RTL. */
4679 unshare_all_rtl ();
4681 /* There's no need to defer outputting this function any more; we
4682 know we want to output it. */
4683 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4685 /* Now that we're done expanding trees to RTL, we shouldn't have any
4686 more CONCATs anywhere. */
4687 generating_concat_p = 0;
4689 if (dump_file)
4691 fprintf (dump_file,
4692 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4693 /* And the pass manager will dump RTL for us. */
4696 /* If we're emitting a nested function, make sure its parent gets
4697 emitted as well. Doing otherwise confuses debug info. */
4699 tree parent;
4700 for (parent = DECL_CONTEXT (current_function_decl);
4701 parent != NULL_TREE;
4702 parent = get_containing_scope (parent))
4703 if (TREE_CODE (parent) == FUNCTION_DECL)
4704 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4707 /* We are now committed to emitting code for this function. Do any
4708 preparation, such as emitting abstract debug info for the inline
4709 before it gets mangled by optimization. */
4710 if (cgraph_function_possibly_inlined_p (current_function_decl))
4711 (*debug_hooks->outlining_inline_function) (current_function_decl);
4713 TREE_ASM_WRITTEN (current_function_decl) = 1;
4715 /* After expanding, the return labels are no longer needed. */
4716 return_label = NULL;
4717 naked_return_label = NULL;
4719 /* After expanding, the tm_restart map is no longer needed. */
4720 if (cfun->gimple_df->tm_restart)
4722 htab_delete (cfun->gimple_df->tm_restart);
4723 cfun->gimple_df->tm_restart = NULL;
4726 /* Tag the blocks with a depth number so that change_scope can find
4727 the common parent easily. */
4728 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4729 default_rtl_profile ();
4731 timevar_pop (TV_POST_EXPAND);
4733 return 0;
4736 struct rtl_opt_pass pass_expand =
4739 RTL_PASS,
4740 "expand", /* name */
4741 NULL, /* gate */
4742 gimple_expand_cfg, /* execute */
4743 NULL, /* sub */
4744 NULL, /* next */
4745 0, /* static_pass_number */
4746 TV_EXPAND, /* tv_id */
4747 PROP_ssa | PROP_gimple_leh | PROP_cfg
4748 | PROP_gimple_lcx, /* properties_required */
4749 PROP_rtl, /* properties_provided */
4750 PROP_ssa | PROP_trees, /* properties_destroyed */
4751 TODO_verify_ssa | TODO_verify_flow
4752 | TODO_verify_stmts, /* todo_flags_start */
4753 TODO_ggc_collect /* todo_flags_finish */