1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "regs.h" /* For reg_renumber. */
51 #include "integrate.h" /* For emit_initial_value_sets. */
52 #include "insn-attr.h" /* For INSN_SCHEDULING. */
54 /* This variable holds information helping the rewriting of SSA trees
55 into RTL. */
56 struct ssaexpand SA;
/* This variable holds the currently expanded gimple statement for purposes
of communicating the profile info to the builtin expanders.  */
60 gimple currently_expanding_gimple_stmt;
62 static rtx expand_debug_expr (tree);
64 /* Return an expression tree corresponding to the RHS of GIMPLE
65 statement STMT. */
67 tree
68 gimple_assign_rhs_to_tree (gimple stmt)
70 tree t;
71 enum gimple_rhs_class grhs_class;
73 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
75 if (grhs_class == GIMPLE_TERNARY_RHS)
76 t = build3 (gimple_assign_rhs_code (stmt),
77 TREE_TYPE (gimple_assign_lhs (stmt)),
78 gimple_assign_rhs1 (stmt),
79 gimple_assign_rhs2 (stmt),
80 gimple_assign_rhs3 (stmt));
81 else if (grhs_class == GIMPLE_BINARY_RHS)
82 t = build2 (gimple_assign_rhs_code (stmt),
83 TREE_TYPE (gimple_assign_lhs (stmt)),
84 gimple_assign_rhs1 (stmt),
85 gimple_assign_rhs2 (stmt));
86 else if (grhs_class == GIMPLE_UNARY_RHS)
87 t = build1 (gimple_assign_rhs_code (stmt),
88 TREE_TYPE (gimple_assign_lhs (stmt)),
89 gimple_assign_rhs1 (stmt));
90 else if (grhs_class == GIMPLE_SINGLE_RHS)
92 t = gimple_assign_rhs1 (stmt);
93 /* Avoid modifying this tree in place below. */
94 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
95 && gimple_location (stmt) != EXPR_LOCATION (t))
96 || (gimple_block (stmt)
97 && currently_expanding_to_rtl
98 && EXPR_P (t)
99 && gimple_block (stmt) != TREE_BLOCK (t)))
100 t = copy_node (t);
102 else
103 gcc_unreachable ();
105 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
106 SET_EXPR_LOCATION (t, gimple_location (stmt));
107 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
108 TREE_BLOCK (t) = gimple_block (stmt);
110 return t;
114 #ifndef STACK_ALIGNMENT_NEEDED
115 #define STACK_ALIGNMENT_NEEDED 1
116 #endif
118 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
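/* A minimal illustration, assuming an SSA name such as i_3 built over a
VAR_DECL 'i': SSAVAR (i_3) yields the underlying 'i', while for a plain
decl it is the identity, so the code below can query DECL_SIZE_UNIT,
DECL_ALIGN and friends uniformly for both kinds of operands.  */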
/* Associate declaration T with storage space X.  If T is not an
SSA name, this is exactly SET_DECL_RTL; otherwise make the
partition of T associated with X.  */
123 static inline void
124 set_rtl (tree t, rtx x)
126 if (TREE_CODE (t) == SSA_NAME)
128 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
129 if (x && !MEM_P (x))
130 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
131 /* For the benefit of debug information at -O0 (where vartracking
132 doesn't run) record the place also in the base DECL if it's
133 a normal variable (not a parameter). */
134 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
136 tree var = SSA_NAME_VAR (t);
137 /* If we don't yet have something recorded, just record it now. */
138 if (!DECL_RTL_SET_P (var))
139 SET_DECL_RTL (var, x);
140 /* If we have it set already to "multiple places" don't
141 change this. */
142 else if (DECL_RTL (var) == pc_rtx)
/* If we have something recorded and it's not the same place
as we want to record now, we have multiple partitions for the
same base variable, with different places.  We can't just
randomly choose one, so we have to say that we don't know.
This only happens with optimization, and there var-tracking
will figure out the right thing.  */
150 else if (DECL_RTL (var) != x)
151 SET_DECL_RTL (var, pc_rtx);
154 else
155 SET_DECL_RTL (t, x);
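/* An illustrative sketch, assuming -O with two partitions of the same
base VAR_DECL 'i' ending up in different pseudos (hypothetical RTL):

     set_rtl (i_1, reg:SI 58)  -> DECL_RTL (i) == reg:SI 58
     set_rtl (i_4, reg:SI 59)  -> DECL_RTL (i) == pc_rtx  (multiple places)

Once pc_rtx is recorded, later calls leave it alone and var-tracking is
expected to work out the real locations.  */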
158 /* This structure holds data relevant to one variable that will be
159 placed in a stack slot. */
160 struct stack_var
/* The variable.  */
tree decl;
/* Initially, the size of the variable.  Later, the size of the partition,
if this variable becomes its partition's representative.  */
167 HOST_WIDE_INT size;
/* The *byte* alignment required for this variable.  Or, as with the
size, the alignment for this partition.  */
171 unsigned int alignb;
173 /* The partition representative. */
174 size_t representative;
176 /* The next stack variable in the partition, or EOC. */
177 size_t next;
179 /* The numbers of conflicting stack variables. */
180 bitmap conflicts;
183 #define EOC ((size_t)-1)
185 /* We have an array of such objects while deciding allocation. */
186 static struct stack_var *stack_vars;
187 static size_t stack_vars_alloc;
188 static size_t stack_vars_num;
189 static struct pointer_map_t *decl_to_stack_part;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
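/* A worked example of the phase, assuming hypothetical target values
(the computation itself is done in expand_used_vars below):

     align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT   e.g. 16 bytes
     off = STARTING_FRAME_OFFSET % align                e.g.  8
     frame_phase = off ? align - off : 0                ->    8

so (frame_offset + frame_phase) % 16 == 0 exactly when frame_offset is
congruent to 8 modulo 16.  */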
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
/* Used during expand_used_vars.  Remember if we saw a character buffer
smaller than our cutoff threshold.  Used for -Wstack-protector.  */
206 static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore the alignment
we can't satisfy with the expected alignment of the stack boundary.  */
211 static unsigned int
212 align_local_variable (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 DECL_ALIGN (decl) = align;
216 return align / BITS_PER_UNIT;
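/* A minimal illustration: LOCAL_DECL_ALIGNMENT works in bits, so for a
decl the target wants aligned to 64 bits this records DECL_ALIGN = 64 and
returns 8 on the usual BITS_PER_UNIT == 8 targets, i.e. the byte
alignment used throughout the stack_var machinery below.  */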
219 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
220 Return the frame offset. */
222 static HOST_WIDE_INT
223 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
225 HOST_WIDE_INT offset, new_frame_offset;
227 new_frame_offset = frame_offset;
228 if (FRAME_GROWS_DOWNWARD)
230 new_frame_offset -= size + frame_phase;
231 new_frame_offset &= -align;
232 new_frame_offset += frame_phase;
233 offset = new_frame_offset;
235 else
237 new_frame_offset -= frame_phase;
238 new_frame_offset += align - 1;
239 new_frame_offset &= -align;
240 new_frame_offset += frame_phase;
241 offset = new_frame_offset;
242 new_frame_offset += size;
244 frame_offset = new_frame_offset;
246 if (frame_offset_overflow (frame_offset, cfun->decl))
247 frame_offset = offset = 0;
249 return offset;
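/* A worked example, assuming a downward-growing frame, frame_phase == 0
and two's-complement masking: starting from frame_offset == -4, a request
for size == 8 at align == 8 computes

     new_frame_offset = -4 - 8        ->  -12
     new_frame_offset &= -8           ->  -16
     offset = frame_offset = -16

so the new object occupies bytes [-16, -8) of the frame, 8-byte aligned.  */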
252 /* Accumulate DECL into STACK_VARS. */
254 static void
255 add_stack_var (tree decl)
257 struct stack_var *v;
259 if (stack_vars_num >= stack_vars_alloc)
261 if (stack_vars_alloc)
262 stack_vars_alloc = stack_vars_alloc * 3 / 2;
263 else
264 stack_vars_alloc = 32;
265 stack_vars
266 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
268 if (!decl_to_stack_part)
269 decl_to_stack_part = pointer_map_create ();
271 v = &stack_vars[stack_vars_num];
272 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
274 v->decl = decl;
275 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
276 /* Ensure that all variables have size, so that &a != &b for any two
277 variables that are simultaneously live. */
278 if (v->size == 0)
279 v->size = 1;
280 v->alignb = align_local_variable (SSAVAR (decl));
281 /* An alignment of zero can mightily confuse us later. */
282 gcc_assert (v->alignb != 0);
284 /* All variables are initially in their own partition. */
285 v->representative = stack_vars_num;
286 v->next = EOC;
288 /* All variables initially conflict with no other. */
289 v->conflicts = NULL;
291 /* Ensure that this decl doesn't get put onto the list twice. */
292 set_rtl (decl, pc_rtx);
294 stack_vars_num++;
297 /* Make the decls associated with luid's X and Y conflict. */
299 static void
300 add_stack_var_conflict (size_t x, size_t y)
302 struct stack_var *a = &stack_vars[x];
303 struct stack_var *b = &stack_vars[y];
304 if (!a->conflicts)
305 a->conflicts = BITMAP_ALLOC (NULL);
306 if (!b->conflicts)
307 b->conflicts = BITMAP_ALLOC (NULL);
308 bitmap_set_bit (a->conflicts, y);
309 bitmap_set_bit (b->conflicts, x);
312 /* Check whether the decls associated with luid's X and Y conflict. */
314 static bool
315 stack_var_conflict_p (size_t x, size_t y)
317 struct stack_var *a = &stack_vars[x];
318 struct stack_var *b = &stack_vars[y];
319 if (x == y)
320 return false;
321 /* Partitions containing an SSA name result from gimple registers
322 with things like unsupported modes. They are top-level and
323 hence conflict with everything else. */
324 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
325 return true;
327 if (!a->conflicts || !b->conflicts)
328 return false;
329 return bitmap_bit_p (a->conflicts, y);
332 /* Returns true if TYPE is or contains a union type. */
334 static bool
335 aggregate_contains_union_type (tree type)
337 tree field;
339 if (TREE_CODE (type) == UNION_TYPE
340 || TREE_CODE (type) == QUAL_UNION_TYPE)
341 return true;
342 if (TREE_CODE (type) == ARRAY_TYPE)
343 return aggregate_contains_union_type (TREE_TYPE (type));
344 if (TREE_CODE (type) != RECORD_TYPE)
345 return false;
347 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
348 if (TREE_CODE (field) == FIELD_DECL)
349 if (aggregate_contains_union_type (TREE_TYPE (field)))
350 return true;
352 return false;
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
sets that do not conflict, then do add a conflict for these variables
in the interference graph.  We also need to make sure to add conflicts
for structures containing unions.  Otherwise RTL alias analysis comes
along and, due to type-based aliasing rules, decides that for two
overlapping union temporaries { short s; int i; } accesses to the same
memory through different types may not alias, and happily reorders stores
across life-time boundaries of the temporaries (see PR25654).  */
364 static void
365 add_alias_set_conflicts (void)
367 size_t i, j, n = stack_vars_num;
369 for (i = 0; i < n; ++i)
371 tree type_i = TREE_TYPE (stack_vars[i].decl);
372 bool aggr_i = AGGREGATE_TYPE_P (type_i);
373 bool contains_union;
375 contains_union = aggregate_contains_union_type (type_i);
376 for (j = 0; j < i; ++j)
378 tree type_j = TREE_TYPE (stack_vars[j].decl);
379 bool aggr_j = AGGREGATE_TYPE_P (type_j);
380 if (aggr_i != aggr_j
381 /* Either the objects conflict by means of type based
382 aliasing rules, or we need to add a conflict. */
383 || !objects_must_conflict_p (type_i, type_j)
384 /* In case the types do not conflict ensure that access
385 to elements will conflict. In case of unions we have
386 to be careful as type based aliasing rules may say
387 access to the same memory does not conflict. So play
388 safe and add a conflict in this case when
389 -fstrict-aliasing is used. */
390 || (contains_union && flag_strict_aliasing))
391 add_stack_var_conflict (i, j);
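/* An illustrative sketch of the PR25654 hazard guarded against above,
using hypothetical code:

     union u { short s; int i; };
     { union u a; a.s = 1; use_s (a.s); }     // lifetime 1
     { union u b; b.i = 2; use_i (b.i); }     // lifetime 2

If 'a' and 'b' shared a stack slot, type-based RTL alias analysis could
decide the 'short' and 'int' accesses do not alias and reorder them across
the lifetime boundary; the extra conflict keeps them in separate slots
when -fstrict-aliasing is in effect.  */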
396 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
397 enter its partition number into bitmap DATA. */
399 static bool
400 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
402 bitmap active = (bitmap)data;
403 op = get_base_address (op);
404 if (op
405 && DECL_P (op)
406 && DECL_RTL_IF_SET (op) == pc_rtx)
408 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
409 if (v)
410 bitmap_set_bit (active, *v);
412 return false;
415 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
416 record conflicts between it and all currently active other partitions
417 from bitmap DATA. */
419 static bool
420 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
422 bitmap active = (bitmap)data;
423 op = get_base_address (op);
424 if (op
425 && DECL_P (op)
426 && DECL_RTL_IF_SET (op) == pc_rtx)
428 size_t *v =
429 (size_t *) pointer_map_contains (decl_to_stack_part, op);
430 if (v && bitmap_set_bit (active, *v))
432 size_t num = *v;
433 bitmap_iterator bi;
434 unsigned i;
435 gcc_assert (num < stack_vars_num);
436 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
437 add_stack_var_conflict (num, i);
440 return false;
443 /* Helper routine for add_scope_conflicts, calculating the active partitions
444 at the end of BB, leaving the result in WORK. We're called to generate
445 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
446 liveness. */
448 static void
449 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
451 edge e;
452 edge_iterator ei;
453 gimple_stmt_iterator gsi;
454 bool (*visit)(gimple, tree, void *);
456 bitmap_clear (work);
457 FOR_EACH_EDGE (e, ei, bb->preds)
458 bitmap_ior_into (work, (bitmap)e->src->aux);
460 visit = visit_op;
462 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
464 gimple stmt = gsi_stmt (gsi);
465 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
467 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
469 gimple stmt = gsi_stmt (gsi);
471 if (gimple_clobber_p (stmt))
473 tree lhs = gimple_assign_lhs (stmt);
474 size_t *v;
475 /* Nested function lowering might introduce LHSs
476 that are COMPONENT_REFs. */
477 if (TREE_CODE (lhs) != VAR_DECL)
478 continue;
479 if (DECL_RTL_IF_SET (lhs) == pc_rtx
480 && (v = (size_t *)
481 pointer_map_contains (decl_to_stack_part, lhs)))
482 bitmap_clear_bit (work, *v);
484 else if (!is_gimple_debug (stmt))
486 if (for_conflict
487 && visit == visit_op)
489 /* If this is the first real instruction in this BB we need
490 to add conflicts for everything live at this point now.
491 Unlike classical liveness for named objects we can't
492 rely on seeing a def/use of the names we're interested in.
493 There might merely be indirect loads/stores. We'd not add any
494 conflicts for such partitions. */
495 bitmap_iterator bi;
496 unsigned i;
497 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
499 unsigned j;
500 bitmap_iterator bj;
501 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
502 add_stack_var_conflict (i, j);
504 visit = visit_conflict;
506 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
511 /* Generate stack partition conflicts between all partitions that are
512 simultaneously live. */
514 static void
515 add_scope_conflicts (void)
517 basic_block bb;
518 bool changed;
519 bitmap work = BITMAP_ALLOC (NULL);
/* We approximate the live range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope
death clobber added by gimplify as ending point(s) of the range.
This overapproximates in the case where we, for instance, moved an
address-taken operation upward without also moving a dereference of it
upwards.  But it's conservatively correct, as a variable can never hold
values before its name is mentioned at least once.

We then do a mostly classical bitmap liveness algorithm.  */
531 FOR_ALL_BB (bb)
532 bb->aux = BITMAP_ALLOC (NULL);
534 changed = true;
535 while (changed)
537 changed = false;
538 FOR_EACH_BB (bb)
540 bitmap active = (bitmap)bb->aux;
541 add_scope_conflicts_1 (bb, work, false);
542 if (bitmap_ior_into (active, work))
543 changed = true;
547 FOR_EACH_BB (bb)
548 add_scope_conflicts_1 (bb, work, true);
550 BITMAP_FREE (work);
551 FOR_ALL_BB (bb)
552 BITMAP_FREE (bb->aux);
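/* An illustrative example of what this liveness buys us, using
hypothetical GIMPLE:

     { char a[64]; use (a); a ={v} {CLOBBER}; }
     { char b[64]; use (b); b ={v} {CLOBBER}; }

'a' is clobbered (dead) before 'b' is first mentioned, so no conflict is
recorded between their partitions and the two arrays may share a single
stack slot.  */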
555 /* A subroutine of partition_stack_vars. A comparison function for qsort,
556 sorting an array of indices by the properties of the object. */
558 static int
559 stack_var_cmp (const void *a, const void *b)
561 size_t ia = *(const size_t *)a;
562 size_t ib = *(const size_t *)b;
563 unsigned int aligna = stack_vars[ia].alignb;
564 unsigned int alignb = stack_vars[ib].alignb;
565 HOST_WIDE_INT sizea = stack_vars[ia].size;
566 HOST_WIDE_INT sizeb = stack_vars[ib].size;
567 tree decla = stack_vars[ia].decl;
568 tree declb = stack_vars[ib].decl;
569 bool largea, largeb;
570 unsigned int uida, uidb;
572 /* Primary compare on "large" alignment. Large comes first. */
573 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
574 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
575 if (largea != largeb)
576 return (int)largeb - (int)largea;
/* Secondary compare on size, decreasing.  */
579 if (sizea > sizeb)
580 return -1;
581 if (sizea < sizeb)
582 return 1;
584 /* Tertiary compare on true alignment, decreasing. */
585 if (aligna < alignb)
586 return -1;
587 if (aligna > alignb)
588 return 1;
590 /* Final compare on ID for sort stability, increasing.
591 Two SSA names are compared by their version, SSA names come before
592 non-SSA names, and two normal decls are compared by their DECL_UID. */
593 if (TREE_CODE (decla) == SSA_NAME)
595 if (TREE_CODE (declb) == SSA_NAME)
596 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
597 else
598 return -1;
600 else if (TREE_CODE (declb) == SSA_NAME)
601 return 1;
602 else
603 uida = DECL_UID (decla), uidb = DECL_UID (declb);
604 if (uida < uidb)
605 return 1;
606 if (uida > uidb)
607 return -1;
608 return 0;
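/* A worked example of the resulting order, assuming hypothetical
variables and MAX_SUPPORTED_STACK_ALIGNMENT == 128 bits:

     v1: align 32 bytes, size 16    ("large" alignment)
     v2: align 16 bytes, size 64
     v3: align  8 bytes, size 48
     v4: align  8 bytes, size  4

qsort with this comparator yields v1, v2, v3, v4: large-aligned objects
come first, the rest follow in decreasing size order, with ties on size
broken by alignment and, last, by the SSA version / DECL_UID rule for
sort stability.  */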
/* If the points-to solution *PT points to variables that are in a partition
together with other variables, add all partition members to the pointed-to
variables bitmap.  */
616 static void
617 add_partitioned_vars_to_ptset (struct pt_solution *pt,
618 struct pointer_map_t *decls_to_partitions,
619 struct pointer_set_t *visited, bitmap temp)
621 bitmap_iterator bi;
622 unsigned i;
623 bitmap *part;
625 if (pt->anything
626 || pt->vars == NULL
627 /* The pointed-to vars bitmap is shared, it is enough to
628 visit it once. */
629 || pointer_set_insert(visited, pt->vars))
630 return;
632 bitmap_clear (temp);
634 /* By using a temporary bitmap to store all members of the partitions
635 we have to add we make sure to visit each of the partitions only
636 once. */
637 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
638 if ((!temp
639 || !bitmap_bit_p (temp, i))
640 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
641 (void *)(size_t) i)))
642 bitmap_ior_into (temp, *part);
643 if (!bitmap_empty_p (temp))
644 bitmap_ior_into (pt->vars, temp);
647 /* Update points-to sets based on partition info, so we can use them on RTL.
648 The bitmaps representing stack partitions will be saved until expand,
649 where partitioned decls used as bases in memory expressions will be
650 rewritten. */
652 static void
653 update_alias_info_with_stack_vars (void)
655 struct pointer_map_t *decls_to_partitions = NULL;
656 size_t i, j;
657 tree var = NULL_TREE;
659 for (i = 0; i < stack_vars_num; i++)
661 bitmap part = NULL;
662 tree name;
663 struct ptr_info_def *pi;
/* Not interested in partitions with a single variable.  */
666 if (stack_vars[i].representative != i
667 || stack_vars[i].next == EOC)
668 continue;
670 if (!decls_to_partitions)
672 decls_to_partitions = pointer_map_create ();
673 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
676 /* Create an SSA_NAME that points to the partition for use
677 as base during alias-oracle queries on RTL for bases that
678 have been partitioned. */
679 if (var == NULL_TREE)
680 var = create_tmp_var (ptr_type_node, NULL);
681 name = make_ssa_name (var, NULL);
683 /* Create bitmaps representing partitions. They will be used for
684 points-to sets later, so use GGC alloc. */
685 part = BITMAP_GGC_ALLOC ();
686 for (j = i; j != EOC; j = stack_vars[j].next)
688 tree decl = stack_vars[j].decl;
689 unsigned int uid = DECL_PT_UID (decl);
690 /* We should never end up partitioning SSA names (though they
691 may end up on the stack). Neither should we allocate stack
692 space to something that is unused and thus unreferenced, except
693 for -O0 where we are preserving even unreferenced variables. */
694 gcc_assert (DECL_P (decl)
695 && (!optimize
696 || referenced_var_lookup (cfun, DECL_UID (decl))));
697 bitmap_set_bit (part, uid);
698 *((bitmap *) pointer_map_insert (decls_to_partitions,
699 (void *)(size_t) uid)) = part;
700 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
701 decl)) = name;
704 /* Make the SSA name point to all partition members. */
705 pi = get_ptr_info (name);
706 pt_solution_set (&pi->pt, part, false);
709 /* Make all points-to sets that contain one member of a partition
710 contain all members of the partition. */
711 if (decls_to_partitions)
713 unsigned i;
714 struct pointer_set_t *visited = pointer_set_create ();
715 bitmap temp = BITMAP_ALLOC (NULL);
717 for (i = 1; i < num_ssa_names; i++)
719 tree name = ssa_name (i);
720 struct ptr_info_def *pi;
722 if (name
723 && POINTER_TYPE_P (TREE_TYPE (name))
724 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
725 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
726 visited, temp);
729 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
730 decls_to_partitions, visited, temp);
732 pointer_set_destroy (visited);
733 pointer_map_destroy (decls_to_partitions);
734 BITMAP_FREE (temp);
738 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
739 partitioning algorithm. Partitions A and B are known to be non-conflicting.
740 Merge them into a single partition A. */
742 static void
743 union_stack_vars (size_t a, size_t b)
745 struct stack_var *vb = &stack_vars[b];
746 bitmap_iterator bi;
747 unsigned u;
749 gcc_assert (stack_vars[b].next == EOC);
750 /* Add B to A's partition. */
751 stack_vars[b].next = stack_vars[a].next;
752 stack_vars[b].representative = a;
753 stack_vars[a].next = b;
755 /* Update the required alignment of partition A to account for B. */
756 if (stack_vars[a].alignb < stack_vars[b].alignb)
757 stack_vars[a].alignb = stack_vars[b].alignb;
759 /* Update the interference graph and merge the conflicts. */
760 if (vb->conflicts)
762 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
763 add_stack_var_conflict (a, stack_vars[u].representative);
764 BITMAP_FREE (vb->conflicts);
/* A subroutine of expand_used_vars.  Binpack the variables into
partitions constrained by the interference graph.  The overall
algorithm used is as follows:

Sort the objects by size in descending order.
For each object A {
  S = size(A)
  O = 0
  loop {
    Look for the largest non-conflicting object B with size <= S.
    UNION (A, B)
  }
}
*/
783 static void
784 partition_stack_vars (void)
786 size_t si, sj, n = stack_vars_num;
788 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
789 for (si = 0; si < n; ++si)
790 stack_vars_sorted[si] = si;
792 if (n == 1)
793 return;
795 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
797 for (si = 0; si < n; ++si)
799 size_t i = stack_vars_sorted[si];
800 unsigned int ialign = stack_vars[i].alignb;
802 /* Ignore objects that aren't partition representatives. If we
803 see a var that is not a partition representative, it must
804 have been merged earlier. */
805 if (stack_vars[i].representative != i)
806 continue;
808 for (sj = si + 1; sj < n; ++sj)
810 size_t j = stack_vars_sorted[sj];
811 unsigned int jalign = stack_vars[j].alignb;
813 /* Ignore objects that aren't partition representatives. */
814 if (stack_vars[j].representative != j)
815 continue;
817 /* Ignore conflicting objects. */
818 if (stack_var_conflict_p (i, j))
819 continue;
821 /* Do not mix objects of "small" (supported) alignment
822 and "large" (unsupported) alignment. */
823 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
824 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
825 continue;
827 /* UNION the objects, placing J at OFFSET. */
828 union_stack_vars (i, j);
832 update_alias_info_with_stack_vars ();
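/* A small worked example of the partitioning, assuming hypothetical
variables of "small" alignment sorted as A (48 bytes), B (32 bytes),
C (16 bytes), where only A and B conflict: the loops above union C into
A's partition (the first non-conflicting candidate), leaving the two
partitions {A, C} and {B}; expand_stack_vars later assigns one frame
offset per partition.  */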
835 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
837 static void
838 dump_stack_var_partition (void)
840 size_t si, i, j, n = stack_vars_num;
842 for (si = 0; si < n; ++si)
844 i = stack_vars_sorted[si];
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars[i].representative != i)
848 continue;
850 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i, stack_vars[i].size,
852 stack_vars[i].alignb);
854 for (j = i; j != EOC; j = stack_vars[j].next)
856 fputc ('\t', dump_file);
857 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
859 fputc ('\n', dump_file);
863 /* Assign rtl to DECL at BASE + OFFSET. */
865 static void
866 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
867 HOST_WIDE_INT offset)
869 unsigned align;
870 rtx x;
872 /* If this fails, we've overflowed the stack frame. Error nicely? */
873 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
875 x = plus_constant (base, offset);
876 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
878 if (TREE_CODE (decl) != SSA_NAME)
/* Set the alignment we actually gave this decl if it isn't an SSA name.
If it is, we generate stack slots only accidentally, so it isn't as
important; we'll simply use the alignment that is already set.  */
883 if (base == virtual_stack_vars_rtx)
884 offset -= frame_phase;
885 align = offset & -offset;
886 align *= BITS_PER_UNIT;
887 if (align == 0 || align > base_align)
888 align = base_align;
890 /* One would think that we could assert that we're not decreasing
891 alignment here, but (at least) the i386 port does exactly this
892 via the MINIMUM_ALIGNMENT hook. */
894 DECL_ALIGN (decl) = align;
895 DECL_USER_ALIGN (decl) = 0;
898 set_mem_attributes (x, SSAVAR (decl), true);
899 set_rtl (decl, x);
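/* A worked example of the alignment derivation above: 'offset & -offset'
isolates the lowest set bit, i.e. the largest power of two dividing the
offset.  E.g. for offset == -24 (after subtracting frame_phase) it yields
8, so the decl is known to be 8-byte (64-bit) aligned within the frame;
the result is still capped by base_align.  */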
902 /* A subroutine of expand_used_vars. Give each partition representative
903 a unique location within the stack frame. Update each partition member
904 with that location. */
906 static void
907 expand_stack_vars (bool (*pred) (tree))
909 size_t si, i, j, n = stack_vars_num;
910 HOST_WIDE_INT large_size = 0, large_alloc = 0;
911 rtx large_base = NULL;
912 unsigned large_align = 0;
913 tree decl;
/* Determine if there are any variables requiring "large" alignment.
Since these are dynamically allocated, we only process them if
no predicate is involved.  */
918 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
919 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
921 /* Find the total size of these variables. */
922 for (si = 0; si < n; ++si)
924 unsigned alignb;
926 i = stack_vars_sorted[si];
927 alignb = stack_vars[i].alignb;
929 /* Stop when we get to the first decl with "small" alignment. */
930 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
931 break;
933 /* Skip variables that aren't partition representatives. */
934 if (stack_vars[i].representative != i)
935 continue;
937 /* Skip variables that have already had rtl assigned. See also
938 add_stack_var where we perpetrate this pc_rtx hack. */
939 decl = stack_vars[i].decl;
940 if ((TREE_CODE (decl) == SSA_NAME
941 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
942 : DECL_RTL (decl)) != pc_rtx)
943 continue;
945 large_size += alignb - 1;
946 large_size &= -(HOST_WIDE_INT)alignb;
947 large_size += stack_vars[i].size;
950 /* If there were any, allocate space. */
951 if (large_size > 0)
952 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
953 large_align, true);
956 for (si = 0; si < n; ++si)
958 rtx base;
959 unsigned base_align, alignb;
960 HOST_WIDE_INT offset;
962 i = stack_vars_sorted[si];
964 /* Skip variables that aren't partition representatives, for now. */
965 if (stack_vars[i].representative != i)
966 continue;
968 /* Skip variables that have already had rtl assigned. See also
969 add_stack_var where we perpetrate this pc_rtx hack. */
970 decl = stack_vars[i].decl;
971 if ((TREE_CODE (decl) == SSA_NAME
972 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
973 : DECL_RTL (decl)) != pc_rtx)
974 continue;
976 /* Check the predicate to see whether this variable should be
977 allocated in this pass. */
978 if (pred && !pred (decl))
979 continue;
981 alignb = stack_vars[i].alignb;
982 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
984 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
985 base = virtual_stack_vars_rtx;
986 base_align = crtl->max_used_stack_slot_alignment;
988 else
990 /* Large alignment is only processed in the last pass. */
991 if (pred)
992 continue;
993 gcc_assert (large_base != NULL);
995 large_alloc += alignb - 1;
996 large_alloc &= -(HOST_WIDE_INT)alignb;
997 offset = large_alloc;
998 large_alloc += stack_vars[i].size;
1000 base = large_base;
1001 base_align = large_align;
1004 /* Create rtl for each variable based on their location within the
1005 partition. */
1006 for (j = i; j != EOC; j = stack_vars[j].next)
1008 expand_one_stack_var_at (stack_vars[j].decl,
1009 base, base_align,
1010 offset);
1014 gcc_assert (large_alloc == large_size);
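/* An illustrative run of the "large" path above, assuming two
representatives that need 64-byte alignment, of sizes 100 and 40:
the first loop computes large_size = 100, then rounds to 128 and adds 40,
giving 168; one dynamic allocation of 168 bytes at 512-bit alignment is
made, and the second loop hands the two partitions offsets 0 and 128
within it, ending with large_alloc == large_size == 168.  */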
1017 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1018 static HOST_WIDE_INT
1019 account_stack_vars (void)
1021 size_t si, j, i, n = stack_vars_num;
1022 HOST_WIDE_INT size = 0;
1024 for (si = 0; si < n; ++si)
1026 i = stack_vars_sorted[si];
1028 /* Skip variables that aren't partition representatives, for now. */
1029 if (stack_vars[i].representative != i)
1030 continue;
1032 size += stack_vars[i].size;
1033 for (j = i; j != EOC; j = stack_vars[j].next)
1034 set_rtl (stack_vars[j].decl, NULL);
1036 return size;
1039 /* A subroutine of expand_one_var. Called to immediately assign rtl
1040 to a variable to be allocated in the stack frame. */
1042 static void
1043 expand_one_stack_var (tree var)
1045 HOST_WIDE_INT size, offset;
1046 unsigned byte_align;
1048 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1049 byte_align = align_local_variable (SSAVAR (var));
1051 /* We handle highly aligned variables in expand_stack_vars. */
1052 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1054 offset = alloc_stack_frame_space (size, byte_align);
1056 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1057 crtl->max_used_stack_slot_alignment, offset);
1060 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1061 that will reside in a hard register. */
1063 static void
1064 expand_one_hard_reg_var (tree var)
1066 rest_of_decl_compilation (var, 0, 0);
1069 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1070 that will reside in a pseudo register. */
1072 static void
1073 expand_one_register_var (tree var)
1075 tree decl = SSAVAR (var);
1076 tree type = TREE_TYPE (decl);
1077 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1078 rtx x = gen_reg_rtx (reg_mode);
1080 set_rtl (var, x);
1082 /* Note if the object is a user variable. */
1083 if (!DECL_ARTIFICIAL (decl))
1084 mark_user_reg (x);
1086 if (POINTER_TYPE_P (type))
1087 mark_reg_pointer (x, get_pointer_alignment (var));
1090 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1091 has some associated error, e.g. its type is error-mark. We just need
1092 to pick something that won't crash the rest of the compiler. */
1094 static void
1095 expand_one_error_var (tree var)
1097 enum machine_mode mode = DECL_MODE (var);
1098 rtx x;
1100 if (mode == BLKmode)
1101 x = gen_rtx_MEM (BLKmode, const0_rtx);
1102 else if (mode == VOIDmode)
1103 x = const0_rtx;
1104 else
1105 x = gen_reg_rtx (mode);
1107 SET_DECL_RTL (var, x);
1110 /* A subroutine of expand_one_var. VAR is a variable that will be
1111 allocated to the local stack frame. Return true if we wish to
1112 add VAR to STACK_VARS so that it will be coalesced with other
1113 variables. Return false to allocate VAR immediately.
1115 This function is used to reduce the number of variables considered
1116 for coalescing, which reduces the size of the quadratic problem. */
1118 static bool
1119 defer_stack_allocation (tree var, bool toplevel)
1121 /* If stack protection is enabled, *all* stack variables must be deferred,
1122 so that we can re-order the strings to the top of the frame. */
1123 if (flag_stack_protect)
1124 return true;
1126 /* We handle "large" alignment via dynamic allocation. We want to handle
1127 this extra complication in only one place, so defer them. */
1128 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1129 return true;
1131 /* Variables in the outermost scope automatically conflict with
1132 every other variable. The only reason to want to defer them
1133 at all is that, after sorting, we can more efficiently pack
1134 small variables in the stack frame. Continue to defer at -O2. */
1135 if (toplevel && optimize < 2)
1136 return false;
1138 /* Without optimization, *most* variables are allocated from the
1139 stack, which makes the quadratic problem large exactly when we
1140 want compilation to proceed as quickly as possible. On the
1141 other hand, we don't want the function's stack frame size to
1142 get completely out of hand. So we avoid adding scalars and
1143 "small" aggregates to the list at all. */
1144 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1145 return false;
1147 return true;
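/* Examples of the policy above, assuming no stack protector and ordinary
alignment: at -O0, outermost-scope variables and block-scope locals
smaller than 32 bytes are allocated immediately, while a 64-byte
block-scope array is still deferred; at -O2 even outermost-scope
variables are deferred, since packing after sorting is cheap enough to
be worthwhile.  */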
1150 /* A subroutine of expand_used_vars. Expand one variable according to
1151 its flavor. Variables to be placed on the stack are not actually
1152 expanded yet, merely recorded.
1153 When REALLY_EXPAND is false, only add stack values to be allocated.
1154 Return stack usage this variable is supposed to take.
1157 static HOST_WIDE_INT
1158 expand_one_var (tree var, bool toplevel, bool really_expand)
1160 unsigned int align = BITS_PER_UNIT;
1161 tree origvar = var;
1163 var = SSAVAR (var);
1165 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
/* Because we don't know if VAR will be in a register or on the stack,
we conservatively assume it will be on the stack even if VAR is
eventually put into a register after the RA pass.  For non-automatic
variables, which won't be on the stack, we collect the alignment of
the type and ignore user-specified alignment.  */
1172 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1173 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1174 TYPE_MODE (TREE_TYPE (var)),
1175 TYPE_ALIGN (TREE_TYPE (var)));
1176 else if (DECL_HAS_VALUE_EXPR_P (var)
1177 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1178 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1179 or variables which were assigned a stack slot already by
1180 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1181 changed from the offset chosen to it. */
1182 align = crtl->stack_alignment_estimated;
1183 else
1184 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
/* If the variable alignment is very large we'll dynamically allocate
it, which means that the in-frame portion is just a pointer.  */
1188 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1189 align = POINTER_SIZE;
1192 if (SUPPORTS_STACK_ALIGNMENT
1193 && crtl->stack_alignment_estimated < align)
/* stack_alignment_estimated shouldn't change after the stack
realign decision has been made.  */
1197 gcc_assert(!crtl->stack_realign_processed);
1198 crtl->stack_alignment_estimated = align;
1201 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1202 So here we only make sure stack_alignment_needed >= align. */
1203 if (crtl->stack_alignment_needed < align)
1204 crtl->stack_alignment_needed = align;
1205 if (crtl->max_used_stack_slot_alignment < align)
1206 crtl->max_used_stack_slot_alignment = align;
1208 if (TREE_CODE (origvar) == SSA_NAME)
1210 gcc_assert (TREE_CODE (var) != VAR_DECL
1211 || (!DECL_EXTERNAL (var)
1212 && !DECL_HAS_VALUE_EXPR_P (var)
1213 && !TREE_STATIC (var)
1214 && TREE_TYPE (var) != error_mark_node
1215 && !DECL_HARD_REGISTER (var)
1216 && really_expand));
1218 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1220 else if (DECL_EXTERNAL (var))
1222 else if (DECL_HAS_VALUE_EXPR_P (var))
1224 else if (TREE_STATIC (var))
1226 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1228 else if (TREE_TYPE (var) == error_mark_node)
1230 if (really_expand)
1231 expand_one_error_var (var);
1233 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1235 if (really_expand)
1236 expand_one_hard_reg_var (var);
1238 else if (use_register_for_decl (var))
1240 if (really_expand)
1241 expand_one_register_var (origvar);
1243 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1245 if (really_expand)
1247 error ("size of variable %q+D is too large", var);
1248 expand_one_error_var (var);
1251 else if (defer_stack_allocation (var, toplevel))
1252 add_stack_var (origvar);
1253 else
1255 if (really_expand)
1256 expand_one_stack_var (origvar);
1257 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1259 return 0;
1262 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1263 expanding variables. Those variables that can be put into registers
1264 are allocated pseudos; those that can't are put on the stack.
1266 TOPLEVEL is true if this is the outermost BLOCK. */
1268 static void
1269 expand_used_vars_for_block (tree block, bool toplevel)
1271 tree t;
1273 /* Expand all variables at this level. */
1274 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1275 if (TREE_USED (t)
1276 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1277 || !DECL_NONSHAREABLE (t)))
1278 expand_one_var (t, toplevel, true);
1280 /* Expand all variables at containing levels. */
1281 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1282 expand_used_vars_for_block (t, false);
1285 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1286 and clear TREE_USED on all local variables. */
1288 static void
1289 clear_tree_used (tree block)
1291 tree t;
1293 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1294 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1295 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1296 || !DECL_NONSHAREABLE (t))
1297 TREE_USED (t) = 0;
1299 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1300 clear_tree_used (t);
1303 /* Examine TYPE and determine a bit mask of the following features. */
1305 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1306 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1307 #define SPCT_HAS_ARRAY 4
1308 #define SPCT_HAS_AGGREGATE 8
1310 static unsigned int
1311 stack_protect_classify_type (tree type)
1313 unsigned int ret = 0;
1314 tree t;
1316 switch (TREE_CODE (type))
1318 case ARRAY_TYPE:
1319 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1320 if (t == char_type_node
1321 || t == signed_char_type_node
1322 || t == unsigned_char_type_node)
1324 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1325 unsigned HOST_WIDE_INT len;
1327 if (!TYPE_SIZE_UNIT (type)
1328 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1329 len = max;
1330 else
1331 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1333 if (len < max)
1334 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1335 else
1336 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1338 else
1339 ret = SPCT_HAS_ARRAY;
1340 break;
1342 case UNION_TYPE:
1343 case QUAL_UNION_TYPE:
1344 case RECORD_TYPE:
1345 ret = SPCT_HAS_AGGREGATE;
1346 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1347 if (TREE_CODE (t) == FIELD_DECL)
1348 ret |= stack_protect_classify_type (TREE_TYPE (t));
1349 break;
1351 default:
1352 break;
1355 return ret;
1358 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1359 part of the local stack frame. Remember if we ever return nonzero for
1360 any variable in this function. The return value is the phase number in
1361 which the variable should be allocated. */
1363 static int
1364 stack_protect_decl_phase (tree decl)
1366 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1367 int ret = 0;
1369 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1370 has_short_buffer = true;
1372 if (flag_stack_protect == 2)
1374 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1375 && !(bits & SPCT_HAS_AGGREGATE))
1376 ret = 1;
1377 else if (bits & SPCT_HAS_ARRAY)
1378 ret = 2;
1380 else
1381 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1383 if (ret)
1384 has_protected_decls = true;
1386 return ret;
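/* A worked example of the phase assignment, assuming the default
--param ssp-buffer-size=8: 'char buf[64]' classifies as
SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY and gets phase 1, so it is
allocated in the first expand_stack_vars pass, right after the guard;
'int v[16]' is SPCT_HAS_ARRAY only and gets phase 2 under
flag_stack_protect == 2 but phase 0 otherwise; a plain scalar gets
phase 0 and is allocated in the final pass.  */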
1389 /* Two helper routines that check for phase 1 and phase 2. These are used
1390 as callbacks for expand_stack_vars. */
1392 static bool
1393 stack_protect_decl_phase_1 (tree decl)
1395 return stack_protect_decl_phase (decl) == 1;
1398 static bool
1399 stack_protect_decl_phase_2 (tree decl)
1401 return stack_protect_decl_phase (decl) == 2;
1404 /* Ensure that variables in different stack protection phases conflict
1405 so that they are not merged and share the same stack slot. */
1407 static void
1408 add_stack_protection_conflicts (void)
1410 size_t i, j, n = stack_vars_num;
1411 unsigned char *phase;
1413 phase = XNEWVEC (unsigned char, n);
1414 for (i = 0; i < n; ++i)
1415 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1417 for (i = 0; i < n; ++i)
1419 unsigned char ph_i = phase[i];
1420 for (j = 0; j < i; ++j)
1421 if (ph_i != phase[j])
1422 add_stack_var_conflict (i, j);
1425 XDELETEVEC (phase);
1428 /* Create a decl for the guard at the top of the stack frame. */
1430 static void
1431 create_stack_guard (void)
1433 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1434 VAR_DECL, NULL, ptr_type_node);
1435 TREE_THIS_VOLATILE (guard) = 1;
1436 TREE_USED (guard) = 1;
1437 expand_one_stack_var (guard);
1438 crtl->stack_protect_guard = guard;
1441 /* Prepare for expanding variables. */
1442 static void
1443 init_vars_expansion (void)
1445 tree t;
1446 unsigned ix;
1447 /* Set TREE_USED on all variables in the local_decls. */
1448 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1449 TREE_USED (t) = 1;
1451 /* Clear TREE_USED on all variables associated with a block scope. */
1452 clear_tree_used (DECL_INITIAL (current_function_decl));
1454 /* Initialize local stack smashing state. */
1455 has_protected_decls = false;
1456 has_short_buffer = false;
1459 /* Free up stack variable graph data. */
1460 static void
1461 fini_vars_expansion (void)
1463 size_t i, n = stack_vars_num;
1464 for (i = 0; i < n; i++)
1465 BITMAP_FREE (stack_vars[i].conflicts);
1466 XDELETEVEC (stack_vars);
1467 XDELETEVEC (stack_vars_sorted);
1468 stack_vars = NULL;
1469 stack_vars_alloc = stack_vars_num = 0;
1470 pointer_map_destroy (decl_to_stack_part);
1471 decl_to_stack_part = NULL;
1474 /* Make a fair guess for the size of the stack frame of the function
1475 in NODE. This doesn't have to be exact, the result is only used in
1476 the inline heuristics. So we don't want to run the full stack var
1477 packing algorithm (which is quadratic in the number of stack vars).
1478 Instead, we calculate the total size of all stack vars. This turns
1479 out to be a pretty fair estimate -- packing of stack vars doesn't
1480 happen very often. */
1482 HOST_WIDE_INT
1483 estimated_stack_frame_size (struct cgraph_node *node)
1485 HOST_WIDE_INT size = 0;
1486 size_t i;
1487 tree var;
1488 tree old_cur_fun_decl = current_function_decl;
1489 referenced_var_iterator rvi;
1490 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1492 current_function_decl = node->decl;
1493 push_cfun (fn);
1495 gcc_checking_assert (gimple_referenced_vars (fn));
1496 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1497 size += expand_one_var (var, true, false);
1499 if (stack_vars_num > 0)
1501 /* Fake sorting the stack vars for account_stack_vars (). */
1502 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1503 for (i = 0; i < stack_vars_num; ++i)
1504 stack_vars_sorted[i] = i;
1505 size += account_stack_vars ();
1506 fini_vars_expansion ();
1508 pop_cfun ();
1509 current_function_decl = old_cur_fun_decl;
1510 return size;
1513 /* Expand all variables used in the function. */
1515 static void
1516 expand_used_vars (void)
1518 tree var, outer_block = DECL_INITIAL (current_function_decl);
1519 VEC(tree,heap) *maybe_local_decls = NULL;
1520 unsigned i;
1521 unsigned len;
1523 /* Compute the phase of the stack frame for this function. */
1525 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1526 int off = STARTING_FRAME_OFFSET % align;
1527 frame_phase = off ? align - off : 0;
1530 init_vars_expansion ();
1532 for (i = 0; i < SA.map->num_partitions; i++)
1534 tree var = partition_to_var (SA.map, i);
1536 gcc_assert (is_gimple_reg (var));
1537 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1538 expand_one_var (var, true, true);
1539 else
1541 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1542 contain the default def (representing the parm or result itself)
1543 we don't do anything here. But those which don't contain the
1544 default def (representing a temporary based on the parm/result)
1545 we need to allocate space just like for normal VAR_DECLs. */
1546 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1548 expand_one_var (var, true, true);
1549 gcc_assert (SA.partition_to_pseudo[i]);
1554 /* At this point all variables on the local_decls with TREE_USED
1555 set are not associated with any block scope. Lay them out. */
1557 len = VEC_length (tree, cfun->local_decls);
1558 FOR_EACH_LOCAL_DECL (cfun, i, var)
1560 bool expand_now = false;
1562 /* Expanded above already. */
1563 if (is_gimple_reg (var))
1565 TREE_USED (var) = 0;
1566 goto next;
1568 /* We didn't set a block for static or extern because it's hard
1569 to tell the difference between a global variable (re)declared
1570 in a local scope, and one that's really declared there to
1571 begin with. And it doesn't really matter much, since we're
1572 not giving them stack space. Expand them now. */
1573 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1574 expand_now = true;
1576 /* If the variable is not associated with any block, then it
1577 was created by the optimizers, and could be live anywhere
1578 in the function. */
1579 else if (TREE_USED (var))
1580 expand_now = true;
1582 /* Finally, mark all variables on the list as used. We'll use
1583 this in a moment when we expand those associated with scopes. */
1584 TREE_USED (var) = 1;
1586 if (expand_now)
1587 expand_one_var (var, true, true);
1589 next:
1590 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1592 rtx rtl = DECL_RTL_IF_SET (var);
1594 /* Keep artificial non-ignored vars in cfun->local_decls
1595 chain until instantiate_decls. */
1596 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1597 add_local_decl (cfun, var);
1598 else if (rtl == NULL_RTX)
1599 /* If rtl isn't set yet, which can happen e.g. with
1600 -fstack-protector, retry before returning from this
1601 function. */
1602 VEC_safe_push (tree, heap, maybe_local_decls, var);
/* We duplicated some of the decls in CFUN->LOCAL_DECLS.

   +-----------------+-----------------+
   | ...processed... | ...duplicates...|
   +-----------------+-----------------+
                     ^
                     +-- LEN points here.

   We just want the duplicates, as those are the artificial
   non-ignored vars that we want to keep until instantiate_decls.
   Move them down and truncate the array.  */
1617 if (!VEC_empty (tree, cfun->local_decls))
1618 VEC_block_remove (tree, cfun->local_decls, 0, len);
1620 /* At this point, all variables within the block tree with TREE_USED
1621 set are actually used by the optimized function. Lay them out. */
1622 expand_used_vars_for_block (outer_block, true);
1624 if (stack_vars_num > 0)
1626 add_scope_conflicts ();
1627 /* Due to the way alias sets work, no variables with non-conflicting
1628 alias sets may be assigned the same address. Add conflicts to
1629 reflect this. */
1630 add_alias_set_conflicts ();
1632 /* If stack protection is enabled, we don't share space between
1633 vulnerable data and non-vulnerable data. */
1634 if (flag_stack_protect)
1635 add_stack_protection_conflicts ();
1637 /* Now that we have collected all stack variables, and have computed a
1638 minimal interference graph, attempt to save some stack space. */
1639 partition_stack_vars ();
1640 if (dump_file)
1641 dump_stack_var_partition ();
1644 /* There are several conditions under which we should create a
1645 stack guard: protect-all, alloca used, protected decls present. */
1646 if (flag_stack_protect == 2
1647 || (flag_stack_protect
1648 && (cfun->calls_alloca || has_protected_decls)))
1649 create_stack_guard ();
1651 /* Assign rtl to each variable based on these partitions. */
1652 if (stack_vars_num > 0)
1654 /* Reorder decls to be protected by iterating over the variables
1655 array multiple times, and allocating out of each phase in turn. */
1656 /* ??? We could probably integrate this into the qsort we did
1657 earlier, such that we naturally see these variables first,
1658 and thus naturally allocate things in the right order. */
1659 if (has_protected_decls)
1661 /* Phase 1 contains only character arrays. */
1662 expand_stack_vars (stack_protect_decl_phase_1);
1664 /* Phase 2 contains other kinds of arrays. */
1665 if (flag_stack_protect == 2)
1666 expand_stack_vars (stack_protect_decl_phase_2);
1669 expand_stack_vars (NULL);
1671 fini_vars_expansion ();
1674 /* If there were any artificial non-ignored vars without rtl
1675 found earlier, see if deferred stack allocation hasn't assigned
1676 rtl to them. */
1677 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1679 rtx rtl = DECL_RTL_IF_SET (var);
1681 /* Keep artificial non-ignored vars in cfun->local_decls
1682 chain until instantiate_decls. */
1683 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1684 add_local_decl (cfun, var);
1686 VEC_free (tree, heap, maybe_local_decls);
1688 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1689 if (STACK_ALIGNMENT_NEEDED)
1691 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1692 if (!FRAME_GROWS_DOWNWARD)
1693 frame_offset += align - 1;
1694 frame_offset &= -align;
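/* A worked example, assuming a 16-byte preferred boundary: an
upward-growing frame of 23 bytes becomes 23 + 15 = 38, masked down to 32,
while a downward-growing frame_offset of -23 is masked directly to -32.  */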
1699 /* If we need to produce a detailed dump, print the tree representation
1700 for STMT to the dump file. SINCE is the last RTX after which the RTL
1701 generated for STMT should have been appended. */
1703 static void
1704 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1706 if (dump_file && (dump_flags & TDF_DETAILS))
1708 fprintf (dump_file, "\n;; ");
1709 print_gimple_stmt (dump_file, stmt, 0,
1710 TDF_SLIM | (dump_flags & TDF_LINENO));
1711 fprintf (dump_file, "\n");
1713 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1717 /* Maps the blocks that do not contain tree labels to rtx labels. */
1719 static struct pointer_map_t *lab_rtx_for_bb;
1721 /* Returns the label_rtx expression for a label starting basic block BB. */
1723 static rtx
1724 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1726 gimple_stmt_iterator gsi;
1727 tree lab;
1728 gimple lab_stmt;
1729 void **elt;
1731 if (bb->flags & BB_RTL)
1732 return block_label (bb);
1734 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1735 if (elt)
1736 return (rtx) *elt;
1738 /* Find the tree label if it is present. */
1740 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1742 lab_stmt = gsi_stmt (gsi);
1743 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1744 break;
1746 lab = gimple_label_label (lab_stmt);
1747 if (DECL_NONLOCAL (lab))
1748 break;
1750 return label_rtx (lab);
1753 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1754 *elt = gen_label_rtx ();
1755 return (rtx) *elt;
1759 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1760 of a basic block where we just expanded the conditional at the end,
1761 possibly clean up the CFG and instruction sequence. LAST is the
1762 last instruction before the just emitted jump sequence. */
1764 static void
1765 maybe_cleanup_end_of_block (edge e, rtx last)
1767 /* Special case: when jumpif decides that the condition is
1768 trivial it emits an unconditional jump (and the necessary
1769 barrier). But we still have two edges, the fallthru one is
1770 wrong. purge_dead_edges would clean this up later. Unfortunately
1771 we have to insert insns (and split edges) before
1772 find_many_sub_basic_blocks and hence before purge_dead_edges.
1773 But splitting edges might create new blocks which depend on the
1774 fact that if there are two edges there's no barrier. So the
1775 barrier would get lost and verify_flow_info would ICE. Instead
1776 of auditing all edge splitters to care for the barrier (which
1777 normally isn't there in a cleaned CFG), fix it here. */
1778 if (BARRIER_P (get_last_insn ()))
1780 rtx insn;
1781 remove_edge (e);
1782 /* Now, we have a single successor block, if we have insns to
1783 insert on the remaining edge we potentially will insert
1784 it at the end of this block (if the dest block isn't feasible)
1785 in order to avoid splitting the edge. This insertion will take
1786 place in front of the last jump. But we might have emitted
1787 multiple jumps (conditional and one unconditional) to the
1788 same destination. Inserting in front of the last one then
1789 is a problem. See PR 40021. We fix this by deleting all
1790 jumps except the last unconditional one. */
1791 insn = PREV_INSN (get_last_insn ());
1792 /* Make sure we have an unconditional jump. Otherwise we're
1793 confused. */
1794 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1795 for (insn = PREV_INSN (insn); insn != last;)
1797 insn = PREV_INSN (insn);
1798 if (JUMP_P (NEXT_INSN (insn)))
1800 if (!any_condjump_p (NEXT_INSN (insn)))
1802 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1803 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1805 delete_insn (NEXT_INSN (insn));
1811 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1812 Returns a new basic block if we've terminated the current basic
1813 block and created a new one. */
1815 static basic_block
1816 expand_gimple_cond (basic_block bb, gimple stmt)
1818 basic_block new_bb, dest;
1819 edge new_edge;
1820 edge true_edge;
1821 edge false_edge;
1822 rtx last2, last;
1823 enum tree_code code;
1824 tree op0, op1;
1826 code = gimple_cond_code (stmt);
1827 op0 = gimple_cond_lhs (stmt);
1828 op1 = gimple_cond_rhs (stmt);
1829 /* We're sometimes presented with such code:
1830 D.123_1 = x < y;
1831 if (D.123_1 != 0)
1833 This would expand to two comparisons which then later might
1834 be cleaned up by combine. But some pattern matchers like if-conversion
1835 work better when there's only one compare, so make up for this
here as a special exception if TER would have made the same change.  */
1837 if (gimple_cond_single_var_p (stmt)
1838 && SA.values
1839 && TREE_CODE (op0) == SSA_NAME
1840 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1842 gimple second = SSA_NAME_DEF_STMT (op0);
1843 if (gimple_code (second) == GIMPLE_ASSIGN)
1845 enum tree_code code2 = gimple_assign_rhs_code (second);
1846 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1848 code = code2;
1849 op0 = gimple_assign_rhs1 (second);
1850 op1 = gimple_assign_rhs2 (second);
1852 /* If jumps are cheap turn some more codes into
1853 jumpy sequences. */
1854 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1856 if ((code2 == BIT_AND_EXPR
1857 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1858 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1859 || code2 == TRUTH_AND_EXPR)
1861 code = TRUTH_ANDIF_EXPR;
1862 op0 = gimple_assign_rhs1 (second);
1863 op1 = gimple_assign_rhs2 (second);
1865 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1867 code = TRUTH_ORIF_EXPR;
1868 op0 = gimple_assign_rhs1 (second);
1869 op1 = gimple_assign_rhs2 (second);
1875 last2 = last = get_last_insn ();
1877 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1878 set_curr_insn_source_location (gimple_location (stmt));
1879 set_curr_insn_block (gimple_block (stmt));
1881 /* These flags have no purpose in RTL land. */
1882 true_edge->flags &= ~EDGE_TRUE_VALUE;
1883 false_edge->flags &= ~EDGE_FALSE_VALUE;
 1885   /* We can either have a pure conditional jump with one fallthru edge or a
1886 two-way jump that needs to be decomposed into two basic blocks. */
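  /* A sketch of the second (two-way) case, which is handled at the end of
     this function: when neither successor is the next block we emit

         <conditional jump to the true destination>
         <unconditional jump to the false destination>

     and then put everything from the unconditional jump onwards into a new
     basic block: the false edge is redirected so that BB falls through into
     that block, which in turn gets a new edge to the original false
     destination.  */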
1887 if (false_edge->dest == bb->next_bb)
1889 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1890 true_edge->probability);
1891 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1892 if (true_edge->goto_locus)
1894 set_curr_insn_source_location (true_edge->goto_locus);
1895 set_curr_insn_block (true_edge->goto_block);
1896 true_edge->goto_locus = curr_insn_locator ();
1898 true_edge->goto_block = NULL;
1899 false_edge->flags |= EDGE_FALLTHRU;
1900 maybe_cleanup_end_of_block (false_edge, last);
1901 return NULL;
1903 if (true_edge->dest == bb->next_bb)
1905 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1906 false_edge->probability);
1907 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1908 if (false_edge->goto_locus)
1910 set_curr_insn_source_location (false_edge->goto_locus);
1911 set_curr_insn_block (false_edge->goto_block);
1912 false_edge->goto_locus = curr_insn_locator ();
1914 false_edge->goto_block = NULL;
1915 true_edge->flags |= EDGE_FALLTHRU;
1916 maybe_cleanup_end_of_block (true_edge, last);
1917 return NULL;
1920 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1921 true_edge->probability);
1922 last = get_last_insn ();
1923 if (false_edge->goto_locus)
1925 set_curr_insn_source_location (false_edge->goto_locus);
1926 set_curr_insn_block (false_edge->goto_block);
1927 false_edge->goto_locus = curr_insn_locator ();
1929 false_edge->goto_block = NULL;
1930 emit_jump (label_rtx_for_bb (false_edge->dest));
1932 BB_END (bb) = last;
1933 if (BARRIER_P (BB_END (bb)))
1934 BB_END (bb) = PREV_INSN (BB_END (bb));
1935 update_bb_for_insn (bb);
1937 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1938 dest = false_edge->dest;
1939 redirect_edge_succ (false_edge, new_bb);
1940 false_edge->flags |= EDGE_FALLTHRU;
1941 new_bb->count = false_edge->count;
1942 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1943 new_edge = make_edge (new_bb, dest, 0);
1944 new_edge->probability = REG_BR_PROB_BASE;
1945 new_edge->count = new_bb->count;
1946 if (BARRIER_P (BB_END (new_bb)))
1947 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1948 update_bb_for_insn (new_bb);
1950 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1952 if (true_edge->goto_locus)
1954 set_curr_insn_source_location (true_edge->goto_locus);
1955 set_curr_insn_block (true_edge->goto_block);
1956 true_edge->goto_locus = curr_insn_locator ();
1958 true_edge->goto_block = NULL;
1960 return new_bb;
1963 /* Mark all calls that can have a transaction restart. */
1965 static void
1966 mark_transaction_restart_calls (gimple stmt)
1968 struct tm_restart_node dummy;
1969 void **slot;
1971 if (!cfun->gimple_df->tm_restart)
1972 return;
1974 dummy.stmt = stmt;
1975 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1976 if (slot)
1978 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1979 tree list = n->label_or_list;
1980 rtx insn;
1982 for (insn = next_real_insn (get_last_insn ());
1983 !CALL_P (insn);
1984 insn = next_real_insn (insn))
1985 continue;
1987 if (TREE_CODE (list) == LABEL_DECL)
1988 add_reg_note (insn, REG_TM, label_rtx (list));
1989 else
1990 for (; list ; list = TREE_CHAIN (list))
1991 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1995 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1996 statement STMT. */
1998 static void
1999 expand_call_stmt (gimple stmt)
2001 tree exp, decl, lhs;
2002 bool builtin_p;
2003 size_t i;
2005 if (gimple_call_internal_p (stmt))
2007 expand_internal_call (stmt);
2008 return;
2011 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2013 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2014 decl = gimple_call_fndecl (stmt);
2015 builtin_p = decl && DECL_BUILT_IN (decl);
2017 /* If this is not a builtin function, the function type through which the
2018 call is made may be different from the type of the function. */
2019 if (!builtin_p)
2020 CALL_EXPR_FN (exp)
2021 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2022 CALL_EXPR_FN (exp));
2024 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2025 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2027 for (i = 0; i < gimple_call_num_args (stmt); i++)
2029 tree arg = gimple_call_arg (stmt, i);
2030 gimple def;
 2031       /* Substitute TERed addresses into arguments of builtin functions so we have a
2032 chance to infer more correct alignment information. See PR39954. */
2033 if (builtin_p
2034 && TREE_CODE (arg) == SSA_NAME
2035 && (def = get_gimple_for_ssa_name (arg))
2036 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2037 arg = gimple_assign_rhs1 (def);
2038 CALL_EXPR_ARG (exp, i) = arg;
2041 if (gimple_has_side_effects (stmt))
2042 TREE_SIDE_EFFECTS (exp) = 1;
2044 if (gimple_call_nothrow_p (stmt))
2045 TREE_NOTHROW (exp) = 1;
2047 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2048 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2049 if (decl
2050 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2051 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2052 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2053 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2054 else
2055 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2056 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2057 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2058 TREE_BLOCK (exp) = gimple_block (stmt);
2060 /* Ensure RTL is created for debug args. */
2061 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2063 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2064 unsigned int ix;
2065 tree dtemp;
2067 if (debug_args)
2068 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2070 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2071 expand_debug_expr (dtemp);
2075 lhs = gimple_call_lhs (stmt);
2076 if (lhs)
2077 expand_assignment (lhs, exp, false);
2078 else
2079 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2081 mark_transaction_restart_calls (stmt);
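/* An illustrative sketch of the mapping done above (names made up): the
   GIMPLE call

       x_1 = foo (a_2, b_3);

   is rebuilt as a CALL_EXPR tree with 2 + 3 operands (the three fixed
   CALL_EXPR slots plus the two arguments) and expanded through
   expand_assignment into x_1's location; a call whose value is unused
   goes through expand_expr_real_1 with a const0_rtx target instead.  */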
2084 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2085 STMT that doesn't require special handling for outgoing edges. That
 2086    is, no tailcalls and no GIMPLE_COND.  */
2088 static void
2089 expand_gimple_stmt_1 (gimple stmt)
2091 tree op0;
2093 set_curr_insn_source_location (gimple_location (stmt));
2094 set_curr_insn_block (gimple_block (stmt));
2096 switch (gimple_code (stmt))
2098 case GIMPLE_GOTO:
2099 op0 = gimple_goto_dest (stmt);
2100 if (TREE_CODE (op0) == LABEL_DECL)
2101 expand_goto (op0);
2102 else
2103 expand_computed_goto (op0);
2104 break;
2105 case GIMPLE_LABEL:
2106 expand_label (gimple_label_label (stmt));
2107 break;
2108 case GIMPLE_NOP:
2109 case GIMPLE_PREDICT:
2110 break;
2111 case GIMPLE_SWITCH:
2112 expand_case (stmt);
2113 break;
2114 case GIMPLE_ASM:
2115 expand_asm_stmt (stmt);
2116 break;
2117 case GIMPLE_CALL:
2118 expand_call_stmt (stmt);
2119 break;
2121 case GIMPLE_RETURN:
2122 op0 = gimple_return_retval (stmt);
2124 if (op0 && op0 != error_mark_node)
2126 tree result = DECL_RESULT (current_function_decl);
2128 /* If we are not returning the current function's RESULT_DECL,
2129 build an assignment to it. */
2130 if (op0 != result)
2132 /* I believe that a function's RESULT_DECL is unique. */
2133 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2135 /* ??? We'd like to use simply expand_assignment here,
2136 but this fails if the value is of BLKmode but the return
2137 decl is a register. expand_return has special handling
2138 for this combination, which eventually should move
2139 to common code. See comments there. Until then, let's
2140 build a modify expression :-/ */
2141 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2142 result, op0);
2145 if (!op0)
2146 expand_null_return ();
2147 else
2148 expand_return (op0);
2149 break;
2151 case GIMPLE_ASSIGN:
2153 tree lhs = gimple_assign_lhs (stmt);
2155 /* Tree expand used to fiddle with |= and &= of two bitfield
 2156          COMPONENT_REFs here.  This can't happen with gimple; the LHS
2157 of binary assigns must be a gimple reg. */
2159 if (TREE_CODE (lhs) != SSA_NAME
2160 || get_gimple_rhs_class (gimple_expr_code (stmt))
2161 == GIMPLE_SINGLE_RHS)
2163 tree rhs = gimple_assign_rhs1 (stmt);
2164 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2165 == GIMPLE_SINGLE_RHS);
2166 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2167 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2168 if (TREE_CLOBBER_P (rhs))
 2169          /* This is a clobber marking that this LHS is going
 2170             out of scope.  */
2172 else
2173 expand_assignment (lhs, rhs,
2174 gimple_assign_nontemporal_move_p (stmt));
2176 else
2178 rtx target, temp;
2179 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2180 struct separate_ops ops;
2181 bool promoted = false;
2183 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2184 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2185 promoted = true;
2187 ops.code = gimple_assign_rhs_code (stmt);
2188 ops.type = TREE_TYPE (lhs);
2189 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2191 case GIMPLE_TERNARY_RHS:
2192 ops.op2 = gimple_assign_rhs3 (stmt);
2193 /* Fallthru */
2194 case GIMPLE_BINARY_RHS:
2195 ops.op1 = gimple_assign_rhs2 (stmt);
2196 /* Fallthru */
2197 case GIMPLE_UNARY_RHS:
2198 ops.op0 = gimple_assign_rhs1 (stmt);
2199 break;
2200 default:
2201 gcc_unreachable ();
2203 ops.location = gimple_location (stmt);
2205 /* If we want to use a nontemporal store, force the value to
2206 register first. If we store into a promoted register,
2207 don't directly expand to target. */
2208 temp = nontemporal || promoted ? NULL_RTX : target;
2209 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2210 EXPAND_NORMAL);
2212 if (temp == target)
2214 else if (promoted)
2216 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2217 /* If TEMP is a VOIDmode constant, use convert_modes to make
2218 sure that we properly convert it. */
2219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2221 temp = convert_modes (GET_MODE (target),
2222 TYPE_MODE (ops.type),
2223 temp, unsignedp);
2224 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2225 GET_MODE (target), temp, unsignedp);
2228 convert_move (SUBREG_REG (target), temp, unsignedp);
2230 else if (nontemporal && emit_storent_insn (target, temp))
2232 else
2234 temp = force_operand (temp, target);
2235 if (temp != target)
2236 emit_move_insn (target, temp);
2240 break;
2242 default:
2243 gcc_unreachable ();
2247 /* Expand one gimple statement STMT and return the last RTL instruction
2248 before any of the newly generated ones.
2250 In addition to generating the necessary RTL instructions this also
2251 sets REG_EH_REGION notes if necessary and sets the current source
2252 location for diagnostics. */
2254 static rtx
2255 expand_gimple_stmt (gimple stmt)
2257 location_t saved_location = input_location;
2258 rtx last = get_last_insn ();
2259 int lp_nr;
2261 gcc_assert (cfun);
2263 /* We need to save and restore the current source location so that errors
2264 discovered during expansion are emitted with the right location. But
2265 it would be better if the diagnostic routines used the source location
2266 embedded in the tree nodes rather than globals. */
2267 if (gimple_has_location (stmt))
2268 input_location = gimple_location (stmt);
2270 expand_gimple_stmt_1 (stmt);
2272 /* Free any temporaries used to evaluate this statement. */
2273 free_temp_slots ();
2275 input_location = saved_location;
2277 /* Mark all insns that may trap. */
2278 lp_nr = lookup_stmt_eh_lp (stmt);
2279 if (lp_nr)
2281 rtx insn;
2282 for (insn = next_real_insn (last); insn;
2283 insn = next_real_insn (insn))
2285 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2286 /* If we want exceptions for non-call insns, any
2287 may_trap_p instruction may throw. */
2288 && GET_CODE (PATTERN (insn)) != CLOBBER
2289 && GET_CODE (PATTERN (insn)) != USE
2290 && insn_could_throw_p (insn))
2291 make_reg_eh_region_note (insn, 0, lp_nr);
2295 return last;
2298 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2299 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2300 generated a tail call (something that might be denied by the ABI
2301 rules governing the call; see calls.c).
2303 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
 2304    execution can still reach the rest of BB.  The case here is __builtin_sqrt,
2305 where the NaN result goes through the external function (with a
2306 tailcall) and the normal result happens via a sqrt instruction. */
2308 static basic_block
2309 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2311 rtx last2, last;
2312 edge e;
2313 edge_iterator ei;
2314 int probability;
2315 gcov_type count;
2317 last2 = last = expand_gimple_stmt (stmt);
2319 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2320 if (CALL_P (last) && SIBLING_CALL_P (last))
2321 goto found;
2323 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2325 *can_fallthru = true;
2326 return NULL;
2328 found:
2329 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2330 Any instructions emitted here are about to be deleted. */
2331 do_pending_stack_adjust ();
2333 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2334 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2335 EH or abnormal edges, we shouldn't have created a tail call in
2336 the first place. So it seems to me we should just be removing
2337 all edges here, or redirecting the existing fallthru edge to
2338 the exit block. */
2340 probability = 0;
2341 count = 0;
2343 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2345 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2347 if (e->dest != EXIT_BLOCK_PTR)
2349 e->dest->count -= e->count;
2350 e->dest->frequency -= EDGE_FREQUENCY (e);
2351 if (e->dest->count < 0)
2352 e->dest->count = 0;
2353 if (e->dest->frequency < 0)
2354 e->dest->frequency = 0;
2356 count += e->count;
2357 probability += e->probability;
2358 remove_edge (e);
2360 else
2361 ei_next (&ei);
2364 /* This is somewhat ugly: the call_expr expander often emits instructions
2365 after the sibcall (to perform the function return). These confuse the
 2366      find_many_sub_basic_blocks code, so we need to get rid of them.  */
2367 last = NEXT_INSN (last);
2368 gcc_assert (BARRIER_P (last));
2370 *can_fallthru = false;
2371 while (NEXT_INSN (last))
 2374        /* For instance, an sqrt builtin expander expands an if with a
 2375           sibcall in the then-branch and a label for the else-branch.  */
2375 if (LABEL_P (NEXT_INSN (last)))
2377 *can_fallthru = true;
2378 break;
2380 delete_insn (NEXT_INSN (last));
2383 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2384 e->probability += probability;
2385 e->count += count;
2386 BB_END (bb) = last;
2387 update_bb_for_insn (bb);
2389 if (NEXT_INSN (last))
2391 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2393 last = BB_END (bb);
2394 if (BARRIER_P (last))
2395 BB_END (bb) = PREV_INSN (last);
2398 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2400 return bb;
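/* Illustrative insn layout for the conditional tail call case described
   above (a sketch, not from a real target):

       (call_insn ... sibcall)     <-- found by the scan above
       (barrier)
       ... insns that will be deleted ...
       (code_label L)              <-- stop deleting, set *CAN_FALLTHRU
       ... normal (non-tailcall) result computation ...

   Everything between the barrier and the first label is removed, and the
   label with what follows it is split off into the new basic block that
   is returned.  */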
2403 /* Return the difference between the floor and the truncated result of
2404 a signed division by OP1 with remainder MOD. */
2405 static rtx
2406 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2408 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2409 return gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2411 gen_rtx_IF_THEN_ELSE
2412 (mode, gen_rtx_LT (BImode,
2413 gen_rtx_DIV (mode, op1, mod),
2414 const0_rtx),
2415 constm1_rtx, const0_rtx),
2416 const0_rtx);
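/* A worked example of the formula above: for -7 / 2, truncation gives
   -3 with MOD = -1.  MOD is nonzero and OP1 / MOD = 2 / -1 = -2 < 0,
   so the adjustment is -1 and -3 + -1 = -4 == floor (-7 / 2).  */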
2419 /* Return the difference between the ceil and the truncated result of
2420 a signed division by OP1 with remainder MOD. */
2421 static rtx
2422 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2424 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2425 return gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2427 gen_rtx_IF_THEN_ELSE
2428 (mode, gen_rtx_GT (BImode,
2429 gen_rtx_DIV (mode, op1, mod),
2430 const0_rtx),
2431 const1_rtx, const0_rtx),
2432 const0_rtx);
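/* For example, for 7 / 2 truncation gives 3 with MOD = 1; MOD is nonzero
   and OP1 / MOD = 2 > 0, so the adjustment is 1 and 3 + 1 = 4 == ceil (7 / 2).  */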
2435 /* Return the difference between the ceil and the truncated result of
2436 an unsigned division by OP1 with remainder MOD. */
2437 static rtx
2438 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2440 /* (mod != 0 ? 1 : 0) */
2441 return gen_rtx_IF_THEN_ELSE
2442 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2443 const1_rtx, const0_rtx);
2446 /* Return the difference between the rounded and the truncated result
2447 of a signed division by OP1 with remainder MOD. Halfway cases are
2448 rounded away from zero, rather than to the nearest even number. */
2449 static rtx
2450 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2452 /* (abs (mod) >= abs (op1) - abs (mod)
2453 ? (op1 / mod > 0 ? 1 : -1)
2454 : 0) */
2455 return gen_rtx_IF_THEN_ELSE
2456 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2457 gen_rtx_MINUS (mode,
2458 gen_rtx_ABS (mode, op1),
2459 gen_rtx_ABS (mode, mod))),
2460 gen_rtx_IF_THEN_ELSE
2461 (mode, gen_rtx_GT (BImode,
2462 gen_rtx_DIV (mode, op1, mod),
2463 const0_rtx),
2464 const1_rtx, constm1_rtx),
2465 const0_rtx);
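/* A worked example: for -7 / 2, truncation gives -3 with MOD = -1.
   abs (MOD) = 1 and abs (OP1) - abs (MOD) = 1, so the first test holds;
   OP1 / MOD = -2 is not > 0, so the adjustment is -1 and -3 + -1 = -4,
   i.e. -3.5 rounded away from zero.  */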
2468 /* Return the difference between the rounded and the truncated result
 2469    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2470 are rounded away from zero, rather than to the nearest even
2471 number. */
2472 static rtx
2473 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2475 /* (mod >= op1 - mod ? 1 : 0) */
2476 return gen_rtx_IF_THEN_ELSE
2477 (mode, gen_rtx_GE (BImode, mod,
2478 gen_rtx_MINUS (mode, op1, mod)),
2479 const1_rtx, const0_rtx);
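/* For example, for 7 / 2 (unsigned) the quotient is 3 with MOD = 1;
   MOD >= OP1 - MOD (1 >= 1), so the adjustment is 1 and the result is 4,
   i.e. 3.5 rounded away from zero.  */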
 2482 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2483 any rtl. */
2485 static rtx
2486 convert_debug_memory_address (enum machine_mode mode, rtx x,
2487 addr_space_t as)
2489 enum machine_mode xmode = GET_MODE (x);
2491 #ifndef POINTERS_EXTEND_UNSIGNED
2492 gcc_assert (mode == Pmode
2493 || mode == targetm.addr_space.address_mode (as));
2494 gcc_assert (xmode == mode || xmode == VOIDmode);
2495 #else
2496 rtx temp;
2498 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2500 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2501 return x;
2503 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2504 x = simplify_gen_subreg (mode, x, xmode,
2505 subreg_lowpart_offset
2506 (mode, xmode));
2507 else if (POINTERS_EXTEND_UNSIGNED > 0)
2508 x = gen_rtx_ZERO_EXTEND (mode, x);
2509 else if (!POINTERS_EXTEND_UNSIGNED)
2510 x = gen_rtx_SIGN_EXTEND (mode, x);
2511 else
2513 switch (GET_CODE (x))
2515 case SUBREG:
2516 if ((SUBREG_PROMOTED_VAR_P (x)
2517 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2518 || (GET_CODE (SUBREG_REG (x)) == PLUS
2519 && REG_P (XEXP (SUBREG_REG (x), 0))
2520 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2521 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2522 && GET_MODE (SUBREG_REG (x)) == mode)
2523 return SUBREG_REG (x);
2524 break;
2525 case LABEL_REF:
2526 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2527 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2528 return temp;
2529 case SYMBOL_REF:
2530 temp = shallow_copy_rtx (x);
2531 PUT_MODE (temp, mode);
2532 return temp;
2533 case CONST:
2534 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2535 if (temp)
2536 temp = gen_rtx_CONST (mode, temp);
2537 return temp;
2538 case PLUS:
2539 case MINUS:
2540 if (CONST_INT_P (XEXP (x, 1)))
2542 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2543 if (temp)
2544 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2546 break;
2547 default:
2548 break;
 2550   /* Don't know how to express ptr_extend as an operation in debug info.  */
2551 return NULL;
2553 #endif /* POINTERS_EXTEND_UNSIGNED */
2555 return x;
2558 /* Return an RTX equivalent to the value of the parameter DECL. */
2560 static rtx
2561 expand_debug_parm_decl (tree decl)
2563 rtx incoming = DECL_INCOMING_RTL (decl);
2565 if (incoming
2566 && GET_MODE (incoming) != BLKmode
2567 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2568 || (MEM_P (incoming)
2569 && REG_P (XEXP (incoming, 0))
2570 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2572 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2574 #ifdef HAVE_window_save
2575 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2576 If the target machine has an explicit window save instruction, the
2577 actual entry value is the corresponding OUTGOING_REGNO instead. */
2578 if (REG_P (incoming)
2579 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2580 incoming
2581 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2582 OUTGOING_REGNO (REGNO (incoming)), 0);
2583 else if (MEM_P (incoming))
2585 rtx reg = XEXP (incoming, 0);
2586 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2588 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2589 incoming = replace_equiv_address_nv (incoming, reg);
2592 #endif
2594 ENTRY_VALUE_EXP (rtl) = incoming;
2595 return rtl;
2598 if (incoming
2599 && GET_MODE (incoming) != BLKmode
2600 && !TREE_ADDRESSABLE (decl)
2601 && MEM_P (incoming)
2602 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2603 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2604 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2605 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2606 return incoming;
2608 return NULL_RTX;
2611 /* Return an RTX equivalent to the value of the tree expression EXP. */
2613 static rtx
2614 expand_debug_expr (tree exp)
2616 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2617 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2618 enum machine_mode inner_mode = VOIDmode;
2619 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2620 addr_space_t as;
2622 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2624 case tcc_expression:
2625 switch (TREE_CODE (exp))
2627 case COND_EXPR:
2628 case DOT_PROD_EXPR:
2629 case WIDEN_MULT_PLUS_EXPR:
2630 case WIDEN_MULT_MINUS_EXPR:
2631 case FMA_EXPR:
2632 goto ternary;
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 case TRUTH_AND_EXPR:
2637 case TRUTH_OR_EXPR:
2638 case TRUTH_XOR_EXPR:
2639 goto binary;
2641 case TRUTH_NOT_EXPR:
2642 goto unary;
2644 default:
2645 break;
2647 break;
2649 ternary:
2650 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2651 if (!op2)
2652 return NULL_RTX;
2653 /* Fall through. */
2655 binary:
2656 case tcc_binary:
2657 case tcc_comparison:
2658 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2659 if (!op1)
2660 return NULL_RTX;
2661 /* Fall through. */
2663 unary:
2664 case tcc_unary:
2665 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2666 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2667 if (!op0)
2668 return NULL_RTX;
2669 break;
2671 case tcc_type:
2672 case tcc_statement:
2673 gcc_unreachable ();
2675 case tcc_constant:
2676 case tcc_exceptional:
2677 case tcc_declaration:
2678 case tcc_reference:
2679 case tcc_vl_exp:
2680 break;
2683 switch (TREE_CODE (exp))
2685 case STRING_CST:
2686 if (!lookup_constant_def (exp))
2688 if (strlen (TREE_STRING_POINTER (exp)) + 1
2689 != (size_t) TREE_STRING_LENGTH (exp))
2690 return NULL_RTX;
2691 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2692 op0 = gen_rtx_MEM (BLKmode, op0);
2693 set_mem_attributes (op0, exp, 0);
2694 return op0;
2696 /* Fall through... */
2698 case INTEGER_CST:
2699 case REAL_CST:
2700 case FIXED_CST:
2701 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2702 return op0;
2704 case COMPLEX_CST:
2705 gcc_assert (COMPLEX_MODE_P (mode));
2706 op0 = expand_debug_expr (TREE_REALPART (exp));
2707 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2708 return gen_rtx_CONCAT (mode, op0, op1);
2710 case DEBUG_EXPR_DECL:
2711 op0 = DECL_RTL_IF_SET (exp);
2713 if (op0)
2714 return op0;
2716 op0 = gen_rtx_DEBUG_EXPR (mode);
2717 DEBUG_EXPR_TREE_DECL (op0) = exp;
2718 SET_DECL_RTL (exp, op0);
2720 return op0;
2722 case VAR_DECL:
2723 case PARM_DECL:
2724 case FUNCTION_DECL:
2725 case LABEL_DECL:
2726 case CONST_DECL:
2727 case RESULT_DECL:
2728 op0 = DECL_RTL_IF_SET (exp);
2730 /* This decl was probably optimized away. */
2731 if (!op0)
2733 if (TREE_CODE (exp) != VAR_DECL
2734 || DECL_EXTERNAL (exp)
2735 || !TREE_STATIC (exp)
2736 || !DECL_NAME (exp)
2737 || DECL_HARD_REGISTER (exp)
2738 || DECL_IN_CONSTANT_POOL (exp)
2739 || mode == VOIDmode)
2740 return NULL;
2742 op0 = make_decl_rtl_for_debug (exp);
2743 if (!MEM_P (op0)
2744 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2745 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2746 return NULL;
2748 else
2749 op0 = copy_rtx (op0);
2751 if (GET_MODE (op0) == BLKmode
2752 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2753 below would ICE. While it is likely a FE bug,
2754 try to be robust here. See PR43166. */
2755 || mode == BLKmode
2756 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2758 gcc_assert (MEM_P (op0));
2759 op0 = adjust_address_nv (op0, mode, 0);
2760 return op0;
2763 /* Fall through. */
2765 adjust_mode:
2766 case PAREN_EXPR:
2767 case NOP_EXPR:
2768 case CONVERT_EXPR:
2770 inner_mode = GET_MODE (op0);
2772 if (mode == inner_mode)
2773 return op0;
2775 if (inner_mode == VOIDmode)
2777 if (TREE_CODE (exp) == SSA_NAME)
2778 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2779 else
2780 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2781 if (mode == inner_mode)
2782 return op0;
2785 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2787 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2788 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2789 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2790 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2791 else
2792 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2794 else if (FLOAT_MODE_P (mode))
2796 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2797 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2798 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2799 else
2800 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2802 else if (FLOAT_MODE_P (inner_mode))
2804 if (unsignedp)
2805 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2806 else
2807 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2809 else if (CONSTANT_P (op0)
2810 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2811 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2812 subreg_lowpart_offset (mode,
2813 inner_mode));
2814 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2815 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2816 : unsignedp)
2817 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2818 else
2819 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2821 return op0;
2824 case MEM_REF:
2825 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2827 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2828 TREE_OPERAND (exp, 0),
2829 TREE_OPERAND (exp, 1));
2830 if (newexp)
2831 return expand_debug_expr (newexp);
2833 /* FALLTHROUGH */
2834 case INDIRECT_REF:
2835 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2836 if (!op0)
2837 return NULL;
2839 if (TREE_CODE (exp) == MEM_REF)
2841 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2842 || (GET_CODE (op0) == PLUS
2843 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2844 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2845 Instead just use get_inner_reference. */
2846 goto component_ref;
2848 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2849 if (!op1 || !CONST_INT_P (op1))
2850 return NULL;
2852 op0 = plus_constant (op0, INTVAL (op1));
2855 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2856 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2857 else
2858 as = ADDR_SPACE_GENERIC;
2860 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2861 op0, as);
2862 if (op0 == NULL_RTX)
2863 return NULL;
2865 op0 = gen_rtx_MEM (mode, op0);
2866 set_mem_attributes (op0, exp, 0);
2867 if (TREE_CODE (exp) == MEM_REF
2868 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2869 set_mem_expr (op0, NULL_TREE);
2870 set_mem_addr_space (op0, as);
2872 return op0;
2874 case TARGET_MEM_REF:
2875 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2876 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2877 return NULL;
2879 op0 = expand_debug_expr
2880 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2881 if (!op0)
2882 return NULL;
2884 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2885 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2886 else
2887 as = ADDR_SPACE_GENERIC;
2889 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2890 op0, as);
2891 if (op0 == NULL_RTX)
2892 return NULL;
2894 op0 = gen_rtx_MEM (mode, op0);
2896 set_mem_attributes (op0, exp, 0);
2897 set_mem_addr_space (op0, as);
2899 return op0;
2901 component_ref:
2902 case ARRAY_REF:
2903 case ARRAY_RANGE_REF:
2904 case COMPONENT_REF:
2905 case BIT_FIELD_REF:
2906 case REALPART_EXPR:
2907 case IMAGPART_EXPR:
2908 case VIEW_CONVERT_EXPR:
2910 enum machine_mode mode1;
2911 HOST_WIDE_INT bitsize, bitpos;
2912 tree offset;
2913 int volatilep = 0;
2914 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2915 &mode1, &unsignedp, &volatilep, false);
2916 rtx orig_op0;
2918 if (bitsize == 0)
2919 return NULL;
2921 orig_op0 = op0 = expand_debug_expr (tem);
2923 if (!op0)
2924 return NULL;
2926 if (offset)
2928 enum machine_mode addrmode, offmode;
2930 if (!MEM_P (op0))
2931 return NULL;
2933 op0 = XEXP (op0, 0);
2934 addrmode = GET_MODE (op0);
2935 if (addrmode == VOIDmode)
2936 addrmode = Pmode;
2938 op1 = expand_debug_expr (offset);
2939 if (!op1)
2940 return NULL;
2942 offmode = GET_MODE (op1);
2943 if (offmode == VOIDmode)
2944 offmode = TYPE_MODE (TREE_TYPE (offset));
2946 if (addrmode != offmode)
2947 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2948 subreg_lowpart_offset (addrmode,
2949 offmode));
 2951        /* Don't use offset_address here; we don't need a
2952 recognizable address, and we don't want to generate
2953 code. */
2954 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2955 op0, op1));
2958 if (MEM_P (op0))
2960 if (mode1 == VOIDmode)
2961 /* Bitfield. */
2962 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2963 if (bitpos >= BITS_PER_UNIT)
2965 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2966 bitpos %= BITS_PER_UNIT;
2968 else if (bitpos < 0)
2970 HOST_WIDE_INT units
2971 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2972 op0 = adjust_address_nv (op0, mode1, units);
2973 bitpos += units * BITS_PER_UNIT;
2975 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2976 op0 = adjust_address_nv (op0, mode, 0);
2977 else if (GET_MODE (op0) != mode1)
2978 op0 = adjust_address_nv (op0, mode1, 0);
2979 else
2980 op0 = copy_rtx (op0);
2981 if (op0 == orig_op0)
2982 op0 = shallow_copy_rtx (op0);
2983 set_mem_attributes (op0, exp, 0);
2986 if (bitpos == 0 && mode == GET_MODE (op0))
2987 return op0;
2989 if (bitpos < 0)
2990 return NULL;
2992 if (GET_MODE (op0) == BLKmode)
2993 return NULL;
2995 if ((bitpos % BITS_PER_UNIT) == 0
2996 && bitsize == GET_MODE_BITSIZE (mode1))
2998 enum machine_mode opmode = GET_MODE (op0);
3000 if (opmode == VOIDmode)
3001 opmode = TYPE_MODE (TREE_TYPE (tem));
3003 /* This condition may hold if we're expanding the address
3004 right past the end of an array that turned out not to
3005 be addressable (i.e., the address was only computed in
3006 debug stmts). The gen_subreg below would rightfully
3007 crash, and the address doesn't really exist, so just
3008 drop it. */
3009 if (bitpos >= GET_MODE_BITSIZE (opmode))
3010 return NULL;
3012 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3013 return simplify_gen_subreg (mode, op0, opmode,
3014 bitpos / BITS_PER_UNIT);
3017 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3018 && TYPE_UNSIGNED (TREE_TYPE (exp))
3019 ? SIGN_EXTRACT
3020 : ZERO_EXTRACT, mode,
3021 GET_MODE (op0) != VOIDmode
3022 ? GET_MODE (op0)
3023 : TYPE_MODE (TREE_TYPE (tem)),
3024 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3027 case ABS_EXPR:
3028 return simplify_gen_unary (ABS, mode, op0, mode);
3030 case NEGATE_EXPR:
3031 return simplify_gen_unary (NEG, mode, op0, mode);
3033 case BIT_NOT_EXPR:
3034 return simplify_gen_unary (NOT, mode, op0, mode);
3036 case FLOAT_EXPR:
3037 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3038 0)))
3039 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3040 inner_mode);
3042 case FIX_TRUNC_EXPR:
3043 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3044 inner_mode);
3046 case POINTER_PLUS_EXPR:
3047 /* For the rare target where pointers are not the same size as
3048 size_t, we need to check for mis-matched modes and correct
3049 the addend. */
3050 if (op0 && op1
3051 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3052 && GET_MODE (op0) != GET_MODE (op1))
3054 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3055 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3056 GET_MODE (op1));
3057 else
3058 /* We always sign-extend, regardless of the signedness of
3059 the operand, because the operand is always unsigned
3060 here even if the original C expression is signed. */
3061 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3062 GET_MODE (op1));
3064 /* Fall through. */
3065 case PLUS_EXPR:
3066 return simplify_gen_binary (PLUS, mode, op0, op1);
3068 case MINUS_EXPR:
3069 return simplify_gen_binary (MINUS, mode, op0, op1);
3071 case MULT_EXPR:
3072 return simplify_gen_binary (MULT, mode, op0, op1);
3074 case RDIV_EXPR:
3075 case TRUNC_DIV_EXPR:
3076 case EXACT_DIV_EXPR:
3077 if (unsignedp)
3078 return simplify_gen_binary (UDIV, mode, op0, op1);
3079 else
3080 return simplify_gen_binary (DIV, mode, op0, op1);
3082 case TRUNC_MOD_EXPR:
3083 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3085 case FLOOR_DIV_EXPR:
3086 if (unsignedp)
3087 return simplify_gen_binary (UDIV, mode, op0, op1);
3088 else
3090 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3091 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3092 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3093 return simplify_gen_binary (PLUS, mode, div, adj);
3096 case FLOOR_MOD_EXPR:
3097 if (unsignedp)
3098 return simplify_gen_binary (UMOD, mode, op0, op1);
3099 else
3101 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3102 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3103 adj = simplify_gen_unary (NEG, mode,
3104 simplify_gen_binary (MULT, mode, adj, op1),
3105 mode);
3106 return simplify_gen_binary (PLUS, mode, mod, adj);
3109 case CEIL_DIV_EXPR:
3110 if (unsignedp)
3112 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3113 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3114 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3115 return simplify_gen_binary (PLUS, mode, div, adj);
3117 else
3119 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3120 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3121 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3122 return simplify_gen_binary (PLUS, mode, div, adj);
3125 case CEIL_MOD_EXPR:
3126 if (unsignedp)
3128 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3129 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3130 adj = simplify_gen_unary (NEG, mode,
3131 simplify_gen_binary (MULT, mode, adj, op1),
3132 mode);
3133 return simplify_gen_binary (PLUS, mode, mod, adj);
3135 else
3137 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3138 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3139 adj = simplify_gen_unary (NEG, mode,
3140 simplify_gen_binary (MULT, mode, adj, op1),
3141 mode);
3142 return simplify_gen_binary (PLUS, mode, mod, adj);
3145 case ROUND_DIV_EXPR:
3146 if (unsignedp)
3148 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3149 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3150 rtx adj = round_udiv_adjust (mode, mod, op1);
3151 return simplify_gen_binary (PLUS, mode, div, adj);
3153 else
3155 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3156 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3157 rtx adj = round_sdiv_adjust (mode, mod, op1);
3158 return simplify_gen_binary (PLUS, mode, div, adj);
3161 case ROUND_MOD_EXPR:
3162 if (unsignedp)
3164 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3165 rtx adj = round_udiv_adjust (mode, mod, op1);
3166 adj = simplify_gen_unary (NEG, mode,
3167 simplify_gen_binary (MULT, mode, adj, op1),
3168 mode);
3169 return simplify_gen_binary (PLUS, mode, mod, adj);
3171 else
3173 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3174 rtx adj = round_sdiv_adjust (mode, mod, op1);
3175 adj = simplify_gen_unary (NEG, mode,
3176 simplify_gen_binary (MULT, mode, adj, op1),
3177 mode);
3178 return simplify_gen_binary (PLUS, mode, mod, adj);
3181 case LSHIFT_EXPR:
3182 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3184 case RSHIFT_EXPR:
3185 if (unsignedp)
3186 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3187 else
3188 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3190 case LROTATE_EXPR:
3191 return simplify_gen_binary (ROTATE, mode, op0, op1);
3193 case RROTATE_EXPR:
3194 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3196 case MIN_EXPR:
3197 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3199 case MAX_EXPR:
3200 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3202 case BIT_AND_EXPR:
3203 case TRUTH_AND_EXPR:
3204 return simplify_gen_binary (AND, mode, op0, op1);
3206 case BIT_IOR_EXPR:
3207 case TRUTH_OR_EXPR:
3208 return simplify_gen_binary (IOR, mode, op0, op1);
3210 case BIT_XOR_EXPR:
3211 case TRUTH_XOR_EXPR:
3212 return simplify_gen_binary (XOR, mode, op0, op1);
3214 case TRUTH_ANDIF_EXPR:
3215 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3217 case TRUTH_ORIF_EXPR:
3218 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3220 case TRUTH_NOT_EXPR:
3221 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3223 case LT_EXPR:
3224 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3225 op0, op1);
3227 case LE_EXPR:
3228 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3229 op0, op1);
3231 case GT_EXPR:
3232 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3233 op0, op1);
3235 case GE_EXPR:
3236 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3237 op0, op1);
3239 case EQ_EXPR:
3240 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3242 case NE_EXPR:
3243 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3245 case UNORDERED_EXPR:
3246 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3248 case ORDERED_EXPR:
3249 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3251 case UNLT_EXPR:
3252 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3254 case UNLE_EXPR:
3255 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3257 case UNGT_EXPR:
3258 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3260 case UNGE_EXPR:
3261 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3263 case UNEQ_EXPR:
3264 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3266 case LTGT_EXPR:
3267 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3269 case COND_EXPR:
3270 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3272 case COMPLEX_EXPR:
3273 gcc_assert (COMPLEX_MODE_P (mode));
3274 if (GET_MODE (op0) == VOIDmode)
3275 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3276 if (GET_MODE (op1) == VOIDmode)
3277 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3278 return gen_rtx_CONCAT (mode, op0, op1);
3280 case CONJ_EXPR:
3281 if (GET_CODE (op0) == CONCAT)
3282 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3283 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3284 XEXP (op0, 1),
3285 GET_MODE_INNER (mode)));
3286 else
3288 enum machine_mode imode = GET_MODE_INNER (mode);
3289 rtx re, im;
3291 if (MEM_P (op0))
3293 re = adjust_address_nv (op0, imode, 0);
3294 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3296 else
3298 enum machine_mode ifmode = int_mode_for_mode (mode);
3299 enum machine_mode ihmode = int_mode_for_mode (imode);
3300 rtx halfsize;
3301 if (ifmode == BLKmode || ihmode == BLKmode)
3302 return NULL;
3303 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3304 re = op0;
3305 if (mode != ifmode)
3306 re = gen_rtx_SUBREG (ifmode, re, 0);
3307 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3308 if (imode != ihmode)
3309 re = gen_rtx_SUBREG (imode, re, 0);
3310 im = copy_rtx (op0);
3311 if (mode != ifmode)
3312 im = gen_rtx_SUBREG (ifmode, im, 0);
3313 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3314 if (imode != ihmode)
3315 im = gen_rtx_SUBREG (imode, im, 0);
3317 im = gen_rtx_NEG (imode, im);
3318 return gen_rtx_CONCAT (mode, re, im);
3321 case ADDR_EXPR:
3322 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3323 if (!op0 || !MEM_P (op0))
3325 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3326 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3327 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3328 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3329 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3330 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3332 if (handled_component_p (TREE_OPERAND (exp, 0)))
3334 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3335 tree decl
3336 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3337 &bitoffset, &bitsize, &maxsize);
3338 if ((TREE_CODE (decl) == VAR_DECL
3339 || TREE_CODE (decl) == PARM_DECL
3340 || TREE_CODE (decl) == RESULT_DECL)
3341 && (!TREE_ADDRESSABLE (decl)
3342 || target_for_debug_bind (decl))
3343 && (bitoffset % BITS_PER_UNIT) == 0
3344 && bitsize > 0
3345 && bitsize == maxsize)
3346 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3347 bitoffset / BITS_PER_UNIT);
3350 return NULL;
3353 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3354 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3356 return op0;
3358 case VECTOR_CST:
3360 unsigned i;
3362 op0 = gen_rtx_CONCATN
3363 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3365 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3367 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3368 if (!op1)
3369 return NULL;
3370 XVECEXP (op0, 0, i) = op1;
3373 return op0;
3376 case CONSTRUCTOR:
3377 if (TREE_CLOBBER_P (exp))
3378 return NULL;
3379 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3381 unsigned i;
3382 tree val;
3384 op0 = gen_rtx_CONCATN
3385 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3389 op1 = expand_debug_expr (val);
3390 if (!op1)
3391 return NULL;
3392 XVECEXP (op0, 0, i) = op1;
3395 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3397 op1 = expand_debug_expr
3398 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3400 if (!op1)
3401 return NULL;
3403 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3404 XVECEXP (op0, 0, i) = op1;
3407 return op0;
3409 else
3410 goto flag_unsupported;
3412 case CALL_EXPR:
3413 /* ??? Maybe handle some builtins? */
3414 return NULL;
3416 case SSA_NAME:
3418 gimple g = get_gimple_for_ssa_name (exp);
3419 if (g)
3421 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3422 if (!op0)
3423 return NULL;
3425 else
3427 int part = var_to_partition (SA.map, exp);
3429 if (part == NO_PARTITION)
 3431            /* If this is a reference to an incoming value of a parameter
 3432               that is never used in the code, or whose incoming
 3433               value is never used in the code, use the PARM_DECL's
 3434               DECL_RTL if set.  */
3435 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3436 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3438 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3439 if (op0)
3440 goto adjust_mode;
3441 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3442 if (op0)
3443 goto adjust_mode;
3445 return NULL;
3448 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3450 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3452 goto adjust_mode;
3455 case ERROR_MARK:
3456 return NULL;
 3458     /* Vector stuff.  For most of these tree codes we have no corresponding rtl codes.  */
3459 case REALIGN_LOAD_EXPR:
3460 case REDUC_MAX_EXPR:
3461 case REDUC_MIN_EXPR:
3462 case REDUC_PLUS_EXPR:
3463 case VEC_COND_EXPR:
3464 case VEC_LSHIFT_EXPR:
3465 case VEC_PACK_FIX_TRUNC_EXPR:
3466 case VEC_PACK_SAT_EXPR:
3467 case VEC_PACK_TRUNC_EXPR:
3468 case VEC_RSHIFT_EXPR:
3469 case VEC_UNPACK_FLOAT_HI_EXPR:
3470 case VEC_UNPACK_FLOAT_LO_EXPR:
3471 case VEC_UNPACK_HI_EXPR:
3472 case VEC_UNPACK_LO_EXPR:
3473 case VEC_WIDEN_MULT_HI_EXPR:
3474 case VEC_WIDEN_MULT_LO_EXPR:
3475 case VEC_WIDEN_LSHIFT_HI_EXPR:
3476 case VEC_WIDEN_LSHIFT_LO_EXPR:
3477 case VEC_PERM_EXPR:
3478 return NULL;
3480 /* Misc codes. */
3481 case ADDR_SPACE_CONVERT_EXPR:
3482 case FIXED_CONVERT_EXPR:
3483 case OBJ_TYPE_REF:
3484 case WITH_SIZE_EXPR:
3485 return NULL;
3487 case DOT_PROD_EXPR:
3488 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3489 && SCALAR_INT_MODE_P (mode))
 3492      op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3493 0)))
3494 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3495 inner_mode);
 3497      op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3498 1)))
3499 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3500 inner_mode);
3501 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3502 return simplify_gen_binary (PLUS, mode, op0, op2);
3504 return NULL;
3506 case WIDEN_MULT_EXPR:
3507 case WIDEN_MULT_PLUS_EXPR:
3508 case WIDEN_MULT_MINUS_EXPR:
3509 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3510 && SCALAR_INT_MODE_P (mode))
3512 inner_mode = GET_MODE (op0);
3513 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3514 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3515 else
3516 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3517 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3518 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3519 else
3520 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3521 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3522 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3523 return op0;
3524 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3525 return simplify_gen_binary (PLUS, mode, op0, op2);
3526 else
3527 return simplify_gen_binary (MINUS, mode, op2, op0);
3529 return NULL;
3531 case WIDEN_SUM_EXPR:
3532 case WIDEN_LSHIFT_EXPR:
3533 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3534 && SCALAR_INT_MODE_P (mode))
 3537      op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3538 0)))
3539 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3540 inner_mode);
3541 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3542 ? ASHIFT : PLUS, mode, op0, op1);
3544 return NULL;
3546 case FMA_EXPR:
3547 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3549 default:
3550 flag_unsupported:
3551 #ifdef ENABLE_CHECKING
3552 debug_tree (exp);
3553 gcc_unreachable ();
3554 #else
3555 return NULL;
3556 #endif
3560 /* Return an RTX equivalent to the source bind value of the tree expression
3561 EXP. */
3563 static rtx
3564 expand_debug_source_expr (tree exp)
3566 rtx op0 = NULL_RTX;
3567 enum machine_mode mode = VOIDmode, inner_mode;
3569 switch (TREE_CODE (exp))
3571 case PARM_DECL:
3573 mode = DECL_MODE (exp);
3574 op0 = expand_debug_parm_decl (exp);
3575 if (op0)
3576 break;
3577 /* See if this isn't an argument that has been completely
3578 optimized out. */
3579 if (!DECL_RTL_SET_P (exp)
3580 && !DECL_INCOMING_RTL (exp)
3581 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3583 tree aexp = exp;
3584 if (DECL_ABSTRACT_ORIGIN (exp))
3585 aexp = DECL_ABSTRACT_ORIGIN (exp);
3586 if (DECL_CONTEXT (aexp)
3587 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3589 VEC(tree, gc) **debug_args;
3590 unsigned int ix;
3591 tree ddecl;
3592 #ifdef ENABLE_CHECKING
3593 tree parm;
3594 for (parm = DECL_ARGUMENTS (current_function_decl);
3595 parm; parm = DECL_CHAIN (parm))
3596 gcc_assert (parm != exp
3597 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3598 #endif
3599 debug_args = decl_debug_args_lookup (current_function_decl);
3600 if (debug_args != NULL)
3602 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3603 ix += 2)
3604 if (ddecl == aexp)
3605 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3609 break;
3611 default:
3612 break;
3615 if (op0 == NULL_RTX)
3616 return NULL_RTX;
3618 inner_mode = GET_MODE (op0);
3619 if (mode == inner_mode)
3620 return op0;
3622 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3624 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3625 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3626 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3627 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3628 else
3629 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3631 else if (FLOAT_MODE_P (mode))
3632 gcc_unreachable ();
3633 else if (FLOAT_MODE_P (inner_mode))
3635 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3636 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3637 else
3638 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3640 else if (CONSTANT_P (op0)
3641 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3642 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3643 subreg_lowpart_offset (mode, inner_mode));
3644 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3645 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3646 else
3647 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3649 return op0;
3652 /* Expand the _LOCs in debug insns. We run this after expanding all
3653 regular insns, so that any variables referenced in the function
3654 will have their DECL_RTLs set. */
3656 static void
3657 expand_debug_locations (void)
3659 rtx insn;
3660 rtx last = get_last_insn ();
3661 int save_strict_alias = flag_strict_aliasing;
3663 /* New alias sets while setting up memory attributes cause
 3664      -fcompare-debug failures, even though they don't bring about any
3665 codegen changes. */
3666 flag_strict_aliasing = 0;
3668 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3669 if (DEBUG_INSN_P (insn))
3671 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3672 rtx val;
3673 enum machine_mode mode;
3675 if (value == NULL_TREE)
3676 val = NULL_RTX;
3677 else
3679 if (INSN_VAR_LOCATION_STATUS (insn)
3680 == VAR_INIT_STATUS_UNINITIALIZED)
3681 val = expand_debug_source_expr (value);
3682 else
3683 val = expand_debug_expr (value);
3684 gcc_assert (last == get_last_insn ());
3687 if (!val)
3688 val = gen_rtx_UNKNOWN_VAR_LOC ();
3689 else
3691 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3693 gcc_assert (mode == GET_MODE (val)
3694 || (GET_MODE (val) == VOIDmode
3695 && (CONST_INT_P (val)
3696 || GET_CODE (val) == CONST_FIXED
3697 || GET_CODE (val) == CONST_DOUBLE
3698 || GET_CODE (val) == LABEL_REF)));
3701 INSN_VAR_LOCATION_LOC (insn) = val;
3704 flag_strict_aliasing = save_strict_alias;
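/* Purely illustrative (names and register numbers made up): a binding
   such as  # DEBUG x => a_1 + 1  whose value expands successfully ends up
   with a location expression along the lines of
   (plus:SI (reg:SI 60) (const_int 1)) stored back into the debug insn,
   while a value that cannot be expanded is replaced by the
   gen_rtx_UNKNOWN_VAR_LOC () marker.  */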
3707 /* Expand basic block BB from GIMPLE trees to RTL. */
3709 static basic_block
3710 expand_gimple_basic_block (basic_block bb)
3712 gimple_stmt_iterator gsi;
3713 gimple_seq stmts;
3714 gimple stmt = NULL;
3715 rtx note, last;
3716 edge e;
3717 edge_iterator ei;
3718 void **elt;
3720 if (dump_file)
3721 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3722 bb->index);
3724 /* Note that since we are now transitioning from GIMPLE to RTL, we
3725 cannot use the gsi_*_bb() routines because they expect the basic
3726 block to be in GIMPLE, instead of RTL. Therefore, we need to
3727 access the BB sequence directly. */
3728 stmts = bb_seq (bb);
3729 bb->il.gimple = NULL;
3730 rtl_profile_for_bb (bb);
3731 init_rtl_bb_info (bb);
3732 bb->flags |= BB_RTL;
 3734   /* Remove the RETURN_EXPR if we may fall through to the exit
3735 instead. */
3736 gsi = gsi_last (stmts);
3737 if (!gsi_end_p (gsi)
3738 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3740 gimple ret_stmt = gsi_stmt (gsi);
3742 gcc_assert (single_succ_p (bb));
3743 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3745 if (bb->next_bb == EXIT_BLOCK_PTR
3746 && !gimple_return_retval (ret_stmt))
3748 gsi_remove (&gsi, false);
3749 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3753 gsi = gsi_start (stmts);
3754 if (!gsi_end_p (gsi))
3756 stmt = gsi_stmt (gsi);
3757 if (gimple_code (stmt) != GIMPLE_LABEL)
3758 stmt = NULL;
3761 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3763 if (stmt || elt)
3765 last = get_last_insn ();
3767 if (stmt)
3769 expand_gimple_stmt (stmt);
3770 gsi_next (&gsi);
3773 if (elt)
3774 emit_label ((rtx) *elt);
 3776       /* Java emits line number notes at the top of labels.
3777 ??? Make this go away once line number notes are obsoleted. */
3778 BB_HEAD (bb) = NEXT_INSN (last);
3779 if (NOTE_P (BB_HEAD (bb)))
3780 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3781 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3783 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3785 else
3786 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3788 NOTE_BASIC_BLOCK (note) = bb;
3790 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3792 basic_block new_bb;
3794 stmt = gsi_stmt (gsi);
3796 /* If this statement is a non-debug one, and we generate debug
3797 insns, then this one might be the last real use of a TERed
3798 SSA_NAME, but where there are still some debug uses further
3799 down. Expanding the current SSA name in such further debug
 3800          uses by its RHS might lead to wrong debug info, as coalescing
3801 might make the operands of such RHS be placed into the same
3802 pseudo as something else. Like so:
3803 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3804 use(a_1);
3805 a_2 = ...
3806 #DEBUG ... => a_1
3807 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3808          If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3809 the write to a_2 would actually have clobbered the place which
3810 formerly held a_0.
3812 So, instead of that, we recognize the situation, and generate
3813 debug temporaries at the last real use of TERed SSA names:
3814 a_1 = a_0 + 1;
3815 #DEBUG #D1 => a_1
3816 use(a_1);
3817 a_2 = ...
3818 #DEBUG ... => #D1
3820 if (MAY_HAVE_DEBUG_INSNS
3821 && SA.values
3822 && !is_gimple_debug (stmt))
3824 ssa_op_iter iter;
3825 tree op;
3826 gimple def;
3828 location_t sloc = get_curr_insn_source_location ();
3829 tree sblock = get_curr_insn_block ();
3831 /* Look for SSA names that have their last use here (TERed
3832 names always have only one real use). */
3833 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3834 if ((def = get_gimple_for_ssa_name (op)))
3836 imm_use_iterator imm_iter;
3837 use_operand_p use_p;
3838 bool have_debug_uses = false;
3840 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3842 if (gimple_debug_bind_p (USE_STMT (use_p)))
3844 have_debug_uses = true;
3845 break;
3849 if (have_debug_uses)
 3851              /* OP is a TERed SSA name, with DEF its defining
 3852                 statement, and OP is used in further debug
3853 instructions. Generate a debug temporary, and
3854 replace all uses of OP in debug insns with that
3855 temporary. */
3856 gimple debugstmt;
3857 tree value = gimple_assign_rhs_to_tree (def);
3858 tree vexpr = make_node (DEBUG_EXPR_DECL);
3859 rtx val;
3860 enum machine_mode mode;
3862 set_curr_insn_source_location (gimple_location (def));
3863 set_curr_insn_block (gimple_block (def));
3865 DECL_ARTIFICIAL (vexpr) = 1;
3866 TREE_TYPE (vexpr) = TREE_TYPE (value);
3867 if (DECL_P (value))
3868 mode = DECL_MODE (value);
3869 else
3870 mode = TYPE_MODE (TREE_TYPE (value));
3871 DECL_MODE (vexpr) = mode;
3873 val = gen_rtx_VAR_LOCATION
3874 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3876 emit_debug_insn (val);
3878 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3880 if (!gimple_debug_bind_p (debugstmt))
3881 continue;
3883 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3884 SET_USE (use_p, vexpr);
3886 update_stmt (debugstmt);
3890 set_curr_insn_source_location (sloc);
3891 set_curr_insn_block (sblock);
3894 currently_expanding_gimple_stmt = stmt;
3896 /* Expand this statement, then evaluate the resulting RTL and
3897 fix up the CFG accordingly. */
3898 if (gimple_code (stmt) == GIMPLE_COND)
3900 new_bb = expand_gimple_cond (bb, stmt);
3901 if (new_bb)
3902 return new_bb;
3904 else if (gimple_debug_bind_p (stmt))
3906 location_t sloc = get_curr_insn_source_location ();
3907 tree sblock = get_curr_insn_block ();
3908 gimple_stmt_iterator nsi = gsi;
3910 for (;;)
3912 tree var = gimple_debug_bind_get_var (stmt);
3913 tree value;
3914 rtx val;
3915 enum machine_mode mode;
3917 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3918 && TREE_CODE (var) != LABEL_DECL
3919 && !target_for_debug_bind (var))
3920 goto delink_debug_stmt;
3922 if (gimple_debug_bind_has_value_p (stmt))
3923 value = gimple_debug_bind_get_value (stmt);
3924 else
3925 value = NULL_TREE;
3927 last = get_last_insn ();
3929 set_curr_insn_source_location (gimple_location (stmt));
3930 set_curr_insn_block (gimple_block (stmt));
3932 if (DECL_P (var))
3933 mode = DECL_MODE (var);
3934 else
3935 mode = TYPE_MODE (TREE_TYPE (var));
3937 val = gen_rtx_VAR_LOCATION
3938 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3940 emit_debug_insn (val);
3942 if (dump_file && (dump_flags & TDF_DETAILS))
3944 /* We can't dump the insn with a TREE where an RTX
3945 is expected. */
3946 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3947 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3948 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3951 delink_debug_stmt:
3952 /* In order not to generate too many debug temporaries,
3953 we delink all uses of debug statements we already expanded.
3954 Therefore debug statements between definition and real
3955 use of TERed SSA names will continue to use the SSA name,
3956 and not be replaced with debug temps. */
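 /* For illustration, continuing the sketch above: debug binds that were
    already expanded, i.e. those seen before the last real use of a_1,
    keep referring to a_1 itself; only the binds encountered after that
    real use are rewritten to use #D1.  */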
3957 delink_stmt_imm_use (stmt);
3959 gsi = nsi;
3960 gsi_next (&nsi);
3961 if (gsi_end_p (nsi))
3962 break;
3963 stmt = gsi_stmt (nsi);
3964 if (!gimple_debug_bind_p (stmt))
3965 break;
3968 set_curr_insn_source_location (sloc);
3969 set_curr_insn_block (sblock);
3971 else if (gimple_debug_source_bind_p (stmt))
3973 location_t sloc = get_curr_insn_source_location ();
3974 tree sblock = get_curr_insn_block ();
3975 tree var = gimple_debug_source_bind_get_var (stmt);
3976 tree value = gimple_debug_source_bind_get_value (stmt);
3977 rtx val;
3978 enum machine_mode mode;
3980 last = get_last_insn ();
3982 set_curr_insn_source_location (gimple_location (stmt));
3983 set_curr_insn_block (gimple_block (stmt));
3985 mode = DECL_MODE (var);
3987 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3988 VAR_INIT_STATUS_UNINITIALIZED);
3990 emit_debug_insn (val);
3992 if (dump_file && (dump_flags & TDF_DETAILS))
3994 /* We can't dump the insn with a TREE where an RTX
3995 is expected. */
3996 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3997 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3998 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4001 set_curr_insn_source_location (sloc);
4002 set_curr_insn_block (sblock);
4004 else
4006 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4008 bool can_fallthru;
4009 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4010 if (new_bb)
4012 if (can_fallthru)
4013 bb = new_bb;
4014 else
4015 return new_bb;
4018 else
4020 def_operand_p def_p;
4021 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4023 if (def_p != NULL)
4025 /* Ignore this stmt if it is in the list of
4026 replaceable expressions. */
4027 if (SA.values
4028 && bitmap_bit_p (SA.values,
4029 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4030 continue;
4032 last = expand_gimple_stmt (stmt);
4033 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4038 currently_expanding_gimple_stmt = NULL;
4040 /* Expand implicit goto and convert goto_locus. */
4041 FOR_EACH_EDGE (e, ei, bb->succs)
4043 if (e->goto_locus && e->goto_block)
4045 set_curr_insn_source_location (e->goto_locus);
4046 set_curr_insn_block (e->goto_block);
4047 e->goto_locus = curr_insn_locator ();
4049 e->goto_block = NULL;
4050 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4052 emit_jump (label_rtx_for_bb (e->dest));
4053 e->flags &= ~EDGE_FALLTHRU;
4057 /* Expanded RTL can create a jump in the last instruction of the block.
4058 That jump might later be assumed to be a jump to the successor and break edge insertion.
4059 We need to insert a dummy move to prevent this. PR41440. */
4060 if (single_succ_p (bb)
4061 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4062 && (last = get_last_insn ())
4063 && JUMP_P (last))
4065 rtx dummy = gen_reg_rtx (SImode);
4066 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
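 /* The self-move of the fresh pseudo DUMMY has no effect and should be
    removed as dead by later RTL passes; its only purpose is to ensure
    the block no longer ends in a jump, so edge insertion stays safe.  */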
4069 do_pending_stack_adjust ();
4071 /* Find the block tail. The last insn in the block is the insn
4072 before a barrier and/or table jump insn. */
4073 last = get_last_insn ();
4074 if (BARRIER_P (last))
4075 last = PREV_INSN (last);
4076 if (JUMP_TABLE_DATA_P (last))
4077 last = PREV_INSN (PREV_INSN (last));
4078 BB_END (bb) = last;
4080 update_bb_for_insn (bb);
4082 return bb;
4086 /* Create a basic block for initialization code. */
4088 static basic_block
4089 construct_init_block (void)
4091 basic_block init_block, first_block;
4092 edge e = NULL;
4093 int flags;
4095 /* Multiple entry points not supported yet. */
4096 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4097 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4098 init_rtl_bb_info (EXIT_BLOCK_PTR);
4099 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4100 EXIT_BLOCK_PTR->flags |= BB_RTL;
4102 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4104 /* When the entry edge points to the first basic block, we don't need a jump;
4105 otherwise we have to jump to the proper target. */
4106 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4108 tree label = gimple_block_label (e->dest);
4110 emit_jump (label_rtx (label));
4111 flags = 0;
4113 else
4114 flags = EDGE_FALLTHRU;
4116 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4117 get_last_insn (),
4118 ENTRY_BLOCK_PTR);
4119 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4120 init_block->count = ENTRY_BLOCK_PTR->count;
4121 if (e)
4123 first_block = e->dest;
4124 redirect_edge_succ (e, init_block);
4125 e = make_edge (init_block, first_block, flags);
4127 else
4128 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4129 e->probability = REG_BR_PROB_BASE;
4130 e->count = ENTRY_BLOCK_PTR->count;
4132 update_bb_for_insn (init_block);
4133 return init_block;
4136 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4137 found in the block tree. */
4139 static void
4140 set_block_levels (tree block, int level)
4142 while (block)
4144 BLOCK_NUMBER (block) = level;
4145 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4146 block = BLOCK_CHAIN (block);
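 /* For illustration (hypothetical block names): given a block tree

	OUTER                  gets BLOCK_NUMBER 0
	  INNER_A              gets BLOCK_NUMBER 1
	    NESTED             gets BLOCK_NUMBER 2
	  INNER_B              gets BLOCK_NUMBER 1

    the loop above walks siblings via BLOCK_CHAIN at the same level and
    recurses into BLOCK_SUBBLOCKS with LEVEL + 1.  */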
4150 /* Create a block containing landing pads and similar stuff. */
4152 static void
4153 construct_exit_block (void)
4155 rtx head = get_last_insn ();
4156 rtx end;
4157 basic_block exit_block;
4158 edge e, e2;
4159 unsigned ix;
4160 edge_iterator ei;
4161 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4163 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4165 /* Make sure the locus is set to the end of the function, so that
4166 epilogue line numbers and warnings are set properly. */
4167 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4168 input_location = cfun->function_end_locus;
4170 /* The following insns belong to the top scope. */
4171 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4173 /* Generate rtl for function exit. */
4174 expand_function_end ();
4176 end = get_last_insn ();
4177 if (head == end)
4178 return;
4179 /* While emitting the function end we may have moved the end of the last basic block; restore it. */
4181 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4182 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4183 head = NEXT_INSN (head);
4184 exit_block = create_basic_block (NEXT_INSN (head), end,
4185 EXIT_BLOCK_PTR->prev_bb);
4186 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4187 exit_block->count = EXIT_BLOCK_PTR->count;
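 /* Redirect every non-abnormal predecessor edge of EXIT_BLOCK_PTR to the
    new exit block.  redirect_edge_succ removes the redirected edge from
    the predecessor vector being scanned, so IX only advances past the
    abnormal edges that are kept.  */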
4189 ix = 0;
4190 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4192 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4193 if (!(e->flags & EDGE_ABNORMAL))
4194 redirect_edge_succ (e, exit_block);
4195 else
4196 ix++;
4199 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4200 e->probability = REG_BR_PROB_BASE;
4201 e->count = EXIT_BLOCK_PTR->count;
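 /* Apart from the fallthru edge just created, the predecessors still
    attached to EXIT_BLOCK_PTR are the abnormal edges that bypass the new
    exit block; subtract their counts and frequencies from the fallthru
    edge and from the new block itself, clamping at zero below.  */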
4202 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4203 if (e2 != e)
4205 e->count -= e2->count;
4206 exit_block->count -= e2->count;
4207 exit_block->frequency -= EDGE_FREQUENCY (e2);
4209 if (e->count < 0)
4210 e->count = 0;
4211 if (exit_block->count < 0)
4212 exit_block->count = 0;
4213 if (exit_block->frequency < 0)
4214 exit_block->frequency = 0;
4215 update_bb_for_insn (exit_block);
4218 /* Helper function for discover_nonconstant_array_refs.
4219 Look for ARRAY_REF nodes with non-constant indexes and mark their
4220 base decls addressable. */
4222 static tree
4223 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4224 void *data ATTRIBUTE_UNUSED)
4226 tree t = *tp;
4228 if (IS_TYPE_OR_DECL_P (t))
4229 *walk_subtrees = 0;
4230 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4232 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4233 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4234 && (!TREE_OPERAND (t, 2)
4235 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4236 || (TREE_CODE (t) == COMPONENT_REF
4237 && (!TREE_OPERAND (t,2)
4238 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4239 || TREE_CODE (t) == BIT_FIELD_REF
4240 || TREE_CODE (t) == REALPART_EXPR
4241 || TREE_CODE (t) == IMAGPART_EXPR
4242 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4243 || CONVERT_EXPR_P (t))
4244 t = TREE_OPERAND (t, 0);
4246 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4248 t = get_base_address (t);
4249 if (t && DECL_P (t)
4250 && DECL_MODE (t) != BLKmode)
4251 TREE_ADDRESSABLE (t) = 1;
4254 *walk_subtrees = 0;
4257 return NULL_TREE;
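 /* For illustration (hypothetical access): given A[I] where I is not a
    gimple invariant, the walk above strips the handled components wrapped
    around the reference, finds the ARRAY_REF with the variable index, and,
    if the base decl A has a non-BLKmode DECL_MODE (so it could otherwise
    be expanded into a single register), marks A TREE_ADDRESSABLE to force
    it into memory.  */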
4260 /* RTL expansion is not able to compile array references with variable
4261 offsets for arrays stored in a single register. Discover such
4262 expressions and mark variables as addressable to avoid this
4263 scenario. */
4265 static void
4266 discover_nonconstant_array_refs (void)
4268 basic_block bb;
4269 gimple_stmt_iterator gsi;
4271 FOR_EACH_BB (bb)
4272 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4274 gimple stmt = gsi_stmt (gsi);
4275 if (!is_gimple_debug (stmt))
4276 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4280 /* This function sets crtl->args.internal_arg_pointer to a virtual
4281 register if DRAP is needed. The local register allocator will replace
4282 virtual_incoming_args_rtx with the virtual register. */
4284 static void
4285 expand_stack_alignment (void)
4287 rtx drap_rtx;
4288 unsigned int preferred_stack_boundary;
4290 if (! SUPPORTS_STACK_ALIGNMENT)
4291 return;
4293 if (cfun->calls_alloca
4294 || cfun->has_nonlocal_label
4295 || crtl->has_nonlocal_goto)
4296 crtl->need_drap = true;
4298 /* Call update_stack_boundary here again to update incoming stack
4299 boundary. It may set incoming stack alignment to a different
4300 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4301 use the minimum incoming stack alignment to check if it is OK
4302 to perform sibcall optimization since sibcall optimization will
4303 only align the outgoing stack to incoming stack boundary. */
4304 if (targetm.calls.update_stack_boundary)
4305 targetm.calls.update_stack_boundary ();
4307 /* The incoming stack frame has to be aligned at least at
4308 parm_stack_boundary. */
4309 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4311 /* Update crtl->stack_alignment_estimated and use it later to align
4312 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4313 exceptions since callgraph doesn't collect incoming stack alignment
4314 in this case. */
4315 if (cfun->can_throw_non_call_exceptions
4316 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4317 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4318 else
4319 preferred_stack_boundary = crtl->preferred_stack_boundary;
4320 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4321 crtl->stack_alignment_estimated = preferred_stack_boundary;
4322 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4323 crtl->stack_alignment_needed = preferred_stack_boundary;
4325 gcc_assert (crtl->stack_alignment_needed
4326 <= crtl->stack_alignment_estimated);
4328 crtl->stack_realign_needed
4329 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4330 crtl->stack_realign_tried = crtl->stack_realign_needed;
4332 crtl->stack_realign_processed = true;
4334 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4335 alignment. */
4336 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4337 drap_rtx = targetm.calls.get_drap_rtx ();
4339 /* stack_realign_drap and drap_rtx must match. */
4340 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4342 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4343 if (NULL != drap_rtx)
4345 crtl->args.internal_arg_pointer = drap_rtx;
4347 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4348 needed. */
4349 fixup_tail_calls ();
4353 /* Translate the intermediate representation contained in the CFG
4354 from GIMPLE trees to RTL.
4356 We do conversion per basic block and preserve/update the tree CFG.
4357 This implies we have to do some magic as the CFG can simultaneously
4358 consist of basic blocks containing RTL and GIMPLE trees. This can
4359 confuse the CFG hooks, so be careful not to manipulate the CFG during
4360 the expansion. */
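 /* Rough outline of the steps below (summary only; the code is
    authoritative): leave SSA form, expand the variables recorded during
    gimple lowering, set up the parameters, build the init block, expand
    each basic block, build the exit block, rediscover sub-basic-blocks and
    purge dead edges, finish EH generation, and apply the stack-alignment
    fixups before handing the RTL to the pass manager.  */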
4362 static unsigned int
4363 gimple_expand_cfg (void)
4365 basic_block bb, init_block;
4366 sbitmap blocks;
4367 edge_iterator ei;
4368 edge e;
4369 rtx var_seq;
4370 unsigned i;
4372 timevar_push (TV_OUT_OF_SSA);
4373 rewrite_out_of_ssa (&SA);
4374 timevar_pop (TV_OUT_OF_SSA);
4375 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4376 sizeof (rtx));
4378 /* Make sure all values used by the optimization passes have sane
4379 defaults. */
4380 reg_renumber = 0;
4382 /* Some backends want to know that we are expanding to RTL. */
4383 currently_expanding_to_rtl = 1;
4384 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4385 free_dominance_info (CDI_DOMINATORS);
4387 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4389 insn_locators_alloc ();
4390 if (!DECL_IS_BUILTIN (current_function_decl))
4392 /* Eventually, all FEs should explicitly set function_start_locus. */
4393 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4394 set_curr_insn_source_location
4395 (DECL_SOURCE_LOCATION (current_function_decl));
4396 else
4397 set_curr_insn_source_location (cfun->function_start_locus);
4399 else
4400 set_curr_insn_source_location (UNKNOWN_LOCATION);
4401 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4402 prologue_locator = curr_insn_locator ();
4404 #ifdef INSN_SCHEDULING
4405 init_sched_attrs ();
4406 #endif
4408 /* Make sure the first insn is a note even if we don't want line numbers.
4409 This makes sure the first insn will never be deleted.
4410 Also, final expects a note to appear there. */
4411 emit_note (NOTE_INSN_DELETED);
4413 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4414 discover_nonconstant_array_refs ();
4416 targetm.expand_to_rtl_hook ();
4417 crtl->stack_alignment_needed = STACK_BOUNDARY;
4418 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4419 crtl->stack_alignment_estimated = 0;
4420 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4421 cfun->cfg->max_jumptable_ents = 0;
4423 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4424 of the function section at expansion time to predict the distance of calls. */
4425 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4427 /* Expand the variables recorded during gimple lowering. */
4428 timevar_push (TV_VAR_EXPAND);
4429 start_sequence ();
4431 expand_used_vars ();
4433 var_seq = get_insns ();
4434 end_sequence ();
4435 timevar_pop (TV_VAR_EXPAND);
4437 /* Honor stack protection warnings. */
4438 if (warn_stack_protect)
4440 if (cfun->calls_alloca)
4441 warning (OPT_Wstack_protector,
4442 "stack protector not protecting local variables: "
4443 "variable length buffer");
4444 if (has_short_buffer && !crtl->stack_protect_guard)
4445 warning (OPT_Wstack_protector,
4446 "stack protector not protecting function: "
4447 "all local arrays are less than %d bytes long",
4448 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4451 /* Set up the parameters of the function and prepare for its return. */
4452 expand_function_start (current_function_decl);
4454 /* If we emitted any instructions for setting up the variables,
4455 emit them before the FUNCTION_START note. */
4456 if (var_seq)
4458 emit_insn_before (var_seq, parm_birth_insn);
4460 /* In expand_function_end we'll insert the alloca save/restore
4461 before parm_birth_insn. We've just inserted an alloca call.
4462 Adjust the pointer to match. */
4463 parm_birth_insn = var_seq;
4466 /* Now that we also have the parameter RTXs, copy them over to our
4467 partitions. */
4468 for (i = 0; i < SA.map->num_partitions; i++)
4470 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4472 if (TREE_CODE (var) != VAR_DECL
4473 && !SA.partition_to_pseudo[i])
4474 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4475 gcc_assert (SA.partition_to_pseudo[i]);
4477 /* If this decl was marked as living in multiple places, reset
4478 this now to NULL. */
4479 if (DECL_RTL_IF_SET (var) == pc_rtx)
4480 SET_DECL_RTL (var, NULL);
4482 /* Some RTL parts really want to look at DECL_RTL(x) when x
4483 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4484 SET_DECL_RTL here to make this available, but that would mean
4485 selecting one of the potentially many RTLs for one DECL. Instead
4486 of doing that we simply reset the MEM_EXPR of the RTL in question,
4487 so nobody can get at it and hence nobody can call DECL_RTL on it. */
4488 if (!DECL_RTL_SET_P (var))
4490 if (MEM_P (SA.partition_to_pseudo[i]))
4491 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4495 /* If we have a partition containing differently aligned pointers,
4496 we need to merge those into the corresponding RTL pointer
4497 alignment. */
4498 for (i = 1; i < num_ssa_names; i++)
4500 tree name = ssa_name (i);
4501 int part;
4502 rtx r;
4504 if (!name
4505 || !POINTER_TYPE_P (TREE_TYPE (name))
4506 /* We might have generated new SSA names in
4507 update_alias_info_with_stack_vars. They will have a NULL
4508 defining statement and won't be part of the partitioning,
4509 so ignore those. */
4510 || !SSA_NAME_DEF_STMT (name))
4511 continue;
4512 part = var_to_partition (SA.map, name);
4513 if (part == NO_PARTITION)
4514 continue;
4515 r = SA.partition_to_pseudo[part];
4516 if (REG_P (r))
4517 mark_reg_pointer (r, get_pointer_alignment (name));
4520 /* If this function is `main', emit a call to `__main'
4521 to run global initializers, etc. */
4522 if (DECL_NAME (current_function_decl)
4523 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4524 && DECL_FILE_SCOPE_P (current_function_decl))
4525 expand_main_function ();
4527 /* Initialize the stack_protect_guard field. This must happen after the
4528 call to __main (if any) so that the external decl is initialized. */
4529 if (crtl->stack_protect_guard)
4530 stack_protect_prologue ();
4532 expand_phi_nodes (&SA);
4534 /* Register rtl specific functions for cfg. */
4535 rtl_register_cfg_hooks ();
4537 init_block = construct_init_block ();
4539 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4540 remaining edges later. */
4541 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4542 e->flags &= ~EDGE_EXECUTABLE;
4544 lab_rtx_for_bb = pointer_map_create ();
4545 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4546 bb = expand_gimple_basic_block (bb);
4548 if (MAY_HAVE_DEBUG_INSNS)
4549 expand_debug_locations ();
4551 execute_free_datastructures ();
4552 timevar_push (TV_OUT_OF_SSA);
4553 finish_out_of_ssa (&SA);
4554 timevar_pop (TV_OUT_OF_SSA);
4556 timevar_push (TV_POST_EXPAND);
4557 /* We are no longer in SSA form. */
4558 cfun->gimple_df->in_ssa_p = false;
4560 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4561 conservatively to true until they are all profile-aware. */
4562 pointer_map_destroy (lab_rtx_for_bb);
4563 free_histograms ();
4565 construct_exit_block ();
4566 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4567 insn_locators_finalize ();
4569 /* Zap the tree EH table. */
4570 set_eh_throw_stmt_table (cfun, NULL);
4572 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4573 to split edges, which edge insertions might do. */
4574 rebuild_jump_labels (get_insns ());
4576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4578 edge e;
4579 edge_iterator ei;
4580 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4582 if (e->insns.r)
4584 rebuild_jump_labels_chain (e->insns.r);
4585 /* Avoid putting insns before parm_birth_insn. */
4586 if (e->src == ENTRY_BLOCK_PTR
4587 && single_succ_p (ENTRY_BLOCK_PTR)
4588 && parm_birth_insn)
4590 rtx insns = e->insns.r;
4591 e->insns.r = NULL_RTX;
4592 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4594 else
4595 commit_one_edge_insertion (e);
4597 else
4598 ei_next (&ei);
4602 /* We're done expanding trees to RTL. */
4603 currently_expanding_to_rtl = 0;
4605 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4607 edge e;
4608 edge_iterator ei;
4609 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4611 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4612 e->flags &= ~EDGE_EXECUTABLE;
4614 /* At the moment not all abnormal edges match the RTL
4615 representation. It is safe to remove them here as
4616 find_many_sub_basic_blocks will rediscover them.
4617 In the future we should get this fixed properly. */
4618 if ((e->flags & EDGE_ABNORMAL)
4619 && !(e->flags & EDGE_SIBCALL))
4620 remove_edge (e);
4621 else
4622 ei_next (&ei);
4626 blocks = sbitmap_alloc (last_basic_block);
4627 sbitmap_ones (blocks);
4628 find_many_sub_basic_blocks (blocks);
4629 sbitmap_free (blocks);
4630 purge_all_dead_edges ();
4632 expand_stack_alignment ();
4634 /* Fix up REG_EQUIV notes in the prologue if there are tail calls in this
4635 function. */
4636 if (crtl->tail_call_emit)
4637 fixup_tail_calls ();
4639 /* After initial rtl generation, call back to finish generating
4640 exception support code. We need to do this before cleaning up
4641 the CFG as the code does not expect dead landing pads. */
4642 if (cfun->eh->region_tree != NULL)
4643 finish_eh_generation ();
4645 /* Remove unreachable blocks; otherwise we cannot compute dominators,
4646 which are needed for loop state verification. As a side effect
4647 this also compacts blocks.
4648 ??? We cannot remove trivially dead insns here as for example
4649 the DRAP reg on i?86 is not magically live at this point.
4650 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4651 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4653 #ifdef ENABLE_CHECKING
4654 verify_flow_info ();
4655 #endif
4657 /* Initialize pseudos allocated for hard registers. */
4658 emit_initial_value_sets ();
4660 /* And finally unshare all RTL. */
4661 unshare_all_rtl ();
4663 /* There's no need to defer outputting this function any more; we
4664 know we want to output it. */
4665 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4667 /* Now that we're done expanding trees to RTL, we shouldn't have any
4668 more CONCATs anywhere. */
4669 generating_concat_p = 0;
4671 if (dump_file)
4673 fprintf (dump_file,
4674 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4675 /* And the pass manager will dump RTL for us. */
4678 /* If we're emitting a nested function, make sure its parent gets
4679 emitted as well. Doing otherwise confuses debug info. */
4681 tree parent;
4682 for (parent = DECL_CONTEXT (current_function_decl);
4683 parent != NULL_TREE;
4684 parent = get_containing_scope (parent))
4685 if (TREE_CODE (parent) == FUNCTION_DECL)
4686 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4689 /* We are now committed to emitting code for this function. Do any
4690 preparation, such as emitting abstract debug info for the inline
4691 function before it gets mangled by optimization. */
4692 if (cgraph_function_possibly_inlined_p (current_function_decl))
4693 (*debug_hooks->outlining_inline_function) (current_function_decl);
4695 TREE_ASM_WRITTEN (current_function_decl) = 1;
4697 /* After expanding, the return labels are no longer needed. */
4698 return_label = NULL;
4699 naked_return_label = NULL;
4701 /* After expanding, the tm_restart map is no longer needed. */
4702 if (cfun->gimple_df->tm_restart)
4704 htab_delete (cfun->gimple_df->tm_restart);
4705 cfun->gimple_df->tm_restart = NULL;
4708 /* Tag the blocks with a depth number so that change_scope can find
4709 the common parent easily. */
4710 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4711 default_rtl_profile ();
4713 timevar_pop (TV_POST_EXPAND);
4715 return 0;
4718 struct rtl_opt_pass pass_expand =
4721 RTL_PASS,
4722 "expand", /* name */
4723 NULL, /* gate */
4724 gimple_expand_cfg, /* execute */
4725 NULL, /* sub */
4726 NULL, /* next */
4727 0, /* static_pass_number */
4728 TV_EXPAND, /* tv_id */
4729 PROP_ssa | PROP_gimple_leh | PROP_cfg
4730 | PROP_gimple_lcx, /* properties_required */
4731 PROP_rtl, /* properties_provided */
4732 PROP_ssa | PROP_trees, /* properties_destroyed */
4733 TODO_verify_ssa | TODO_verify_flow
4734 | TODO_verify_stmts, /* todo_flags_start */
4735 TODO_ggc_collect /* todo_flags_finish */