gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "cfgloop.h"
51 #include "regs.h" /* For reg_renumber. */
52 #include "insn-attr.h" /* For INSN_SCHEDULING. */
54 /* This variable holds information helping the rewriting of SSA trees
55 into RTL. */
56 struct ssaexpand SA;
58 /* This variable holds the currently expanded gimple statement for purposes
 59    of communicating the profile info to the builtin expanders.  */
60 gimple currently_expanding_gimple_stmt;
62 static rtx expand_debug_expr (tree);
64 /* Return an expression tree corresponding to the RHS of GIMPLE
65 statement STMT. */
67 tree
68 gimple_assign_rhs_to_tree (gimple stmt)
70 tree t;
71 enum gimple_rhs_class grhs_class;
73 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
75 if (grhs_class == GIMPLE_TERNARY_RHS)
76 t = build3 (gimple_assign_rhs_code (stmt),
77 TREE_TYPE (gimple_assign_lhs (stmt)),
78 gimple_assign_rhs1 (stmt),
79 gimple_assign_rhs2 (stmt),
80 gimple_assign_rhs3 (stmt));
81 else if (grhs_class == GIMPLE_BINARY_RHS)
82 t = build2 (gimple_assign_rhs_code (stmt),
83 TREE_TYPE (gimple_assign_lhs (stmt)),
84 gimple_assign_rhs1 (stmt),
85 gimple_assign_rhs2 (stmt));
86 else if (grhs_class == GIMPLE_UNARY_RHS)
87 t = build1 (gimple_assign_rhs_code (stmt),
88 TREE_TYPE (gimple_assign_lhs (stmt)),
89 gimple_assign_rhs1 (stmt));
90 else if (grhs_class == GIMPLE_SINGLE_RHS)
92 t = gimple_assign_rhs1 (stmt);
93 /* Avoid modifying this tree in place below. */
94 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
95 && gimple_location (stmt) != EXPR_LOCATION (t))
96 || (gimple_block (stmt)
97 && currently_expanding_to_rtl
98 && EXPR_P (t)
99 && gimple_block (stmt) != TREE_BLOCK (t)))
100 t = copy_node (t);
102 else
103 gcc_unreachable ();
105 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
106 SET_EXPR_LOCATION (t, gimple_location (stmt));
107 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
108 TREE_BLOCK (t) = gimple_block (stmt);
110 return t;
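/* Illustrative example (hypothetical SSA names x_1, a_2, b_3): for a
   statement "x_1 = a_2 + b_3" the RHS class is GIMPLE_BINARY_RHS, so the
   function above effectively does
       t = build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);
   a unary statement such as "x_1 = -a_2" maps to build1, and a plain copy
   "x_1 = a_2" (GIMPLE_SINGLE_RHS) simply returns rhs1, copying the node
   first if its location or block would otherwise have to be changed.  */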
114 #ifndef STACK_ALIGNMENT_NEEDED
115 #define STACK_ALIGNMENT_NEEDED 1
116 #endif
118 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
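/* E.g. for a hypothetical SSA name "i_7", SSAVAR yields the underlying
   VAR_DECL "i"; for a plain VAR_DECL it is the identity.  */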
 120 /* Associate declaration T with storage space X.  If T is not an
121 SSA name this is exactly SET_DECL_RTL, otherwise make the
122 partition of T associated with X. */
123 static inline void
124 set_rtl (tree t, rtx x)
126 if (TREE_CODE (t) == SSA_NAME)
128 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
129 if (x && !MEM_P (x))
130 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
131 /* For the benefit of debug information at -O0 (where vartracking
132 doesn't run) record the place also in the base DECL if it's
133 a normal variable (not a parameter). */
134 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
136 tree var = SSA_NAME_VAR (t);
137 /* If we don't yet have something recorded, just record it now. */
138 if (!DECL_RTL_SET_P (var))
139 SET_DECL_RTL (var, x);
140 /* If we have it set already to "multiple places" don't
141 change this. */
142 else if (DECL_RTL (var) == pc_rtx)
144 /* If we have something recorded and it's not the same place
145 as we want to record now, we have multiple partitions for the
146 same base variable, with different places. We can't just
 147    randomly choose one, hence we have to say that we don't know.
148 This only happens with optimization, and there var-tracking
149 will figure out the right thing. */
150 else if (DECL_RTL (var) != x)
151 SET_DECL_RTL (var, pc_rtx);
154 else
155 SET_DECL_RTL (t, x);
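/* Worked example (hypothetical variable "i" and pseudo numbers): if the
   partition of i_3 is given pseudo (reg:SI 58) and no RTL is recorded yet,
   DECL_RTL (i) becomes (reg:SI 58) so -O0 debug info has a location.  If a
   second partition of i later receives a different pseudo, DECL_RTL (i) is
   demoted to pc_rtx, meaning "lives in multiple places"; as noted above this
   only happens when optimizing, where var-tracking later recovers the
   correct location.  */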
158 /* This structure holds data relevant to one variable that will be
159 placed in a stack slot. */
160 struct stack_var
 162   /* The variable.  */
163 tree decl;
165 /* Initially, the size of the variable. Later, the size of the partition,
 166      if this variable becomes its partition's representative.  */
167 HOST_WIDE_INT size;
 169   /* The *byte* alignment required for this variable.  Or, as with the
 170      size, the alignment of this partition.  */
171 unsigned int alignb;
173 /* The partition representative. */
174 size_t representative;
176 /* The next stack variable in the partition, or EOC. */
177 size_t next;
 179   /* The indices of conflicting stack variables.  */
180 bitmap conflicts;
183 #define EOC ((size_t)-1)
185 /* We have an array of such objects while deciding allocation. */
186 static struct stack_var *stack_vars;
187 static size_t stack_vars_alloc;
188 static size_t stack_vars_num;
189 static struct pointer_map_t *decl_to_stack_part;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
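/* Worked example (hypothetical target): with PREFERRED_STACK_BOUNDARY == 128
   (align == 16 bytes) and STARTING_FRAME_OFFSET == 8, expand_used_vars
   computes off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, so that
   (frame_offset + frame_phase) % 16 == 0 holds for the initial
   frame_offset.  */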
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
 204 /* Used during expand_used_vars.  Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Compute the byte alignment to use for DECL. Ignore alignment
209 we can't do with expected alignment of the stack boundary. */
211 static unsigned int
212 align_local_variable (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 DECL_ALIGN (decl) = align;
216 return align / BITS_PER_UNIT;
219 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
220 Return the frame offset. */
222 static HOST_WIDE_INT
223 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
225 HOST_WIDE_INT offset, new_frame_offset;
227 new_frame_offset = frame_offset;
228 if (FRAME_GROWS_DOWNWARD)
230 new_frame_offset -= size + frame_phase;
231 new_frame_offset &= -align;
232 new_frame_offset += frame_phase;
233 offset = new_frame_offset;
235 else
237 new_frame_offset -= frame_phase;
238 new_frame_offset += align - 1;
239 new_frame_offset &= -align;
240 new_frame_offset += frame_phase;
241 offset = new_frame_offset;
242 new_frame_offset += size;
244 frame_offset = new_frame_offset;
246 if (frame_offset_overflow (frame_offset, cfun->decl))
247 frame_offset = offset = 0;
249 return offset;
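/* Worked example for the arithmetic above (illustrative numbers): with
   FRAME_GROWS_DOWNWARD, frame_phase == 0, frame_offset == -12, size == 20
   and align == 16, new_frame_offset = -12 - 20 = -32, which is already a
   multiple of 16, so the variable is placed at offset -32 and frame_offset
   becomes -32.  For an upward-growing frame starting at frame_offset == 12,
   the offset would instead be rounded up to 16 and frame_offset advanced to
   16 + 20 = 36.  */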
252 /* Accumulate DECL into STACK_VARS. */
254 static void
255 add_stack_var (tree decl)
257 struct stack_var *v;
259 if (stack_vars_num >= stack_vars_alloc)
261 if (stack_vars_alloc)
262 stack_vars_alloc = stack_vars_alloc * 3 / 2;
263 else
264 stack_vars_alloc = 32;
265 stack_vars
266 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
268 if (!decl_to_stack_part)
269 decl_to_stack_part = pointer_map_create ();
271 v = &stack_vars[stack_vars_num];
272 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
274 v->decl = decl;
275 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
276 /* Ensure that all variables have size, so that &a != &b for any two
277 variables that are simultaneously live. */
278 if (v->size == 0)
279 v->size = 1;
280 v->alignb = align_local_variable (SSAVAR (decl));
281 /* An alignment of zero can mightily confuse us later. */
282 gcc_assert (v->alignb != 0);
284 /* All variables are initially in their own partition. */
285 v->representative = stack_vars_num;
286 v->next = EOC;
288 /* All variables initially conflict with no other. */
289 v->conflicts = NULL;
291 /* Ensure that this decl doesn't get put onto the list twice. */
292 set_rtl (decl, pc_rtx);
294 stack_vars_num++;
 297 /* Make the decls associated with luids X and Y conflict.  */
299 static void
300 add_stack_var_conflict (size_t x, size_t y)
302 struct stack_var *a = &stack_vars[x];
303 struct stack_var *b = &stack_vars[y];
304 if (!a->conflicts)
305 a->conflicts = BITMAP_ALLOC (NULL);
306 if (!b->conflicts)
307 b->conflicts = BITMAP_ALLOC (NULL);
308 bitmap_set_bit (a->conflicts, y);
309 bitmap_set_bit (b->conflicts, x);
 312 /* Check whether the decls associated with luids X and Y conflict.  */
314 static bool
315 stack_var_conflict_p (size_t x, size_t y)
317 struct stack_var *a = &stack_vars[x];
318 struct stack_var *b = &stack_vars[y];
319 if (x == y)
320 return false;
321 /* Partitions containing an SSA name result from gimple registers
322 with things like unsupported modes. They are top-level and
323 hence conflict with everything else. */
324 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
325 return true;
327 if (!a->conflicts || !b->conflicts)
328 return false;
329 return bitmap_bit_p (a->conflicts, y);
332 /* Returns true if TYPE is or contains a union type. */
334 static bool
335 aggregate_contains_union_type (tree type)
337 tree field;
339 if (TREE_CODE (type) == UNION_TYPE
340 || TREE_CODE (type) == QUAL_UNION_TYPE)
341 return true;
342 if (TREE_CODE (type) == ARRAY_TYPE)
343 return aggregate_contains_union_type (TREE_TYPE (type));
344 if (TREE_CODE (type) != RECORD_TYPE)
345 return false;
347 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
348 if (TREE_CODE (field) == FIELD_DECL)
349 if (aggregate_contains_union_type (TREE_TYPE (field)))
350 return true;
352 return false;
355 /* A subroutine of expand_used_vars. If two variables X and Y have alias
 356    sets that do not conflict, we must add a conflict for these variables
 357    in the interference graph.  We also need to make sure to add conflicts
 358    for union-containing structures.  Otherwise RTL alias analysis comes along
359 and due to type based aliasing rules decides that for two overlapping
360 union temporaries { short s; int i; } accesses to the same mem through
361 different types may not alias and happily reorders stores across
362 life-time boundaries of the temporaries (See PR25654). */
364 static void
365 add_alias_set_conflicts (void)
367 size_t i, j, n = stack_vars_num;
369 for (i = 0; i < n; ++i)
371 tree type_i = TREE_TYPE (stack_vars[i].decl);
372 bool aggr_i = AGGREGATE_TYPE_P (type_i);
373 bool contains_union;
375 contains_union = aggregate_contains_union_type (type_i);
376 for (j = 0; j < i; ++j)
378 tree type_j = TREE_TYPE (stack_vars[j].decl);
379 bool aggr_j = AGGREGATE_TYPE_P (type_j);
380 if (aggr_i != aggr_j
381 /* Either the objects conflict by means of type based
382 aliasing rules, or we need to add a conflict. */
383 || !objects_must_conflict_p (type_i, type_j)
384 /* In case the types do not conflict ensure that access
385 to elements will conflict. In case of unions we have
386 to be careful as type based aliasing rules may say
387 access to the same memory does not conflict. So play
388 safe and add a conflict in this case when
389 -fstrict-aliasing is used. */
390 || (contains_union && flag_strict_aliasing))
391 add_stack_var_conflict (i, j);
396 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
397 enter its partition number into bitmap DATA. */
399 static bool
400 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
402 bitmap active = (bitmap)data;
403 op = get_base_address (op);
404 if (op
405 && DECL_P (op)
406 && DECL_RTL_IF_SET (op) == pc_rtx)
408 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
409 if (v)
410 bitmap_set_bit (active, *v);
412 return false;
415 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
416 record conflicts between it and all currently active other partitions
417 from bitmap DATA. */
419 static bool
420 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
422 bitmap active = (bitmap)data;
423 op = get_base_address (op);
424 if (op
425 && DECL_P (op)
426 && DECL_RTL_IF_SET (op) == pc_rtx)
428 size_t *v =
429 (size_t *) pointer_map_contains (decl_to_stack_part, op);
430 if (v && bitmap_set_bit (active, *v))
432 size_t num = *v;
433 bitmap_iterator bi;
434 unsigned i;
435 gcc_assert (num < stack_vars_num);
436 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
437 add_stack_var_conflict (num, i);
440 return false;
443 /* Helper routine for add_scope_conflicts, calculating the active partitions
444 at the end of BB, leaving the result in WORK. We're called to generate
445 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
446 liveness. */
448 static void
449 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
451 edge e;
452 edge_iterator ei;
453 gimple_stmt_iterator gsi;
454 bool (*visit)(gimple, tree, void *);
456 bitmap_clear (work);
457 FOR_EACH_EDGE (e, ei, bb->preds)
458 bitmap_ior_into (work, (bitmap)e->src->aux);
460 visit = visit_op;
462 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
464 gimple stmt = gsi_stmt (gsi);
465 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
467 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
469 gimple stmt = gsi_stmt (gsi);
471 if (gimple_clobber_p (stmt))
473 tree lhs = gimple_assign_lhs (stmt);
474 size_t *v;
475 /* Nested function lowering might introduce LHSs
476 that are COMPONENT_REFs. */
477 if (TREE_CODE (lhs) != VAR_DECL)
478 continue;
479 if (DECL_RTL_IF_SET (lhs) == pc_rtx
480 && (v = (size_t *)
481 pointer_map_contains (decl_to_stack_part, lhs)))
482 bitmap_clear_bit (work, *v);
484 else if (!is_gimple_debug (stmt))
486 if (for_conflict
487 && visit == visit_op)
489 /* If this is the first real instruction in this BB we need
490 to add conflicts for everything live at this point now.
491 Unlike classical liveness for named objects we can't
492 rely on seeing a def/use of the names we're interested in.
493 There might merely be indirect loads/stores. We'd not add any
494 conflicts for such partitions. */
495 bitmap_iterator bi;
496 unsigned i;
497 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
499 unsigned j;
500 bitmap_iterator bj;
501 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
502 add_stack_var_conflict (i, j);
504 visit = visit_conflict;
506 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
511 /* Generate stack partition conflicts between all partitions that are
512 simultaneously live. */
514 static void
515 add_scope_conflicts (void)
517 basic_block bb;
518 bool changed;
519 bitmap work = BITMAP_ALLOC (NULL);
521 /* We approximate the live range of a stack variable by taking the first
522 mention of its name as starting point(s), and by the end-of-scope
523 death clobber added by gimplify as ending point(s) of the range.
 524    This overapproximates in the case where we, for instance, moved an
 525    address-taken operation upward without also moving a dereference of it upward.
 526    But it is conservatively correct, as a variable can never hold values
527 before its name is mentioned at least once.
529 We then do a mostly classical bitmap liveness algorithm. */
531 FOR_ALL_BB (bb)
532 bb->aux = BITMAP_ALLOC (NULL);
534 changed = true;
535 while (changed)
537 changed = false;
538 FOR_EACH_BB (bb)
540 bitmap active = (bitmap)bb->aux;
541 add_scope_conflicts_1 (bb, work, false);
542 if (bitmap_ior_into (active, work))
543 changed = true;
547 FOR_EACH_BB (bb)
548 add_scope_conflicts_1 (bb, work, true);
550 BITMAP_FREE (work);
551 FOR_ALL_BB (bb)
552 BITMAP_FREE (bb->aux);
555 /* A subroutine of partition_stack_vars. A comparison function for qsort,
556 sorting an array of indices by the properties of the object. */
558 static int
559 stack_var_cmp (const void *a, const void *b)
561 size_t ia = *(const size_t *)a;
562 size_t ib = *(const size_t *)b;
563 unsigned int aligna = stack_vars[ia].alignb;
564 unsigned int alignb = stack_vars[ib].alignb;
565 HOST_WIDE_INT sizea = stack_vars[ia].size;
566 HOST_WIDE_INT sizeb = stack_vars[ib].size;
567 tree decla = stack_vars[ia].decl;
568 tree declb = stack_vars[ib].decl;
569 bool largea, largeb;
570 unsigned int uida, uidb;
572 /* Primary compare on "large" alignment. Large comes first. */
573 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
574 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
575 if (largea != largeb)
576 return (int)largeb - (int)largea;
 578   /* Secondary compare on size, decreasing.  */
579 if (sizea > sizeb)
580 return -1;
581 if (sizea < sizeb)
582 return 1;
584 /* Tertiary compare on true alignment, decreasing. */
585 if (aligna < alignb)
586 return -1;
587 if (aligna > alignb)
588 return 1;
590 /* Final compare on ID for sort stability, increasing.
591 Two SSA names are compared by their version, SSA names come before
592 non-SSA names, and two normal decls are compared by their DECL_UID. */
593 if (TREE_CODE (decla) == SSA_NAME)
595 if (TREE_CODE (declb) == SSA_NAME)
596 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
597 else
598 return -1;
600 else if (TREE_CODE (declb) == SSA_NAME)
601 return 1;
602 else
603 uida = DECL_UID (decla), uidb = DECL_UID (declb);
604 if (uida < uidb)
605 return 1;
606 if (uida > uidb)
607 return -1;
608 return 0;
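/* Worked example (hypothetical variables): given a 4096-byte array with
   32-byte alignment, a 64-byte buffer with 16-byte alignment and an 8-byte
   scalar, qsort with this comparator orders them array, buffer, scalar --
   none has "large" alignment, so decreasing size decides, with alignment and
   then SSA version / DECL_UID breaking ties.  */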
612 /* If the points-to solution *PI points to variables that are in a partition
613 together with other variables add all partition members to the pointed-to
614 variables bitmap. */
616 static void
617 add_partitioned_vars_to_ptset (struct pt_solution *pt,
618 struct pointer_map_t *decls_to_partitions,
619 struct pointer_set_t *visited, bitmap temp)
621 bitmap_iterator bi;
622 unsigned i;
623 bitmap *part;
625 if (pt->anything
626 || pt->vars == NULL
627 /* The pointed-to vars bitmap is shared, it is enough to
628 visit it once. */
629 || pointer_set_insert(visited, pt->vars))
630 return;
632 bitmap_clear (temp);
634 /* By using a temporary bitmap to store all members of the partitions
635 we have to add we make sure to visit each of the partitions only
636 once. */
637 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
638 if ((!temp
639 || !bitmap_bit_p (temp, i))
640 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
641 (void *)(size_t) i)))
642 bitmap_ior_into (temp, *part);
643 if (!bitmap_empty_p (temp))
644 bitmap_ior_into (pt->vars, temp);
647 /* Update points-to sets based on partition info, so we can use them on RTL.
648 The bitmaps representing stack partitions will be saved until expand,
649 where partitioned decls used as bases in memory expressions will be
650 rewritten. */
652 static void
653 update_alias_info_with_stack_vars (void)
655 struct pointer_map_t *decls_to_partitions = NULL;
656 size_t i, j;
657 tree var = NULL_TREE;
659 for (i = 0; i < stack_vars_num; i++)
661 bitmap part = NULL;
662 tree name;
663 struct ptr_info_def *pi;
 665       /* Not interested in partitions with a single variable.  */
666 if (stack_vars[i].representative != i
667 || stack_vars[i].next == EOC)
668 continue;
670 if (!decls_to_partitions)
672 decls_to_partitions = pointer_map_create ();
673 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
676 /* Create an SSA_NAME that points to the partition for use
677 as base during alias-oracle queries on RTL for bases that
678 have been partitioned. */
679 if (var == NULL_TREE)
680 var = create_tmp_var (ptr_type_node, NULL);
681 name = make_ssa_name (var, NULL);
683 /* Create bitmaps representing partitions. They will be used for
684 points-to sets later, so use GGC alloc. */
685 part = BITMAP_GGC_ALLOC ();
686 for (j = i; j != EOC; j = stack_vars[j].next)
688 tree decl = stack_vars[j].decl;
689 unsigned int uid = DECL_PT_UID (decl);
690 /* We should never end up partitioning SSA names (though they
691 may end up on the stack). Neither should we allocate stack
692 space to something that is unused and thus unreferenced, except
693 for -O0 where we are preserving even unreferenced variables. */
694 gcc_assert (DECL_P (decl)
695 && (!optimize
696 || referenced_var_lookup (cfun, DECL_UID (decl))));
697 bitmap_set_bit (part, uid);
698 *((bitmap *) pointer_map_insert (decls_to_partitions,
699 (void *)(size_t) uid)) = part;
700 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
701 decl)) = name;
704 /* Make the SSA name point to all partition members. */
705 pi = get_ptr_info (name);
706 pt_solution_set (&pi->pt, part, false);
709 /* Make all points-to sets that contain one member of a partition
710 contain all members of the partition. */
711 if (decls_to_partitions)
713 unsigned i;
714 struct pointer_set_t *visited = pointer_set_create ();
715 bitmap temp = BITMAP_ALLOC (NULL);
717 for (i = 1; i < num_ssa_names; i++)
719 tree name = ssa_name (i);
720 struct ptr_info_def *pi;
722 if (name
723 && POINTER_TYPE_P (TREE_TYPE (name))
724 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
725 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
726 visited, temp);
729 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
730 decls_to_partitions, visited, temp);
732 pointer_set_destroy (visited);
733 pointer_map_destroy (decls_to_partitions);
734 BITMAP_FREE (temp);
738 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
739 partitioning algorithm. Partitions A and B are known to be non-conflicting.
740 Merge them into a single partition A. */
742 static void
743 union_stack_vars (size_t a, size_t b)
745 struct stack_var *vb = &stack_vars[b];
746 bitmap_iterator bi;
747 unsigned u;
749 gcc_assert (stack_vars[b].next == EOC);
750 /* Add B to A's partition. */
751 stack_vars[b].next = stack_vars[a].next;
752 stack_vars[b].representative = a;
753 stack_vars[a].next = b;
755 /* Update the required alignment of partition A to account for B. */
756 if (stack_vars[a].alignb < stack_vars[b].alignb)
757 stack_vars[a].alignb = stack_vars[b].alignb;
759 /* Update the interference graph and merge the conflicts. */
760 if (vb->conflicts)
762 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
763 add_stack_var_conflict (a, stack_vars[u].representative);
764 BITMAP_FREE (vb->conflicts);
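/* Worked example (illustrative indices): starting from singleton partitions
   {0}, {1} and {2}, union_stack_vars (0, 2) followed by
   union_stack_vars (0, 1) leaves representative 0 with the chain
   0 -> 1 -> 2 -> EOC, stack_vars[1].representative ==
   stack_vars[2].representative == 0, partition 0's alignment raised to the
   maximum of the three, and the conflicts of 1 and 2 folded into 0.  */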
768 /* A subroutine of expand_used_vars. Binpack the variables into
769 partitions constrained by the interference graph. The overall
770 algorithm used is as follows:
772 Sort the objects by size in descending order.
773 For each object A {
774 S = size(A)
775 O = 0
776 loop {
777 Look for the largest non-conflicting object B with size <= S.
 778	    UNION (A, B)
 779	  }
 780	}
 781 */
783 static void
784 partition_stack_vars (void)
786 size_t si, sj, n = stack_vars_num;
788 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
789 for (si = 0; si < n; ++si)
790 stack_vars_sorted[si] = si;
792 if (n == 1)
793 return;
795 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
797 for (si = 0; si < n; ++si)
799 size_t i = stack_vars_sorted[si];
800 unsigned int ialign = stack_vars[i].alignb;
802 /* Ignore objects that aren't partition representatives. If we
803 see a var that is not a partition representative, it must
804 have been merged earlier. */
805 if (stack_vars[i].representative != i)
806 continue;
808 for (sj = si + 1; sj < n; ++sj)
810 size_t j = stack_vars_sorted[sj];
811 unsigned int jalign = stack_vars[j].alignb;
813 /* Ignore objects that aren't partition representatives. */
814 if (stack_vars[j].representative != j)
815 continue;
817 /* Ignore conflicting objects. */
818 if (stack_var_conflict_p (i, j))
819 continue;
821 /* Do not mix objects of "small" (supported) alignment
822 and "large" (unsupported) alignment. */
823 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
824 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
825 continue;
 827 	  /* UNION the objects; J joins I's partition.  */
828 union_stack_vars (i, j);
832 update_alias_info_with_stack_vars ();
835 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
837 static void
838 dump_stack_var_partition (void)
840 size_t si, i, j, n = stack_vars_num;
842 for (si = 0; si < n; ++si)
844 i = stack_vars_sorted[si];
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars[i].representative != i)
848 continue;
850 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i, stack_vars[i].size,
852 stack_vars[i].alignb);
854 for (j = i; j != EOC; j = stack_vars[j].next)
856 fputc ('\t', dump_file);
857 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
859 fputc ('\n', dump_file);
863 /* Assign rtl to DECL at BASE + OFFSET. */
865 static void
866 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
867 HOST_WIDE_INT offset)
869 unsigned align;
870 rtx x;
872 /* If this fails, we've overflowed the stack frame. Error nicely? */
873 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
875 x = plus_constant (Pmode, base, offset);
876 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
878 if (TREE_CODE (decl) != SSA_NAME)
 880       /* Set the alignment we actually gave this decl if it isn't an SSA name.
 881          If it is, we generate stack slots only accidentally, so it isn't as
 882          important; we'll simply use the alignment that is already set.  */
883 if (base == virtual_stack_vars_rtx)
884 offset -= frame_phase;
885 align = offset & -offset;
886 align *= BITS_PER_UNIT;
887 if (align == 0 || align > base_align)
888 align = base_align;
890 /* One would think that we could assert that we're not decreasing
891 alignment here, but (at least) the i386 port does exactly this
892 via the MINIMUM_ALIGNMENT hook. */
894 DECL_ALIGN (decl) = align;
895 DECL_USER_ALIGN (decl) = 0;
898 set_mem_attributes (x, SSAVAR (decl), true);
899 set_rtl (decl, x);
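/* Worked example for the "offset & -offset" computation above (illustrative
   numbers, after the frame_phase adjustment): an offset of 24 bytes from
   virtual_stack_vars_rtx gives 24 & -24 == 8, i.e. a guaranteed 8-byte
   (64-bit) alignment, which is then capped by BASE_ALIGN; an offset of 0
   yields align == 0 and the base alignment is used instead.  */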
902 /* A subroutine of expand_used_vars. Give each partition representative
903 a unique location within the stack frame. Update each partition member
904 with that location. */
906 static void
907 expand_stack_vars (bool (*pred) (tree))
909 size_t si, i, j, n = stack_vars_num;
910 HOST_WIDE_INT large_size = 0, large_alloc = 0;
911 rtx large_base = NULL;
912 unsigned large_align = 0;
913 tree decl;
915 /* Determine if there are any variables requiring "large" alignment.
916 Since these are dynamically allocated, we only process these if
 917      no predicate is involved.  */
918 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
919 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
921 /* Find the total size of these variables. */
922 for (si = 0; si < n; ++si)
924 unsigned alignb;
926 i = stack_vars_sorted[si];
927 alignb = stack_vars[i].alignb;
929 /* Stop when we get to the first decl with "small" alignment. */
930 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
931 break;
933 /* Skip variables that aren't partition representatives. */
934 if (stack_vars[i].representative != i)
935 continue;
937 /* Skip variables that have already had rtl assigned. See also
938 add_stack_var where we perpetrate this pc_rtx hack. */
939 decl = stack_vars[i].decl;
940 if ((TREE_CODE (decl) == SSA_NAME
941 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
942 : DECL_RTL (decl)) != pc_rtx)
943 continue;
945 large_size += alignb - 1;
946 large_size &= -(HOST_WIDE_INT)alignb;
947 large_size += stack_vars[i].size;
950 /* If there were any, allocate space. */
951 if (large_size > 0)
952 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
953 large_align, true);
956 for (si = 0; si < n; ++si)
958 rtx base;
959 unsigned base_align, alignb;
960 HOST_WIDE_INT offset;
962 i = stack_vars_sorted[si];
964 /* Skip variables that aren't partition representatives, for now. */
965 if (stack_vars[i].representative != i)
966 continue;
968 /* Skip variables that have already had rtl assigned. See also
969 add_stack_var where we perpetrate this pc_rtx hack. */
970 decl = stack_vars[i].decl;
971 if ((TREE_CODE (decl) == SSA_NAME
972 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
973 : DECL_RTL (decl)) != pc_rtx)
974 continue;
976 /* Check the predicate to see whether this variable should be
977 allocated in this pass. */
978 if (pred && !pred (decl))
979 continue;
981 alignb = stack_vars[i].alignb;
982 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
984 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
985 base = virtual_stack_vars_rtx;
986 base_align = crtl->max_used_stack_slot_alignment;
988 else
990 /* Large alignment is only processed in the last pass. */
991 if (pred)
992 continue;
993 gcc_assert (large_base != NULL);
995 large_alloc += alignb - 1;
996 large_alloc &= -(HOST_WIDE_INT)alignb;
997 offset = large_alloc;
998 large_alloc += stack_vars[i].size;
1000 base = large_base;
1001 base_align = large_align;
1004 /* Create rtl for each variable based on their location within the
1005 partition. */
1006 for (j = i; j != EOC; j = stack_vars[j].next)
1008 expand_one_stack_var_at (stack_vars[j].decl,
1009 base, base_align,
1010 offset);
1014 gcc_assert (large_alloc == large_size);
1017 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1018 static HOST_WIDE_INT
1019 account_stack_vars (void)
1021 size_t si, j, i, n = stack_vars_num;
1022 HOST_WIDE_INT size = 0;
1024 for (si = 0; si < n; ++si)
1026 i = stack_vars_sorted[si];
1028 /* Skip variables that aren't partition representatives, for now. */
1029 if (stack_vars[i].representative != i)
1030 continue;
1032 size += stack_vars[i].size;
1033 for (j = i; j != EOC; j = stack_vars[j].next)
1034 set_rtl (stack_vars[j].decl, NULL);
1036 return size;
1039 /* A subroutine of expand_one_var. Called to immediately assign rtl
1040 to a variable to be allocated in the stack frame. */
1042 static void
1043 expand_one_stack_var (tree var)
1045 HOST_WIDE_INT size, offset;
1046 unsigned byte_align;
1048 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1049 byte_align = align_local_variable (SSAVAR (var));
1051 /* We handle highly aligned variables in expand_stack_vars. */
1052 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1054 offset = alloc_stack_frame_space (size, byte_align);
1056 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1057 crtl->max_used_stack_slot_alignment, offset);
1060 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1061 that will reside in a hard register. */
1063 static void
1064 expand_one_hard_reg_var (tree var)
1066 rest_of_decl_compilation (var, 0, 0);
1069 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1070 that will reside in a pseudo register. */
1072 static void
1073 expand_one_register_var (tree var)
1075 tree decl = SSAVAR (var);
1076 tree type = TREE_TYPE (decl);
1077 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1078 rtx x = gen_reg_rtx (reg_mode);
1080 set_rtl (var, x);
1082 /* Note if the object is a user variable. */
1083 if (!DECL_ARTIFICIAL (decl))
1084 mark_user_reg (x);
1086 if (POINTER_TYPE_P (type))
1087 mark_reg_pointer (x, get_pointer_alignment (var));
1090 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1091 has some associated error, e.g. its type is error-mark. We just need
1092 to pick something that won't crash the rest of the compiler. */
1094 static void
1095 expand_one_error_var (tree var)
1097 enum machine_mode mode = DECL_MODE (var);
1098 rtx x;
1100 if (mode == BLKmode)
1101 x = gen_rtx_MEM (BLKmode, const0_rtx);
1102 else if (mode == VOIDmode)
1103 x = const0_rtx;
1104 else
1105 x = gen_reg_rtx (mode);
1107 SET_DECL_RTL (var, x);
1110 /* A subroutine of expand_one_var. VAR is a variable that will be
1111 allocated to the local stack frame. Return true if we wish to
1112 add VAR to STACK_VARS so that it will be coalesced with other
1113 variables. Return false to allocate VAR immediately.
1115 This function is used to reduce the number of variables considered
1116 for coalescing, which reduces the size of the quadratic problem. */
1118 static bool
1119 defer_stack_allocation (tree var, bool toplevel)
1121 /* If stack protection is enabled, *all* stack variables must be deferred,
1122 so that we can re-order the strings to the top of the frame. */
1123 if (flag_stack_protect)
1124 return true;
1126 /* We handle "large" alignment via dynamic allocation. We want to handle
1127 this extra complication in only one place, so defer them. */
1128 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1129 return true;
1131 /* Variables in the outermost scope automatically conflict with
1132 every other variable. The only reason to want to defer them
1133 at all is that, after sorting, we can more efficiently pack
1134 small variables in the stack frame. Continue to defer at -O2. */
1135 if (toplevel && optimize < 2)
1136 return false;
1138 /* Without optimization, *most* variables are allocated from the
1139 stack, which makes the quadratic problem large exactly when we
1140 want compilation to proceed as quickly as possible. On the
1141 other hand, we don't want the function's stack frame size to
1142 get completely out of hand. So we avoid adding scalars and
1143 "small" aggregates to the list at all. */
1144 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1145 return false;
1147 return true;
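/* Illustrative consequence of the rules above: with -fstack-protector every
   candidate is deferred; otherwise, at -O0 a block-scope "int i" (4 bytes,
   under the 32-byte cutoff) gets its frame slot immediately, while a
   block-scope "char buf[256]" is deferred so it can still be packed together
   with other deferred variables.  */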
1150 /* A subroutine of expand_used_vars. Expand one variable according to
1151 its flavor. Variables to be placed on the stack are not actually
1152 expanded yet, merely recorded.
1153 When REALLY_EXPAND is false, only add stack values to be allocated.
 1154    Return the stack usage this variable is supposed to take.
1157 static HOST_WIDE_INT
1158 expand_one_var (tree var, bool toplevel, bool really_expand)
1160 unsigned int align = BITS_PER_UNIT;
1161 tree origvar = var;
1163 var = SSAVAR (var);
1165 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
 1167       /* Because we don't know whether VAR will end up in a register or on the
 1168 	 stack, we conservatively assume it will be on the stack even if VAR is
 1169 	 eventually put into a register after the RA pass.  For non-automatic
1170 variables, which won't be on stack, we collect alignment of
1171 type and ignore user specified alignment. */
1172 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1173 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1174 TYPE_MODE (TREE_TYPE (var)),
1175 TYPE_ALIGN (TREE_TYPE (var)));
1176 else if (DECL_HAS_VALUE_EXPR_P (var)
1177 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
 1178 	/* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
 1179 	   or variables which were assigned a stack slot already by
 1180 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has already
 1181 	   been adjusted to match the offset chosen for it.  */
1182 align = crtl->stack_alignment_estimated;
1183 else
1184 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
 1186       /* If the variable alignment is very large we'll dynamically allocate
 1187 	 it, which means that the in-frame portion is just a pointer.  */
1188 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1189 align = POINTER_SIZE;
1192 if (SUPPORTS_STACK_ALIGNMENT
1193 && crtl->stack_alignment_estimated < align)
 1195       /* stack_alignment_estimated shouldn't change after the stack
 1196          realign decision has been made.  */
1197 gcc_assert(!crtl->stack_realign_processed);
1198 crtl->stack_alignment_estimated = align;
1201 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1202 So here we only make sure stack_alignment_needed >= align. */
1203 if (crtl->stack_alignment_needed < align)
1204 crtl->stack_alignment_needed = align;
1205 if (crtl->max_used_stack_slot_alignment < align)
1206 crtl->max_used_stack_slot_alignment = align;
1208 if (TREE_CODE (origvar) == SSA_NAME)
1210 gcc_assert (TREE_CODE (var) != VAR_DECL
1211 || (!DECL_EXTERNAL (var)
1212 && !DECL_HAS_VALUE_EXPR_P (var)
1213 && !TREE_STATIC (var)
1214 && TREE_TYPE (var) != error_mark_node
1215 && !DECL_HARD_REGISTER (var)
1216 && really_expand));
1218 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1220 else if (DECL_EXTERNAL (var))
1222 else if (DECL_HAS_VALUE_EXPR_P (var))
1224 else if (TREE_STATIC (var))
1226 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1228 else if (TREE_TYPE (var) == error_mark_node)
1230 if (really_expand)
1231 expand_one_error_var (var);
1233 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1235 if (really_expand)
1236 expand_one_hard_reg_var (var);
1238 else if (use_register_for_decl (var))
1240 if (really_expand)
1241 expand_one_register_var (origvar);
1243 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1245 /* Reject variables which cover more than half of the address-space. */
1246 if (really_expand)
1248 error ("size of variable %q+D is too large", var);
1249 expand_one_error_var (var);
1252 else if (defer_stack_allocation (var, toplevel))
1253 add_stack_var (origvar);
1254 else
1256 if (really_expand)
1257 expand_one_stack_var (origvar);
1258 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1260 return 0;
1263 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1264 expanding variables. Those variables that can be put into registers
1265 are allocated pseudos; those that can't are put on the stack.
1267 TOPLEVEL is true if this is the outermost BLOCK. */
1269 static void
1270 expand_used_vars_for_block (tree block, bool toplevel)
1272 tree t;
1274 /* Expand all variables at this level. */
1275 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1276 if (TREE_USED (t)
1277 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1278 || !DECL_NONSHAREABLE (t)))
1279 expand_one_var (t, toplevel, true);
1281 /* Expand all variables at containing levels. */
1282 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1283 expand_used_vars_for_block (t, false);
1286 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1287 and clear TREE_USED on all local variables. */
1289 static void
1290 clear_tree_used (tree block)
1292 tree t;
1294 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1295 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1296 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1297 || !DECL_NONSHAREABLE (t))
1298 TREE_USED (t) = 0;
1300 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1301 clear_tree_used (t);
1304 /* Examine TYPE and determine a bit mask of the following features. */
1306 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1307 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1308 #define SPCT_HAS_ARRAY 4
1309 #define SPCT_HAS_AGGREGATE 8
1311 static unsigned int
1312 stack_protect_classify_type (tree type)
1314 unsigned int ret = 0;
1315 tree t;
1317 switch (TREE_CODE (type))
1319 case ARRAY_TYPE:
1320 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1321 if (t == char_type_node
1322 || t == signed_char_type_node
1323 || t == unsigned_char_type_node)
1325 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1326 unsigned HOST_WIDE_INT len;
1328 if (!TYPE_SIZE_UNIT (type)
1329 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1330 len = max;
1331 else
1332 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1334 if (len < max)
1335 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1336 else
1337 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1339 else
1340 ret = SPCT_HAS_ARRAY;
1341 break;
1343 case UNION_TYPE:
1344 case QUAL_UNION_TYPE:
1345 case RECORD_TYPE:
1346 ret = SPCT_HAS_AGGREGATE;
1347 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1348 if (TREE_CODE (t) == FIELD_DECL)
1349 ret |= stack_protect_classify_type (TREE_TYPE (t));
1350 break;
1352 default:
1353 break;
1356 return ret;
1359 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1360 part of the local stack frame. Remember if we ever return nonzero for
1361 any variable in this function. The return value is the phase number in
1362 which the variable should be allocated. */
1364 static int
1365 stack_protect_decl_phase (tree decl)
1367 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1368 int ret = 0;
1370 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1371 has_short_buffer = true;
1373 if (flag_stack_protect == 2)
1375 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1376 && !(bits & SPCT_HAS_AGGREGATE))
1377 ret = 1;
1378 else if (bits & SPCT_HAS_ARRAY)
1379 ret = 2;
1381 else
1382 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1384 if (ret)
1385 has_protected_decls = true;
1387 return ret;
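/* Worked example (assuming the default --param ssp-buffer-size=8): a local
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY and
   lands in phase 1; "char small[4]" only sets SPCT_HAS_SMALL_CHAR_ARRAY |
   SPCT_HAS_ARRAY and stays in phase 0 unless -fstack-protector-all
   (flag_stack_protect == 2) promotes it to phase 1; "int v[16]" reaches
   phase 2 only under -fstack-protector-all.  */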
1390 /* Two helper routines that check for phase 1 and phase 2. These are used
1391 as callbacks for expand_stack_vars. */
1393 static bool
1394 stack_protect_decl_phase_1 (tree decl)
1396 return stack_protect_decl_phase (decl) == 1;
1399 static bool
1400 stack_protect_decl_phase_2 (tree decl)
1402 return stack_protect_decl_phase (decl) == 2;
1405 /* Ensure that variables in different stack protection phases conflict
1406 so that they are not merged and share the same stack slot. */
1408 static void
1409 add_stack_protection_conflicts (void)
1411 size_t i, j, n = stack_vars_num;
1412 unsigned char *phase;
1414 phase = XNEWVEC (unsigned char, n);
1415 for (i = 0; i < n; ++i)
1416 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1418 for (i = 0; i < n; ++i)
1420 unsigned char ph_i = phase[i];
1421 for (j = 0; j < i; ++j)
1422 if (ph_i != phase[j])
1423 add_stack_var_conflict (i, j);
1426 XDELETEVEC (phase);
1429 /* Create a decl for the guard at the top of the stack frame. */
1431 static void
1432 create_stack_guard (void)
1434 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1435 VAR_DECL, NULL, ptr_type_node);
1436 TREE_THIS_VOLATILE (guard) = 1;
1437 TREE_USED (guard) = 1;
1438 expand_one_stack_var (guard);
1439 crtl->stack_protect_guard = guard;
1442 /* Prepare for expanding variables. */
1443 static void
1444 init_vars_expansion (void)
1446 tree t;
1447 unsigned ix;
1448 /* Set TREE_USED on all variables in the local_decls. */
1449 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1450 TREE_USED (t) = 1;
1452 /* Clear TREE_USED on all variables associated with a block scope. */
1453 clear_tree_used (DECL_INITIAL (current_function_decl));
1455 /* Initialize local stack smashing state. */
1456 has_protected_decls = false;
1457 has_short_buffer = false;
1460 /* Free up stack variable graph data. */
1461 static void
1462 fini_vars_expansion (void)
1464 size_t i, n = stack_vars_num;
1465 for (i = 0; i < n; i++)
1466 BITMAP_FREE (stack_vars[i].conflicts);
1467 XDELETEVEC (stack_vars);
1468 XDELETEVEC (stack_vars_sorted);
1469 stack_vars = NULL;
1470 stack_vars_alloc = stack_vars_num = 0;
1471 pointer_map_destroy (decl_to_stack_part);
1472 decl_to_stack_part = NULL;
1475 /* Make a fair guess for the size of the stack frame of the function
1476 in NODE. This doesn't have to be exact, the result is only used in
1477 the inline heuristics. So we don't want to run the full stack var
1478 packing algorithm (which is quadratic in the number of stack vars).
1479 Instead, we calculate the total size of all stack vars. This turns
1480 out to be a pretty fair estimate -- packing of stack vars doesn't
1481 happen very often. */
1483 HOST_WIDE_INT
1484 estimated_stack_frame_size (struct cgraph_node *node)
1486 HOST_WIDE_INT size = 0;
1487 size_t i;
1488 tree var;
1489 tree old_cur_fun_decl = current_function_decl;
1490 referenced_var_iterator rvi;
1491 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1493 current_function_decl = node->symbol.decl;
1494 push_cfun (fn);
1496 gcc_checking_assert (gimple_referenced_vars (fn));
1497 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1498 size += expand_one_var (var, true, false);
1500 if (stack_vars_num > 0)
1502 /* Fake sorting the stack vars for account_stack_vars (). */
1503 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1504 for (i = 0; i < stack_vars_num; ++i)
1505 stack_vars_sorted[i] = i;
1506 size += account_stack_vars ();
1507 fini_vars_expansion ();
1509 pop_cfun ();
1510 current_function_decl = old_cur_fun_decl;
1511 return size;
1514 /* Expand all variables used in the function. */
1516 static void
1517 expand_used_vars (void)
1519 tree var, outer_block = DECL_INITIAL (current_function_decl);
1520 VEC(tree,heap) *maybe_local_decls = NULL;
1521 unsigned i;
1522 unsigned len;
1524 /* Compute the phase of the stack frame for this function. */
1526 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1527 int off = STARTING_FRAME_OFFSET % align;
1528 frame_phase = off ? align - off : 0;
1531 init_vars_expansion ();
1533 for (i = 0; i < SA.map->num_partitions; i++)
1535 tree var = partition_to_var (SA.map, i);
1537 gcc_assert (is_gimple_reg (var));
1538 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1539 expand_one_var (var, true, true);
1540 else
1542 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1543 contain the default def (representing the parm or result itself)
1544 we don't do anything here. But those which don't contain the
1545 default def (representing a temporary based on the parm/result)
1546 we need to allocate space just like for normal VAR_DECLs. */
1547 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1549 expand_one_var (var, true, true);
1550 gcc_assert (SA.partition_to_pseudo[i]);
1555 /* At this point all variables on the local_decls with TREE_USED
1556 set are not associated with any block scope. Lay them out. */
1558 len = VEC_length (tree, cfun->local_decls);
1559 FOR_EACH_LOCAL_DECL (cfun, i, var)
1561 bool expand_now = false;
1563 /* Expanded above already. */
1564 if (is_gimple_reg (var))
1566 TREE_USED (var) = 0;
1567 goto next;
1569 /* We didn't set a block for static or extern because it's hard
1570 to tell the difference between a global variable (re)declared
1571 in a local scope, and one that's really declared there to
1572 begin with. And it doesn't really matter much, since we're
1573 not giving them stack space. Expand them now. */
1574 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1575 expand_now = true;
1577 /* If the variable is not associated with any block, then it
1578 was created by the optimizers, and could be live anywhere
1579 in the function. */
1580 else if (TREE_USED (var))
1581 expand_now = true;
1583 /* Finally, mark all variables on the list as used. We'll use
1584 this in a moment when we expand those associated with scopes. */
1585 TREE_USED (var) = 1;
1587 if (expand_now)
1588 expand_one_var (var, true, true);
1590 next:
1591 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1593 rtx rtl = DECL_RTL_IF_SET (var);
1595 /* Keep artificial non-ignored vars in cfun->local_decls
1596 chain until instantiate_decls. */
1597 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1598 add_local_decl (cfun, var);
1599 else if (rtl == NULL_RTX)
1600 /* If rtl isn't set yet, which can happen e.g. with
1601 -fstack-protector, retry before returning from this
1602 function. */
1603 VEC_safe_push (tree, heap, maybe_local_decls, var);
1607 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1609 +-----------------+-----------------+
1610 | ...processed... | ...duplicates...|
1611 +-----------------+-----------------+
 1612                        ^
 1613                        +-- LEN points here.
1615 We just want the duplicates, as those are the artificial
1616 non-ignored vars that we want to keep until instantiate_decls.
1617 Move them down and truncate the array. */
1618 if (!VEC_empty (tree, cfun->local_decls))
1619 VEC_block_remove (tree, cfun->local_decls, 0, len);
1621 /* At this point, all variables within the block tree with TREE_USED
1622 set are actually used by the optimized function. Lay them out. */
1623 expand_used_vars_for_block (outer_block, true);
1625 if (stack_vars_num > 0)
1627 add_scope_conflicts ();
1628 /* Due to the way alias sets work, no variables with non-conflicting
1629 alias sets may be assigned the same address. Add conflicts to
1630 reflect this. */
1631 add_alias_set_conflicts ();
1633 /* If stack protection is enabled, we don't share space between
1634 vulnerable data and non-vulnerable data. */
1635 if (flag_stack_protect)
1636 add_stack_protection_conflicts ();
1638 /* Now that we have collected all stack variables, and have computed a
1639 minimal interference graph, attempt to save some stack space. */
1640 partition_stack_vars ();
1641 if (dump_file)
1642 dump_stack_var_partition ();
1645 /* There are several conditions under which we should create a
1646 stack guard: protect-all, alloca used, protected decls present. */
1647 if (flag_stack_protect == 2
1648 || (flag_stack_protect
1649 && (cfun->calls_alloca || has_protected_decls)))
1650 create_stack_guard ();
1652 /* Assign rtl to each variable based on these partitions. */
1653 if (stack_vars_num > 0)
1655 /* Reorder decls to be protected by iterating over the variables
1656 array multiple times, and allocating out of each phase in turn. */
1657 /* ??? We could probably integrate this into the qsort we did
1658 earlier, such that we naturally see these variables first,
1659 and thus naturally allocate things in the right order. */
1660 if (has_protected_decls)
1662 /* Phase 1 contains only character arrays. */
1663 expand_stack_vars (stack_protect_decl_phase_1);
1665 /* Phase 2 contains other kinds of arrays. */
1666 if (flag_stack_protect == 2)
1667 expand_stack_vars (stack_protect_decl_phase_2);
1670 expand_stack_vars (NULL);
1672 fini_vars_expansion ();
1675 /* If there were any artificial non-ignored vars without rtl
1676 found earlier, see if deferred stack allocation hasn't assigned
1677 rtl to them. */
1678 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1680 rtx rtl = DECL_RTL_IF_SET (var);
1682 /* Keep artificial non-ignored vars in cfun->local_decls
1683 chain until instantiate_decls. */
1684 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1685 add_local_decl (cfun, var);
1687 VEC_free (tree, heap, maybe_local_decls);
1689 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1690 if (STACK_ALIGNMENT_NEEDED)
1692 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1693 if (!FRAME_GROWS_DOWNWARD)
1694 frame_offset += align - 1;
1695 frame_offset &= -align;
1700 /* If we need to produce a detailed dump, print the tree representation
1701 for STMT to the dump file. SINCE is the last RTX after which the RTL
1702 generated for STMT should have been appended. */
1704 static void
1705 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1707 if (dump_file && (dump_flags & TDF_DETAILS))
1709 fprintf (dump_file, "\n;; ");
1710 print_gimple_stmt (dump_file, stmt, 0,
1711 TDF_SLIM | (dump_flags & TDF_LINENO));
1712 fprintf (dump_file, "\n");
1714 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1718 /* Maps the blocks that do not contain tree labels to rtx labels. */
1720 static struct pointer_map_t *lab_rtx_for_bb;
1722 /* Returns the label_rtx expression for a label starting basic block BB. */
1724 static rtx
1725 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1727 gimple_stmt_iterator gsi;
1728 tree lab;
1729 gimple lab_stmt;
1730 void **elt;
1732 if (bb->flags & BB_RTL)
1733 return block_label (bb);
1735 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1736 if (elt)
1737 return (rtx) *elt;
1739 /* Find the tree label if it is present. */
1741 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1743 lab_stmt = gsi_stmt (gsi);
1744 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1745 break;
1747 lab = gimple_label_label (lab_stmt);
1748 if (DECL_NONLOCAL (lab))
1749 break;
1751 return label_rtx (lab);
1754 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1755 *elt = gen_label_rtx ();
1756 return (rtx) *elt;
1760 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1761 of a basic block where we just expanded the conditional at the end,
1762 possibly clean up the CFG and instruction sequence. LAST is the
1763 last instruction before the just emitted jump sequence. */
1765 static void
1766 maybe_cleanup_end_of_block (edge e, rtx last)
1768 /* Special case: when jumpif decides that the condition is
1769 trivial it emits an unconditional jump (and the necessary
1770 barrier). But we still have two edges, the fallthru one is
1771 wrong. purge_dead_edges would clean this up later. Unfortunately
1772 we have to insert insns (and split edges) before
1773 find_many_sub_basic_blocks and hence before purge_dead_edges.
1774 But splitting edges might create new blocks which depend on the
1775 fact that if there are two edges there's no barrier. So the
1776 barrier would get lost and verify_flow_info would ICE. Instead
1777 of auditing all edge splitters to care for the barrier (which
1778 normally isn't there in a cleaned CFG), fix it here. */
1779 if (BARRIER_P (get_last_insn ()))
1781 rtx insn;
1782 remove_edge (e);
1783 /* Now, we have a single successor block, if we have insns to
1784 insert on the remaining edge we potentially will insert
1785 it at the end of this block (if the dest block isn't feasible)
1786 in order to avoid splitting the edge. This insertion will take
1787 place in front of the last jump. But we might have emitted
1788 multiple jumps (conditional and one unconditional) to the
1789 same destination. Inserting in front of the last one then
1790 is a problem. See PR 40021. We fix this by deleting all
1791 jumps except the last unconditional one. */
1792 insn = PREV_INSN (get_last_insn ());
1793 /* Make sure we have an unconditional jump. Otherwise we're
1794 confused. */
1795 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1796 for (insn = PREV_INSN (insn); insn != last;)
1798 insn = PREV_INSN (insn);
1799 if (JUMP_P (NEXT_INSN (insn)))
1801 if (!any_condjump_p (NEXT_INSN (insn)))
1803 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1804 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1806 delete_insn (NEXT_INSN (insn));
1812 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1813 Returns a new basic block if we've terminated the current basic
1814 block and created a new one. */
1816 static basic_block
1817 expand_gimple_cond (basic_block bb, gimple stmt)
1819 basic_block new_bb, dest;
1820 edge new_edge;
1821 edge true_edge;
1822 edge false_edge;
1823 rtx last2, last;
1824 enum tree_code code;
1825 tree op0, op1;
1827 code = gimple_cond_code (stmt);
1828 op0 = gimple_cond_lhs (stmt);
1829 op1 = gimple_cond_rhs (stmt);
1830 /* We're sometimes presented with such code:
1831 D.123_1 = x < y;
1832 if (D.123_1 != 0)
1834 This would expand to two comparisons which then later might
1835 be cleaned up by combine. But some pattern matchers like if-conversion
1836 work better when there's only one compare, so make up for this
1837 here as special exception if TER would have made the same change. */
1838 if (gimple_cond_single_var_p (stmt)
1839 && SA.values
1840 && TREE_CODE (op0) == SSA_NAME
1841 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1843 gimple second = SSA_NAME_DEF_STMT (op0);
1844 if (gimple_code (second) == GIMPLE_ASSIGN)
1846 enum tree_code code2 = gimple_assign_rhs_code (second);
1847 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1849 code = code2;
1850 op0 = gimple_assign_rhs1 (second);
1851 op1 = gimple_assign_rhs2 (second);
1853 /* If jumps are cheap turn some more codes into
1854 jumpy sequences. */
1855 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1857 if ((code2 == BIT_AND_EXPR
1858 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1859 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1860 || code2 == TRUTH_AND_EXPR)
1862 code = TRUTH_ANDIF_EXPR;
1863 op0 = gimple_assign_rhs1 (second);
1864 op1 = gimple_assign_rhs2 (second);
1866 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1868 code = TRUTH_ORIF_EXPR;
1869 op0 = gimple_assign_rhs1 (second);
1870 op1 = gimple_assign_rhs2 (second);
1876 last2 = last = get_last_insn ();
1878 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1879 set_curr_insn_source_location (gimple_location (stmt));
1880 set_curr_insn_block (gimple_block (stmt));
1882 /* These flags have no purpose in RTL land. */
1883 true_edge->flags &= ~EDGE_TRUE_VALUE;
1884 false_edge->flags &= ~EDGE_FALSE_VALUE;
1886 /* We can either have a pure conditional jump with one fallthru edge or
1887 a two-way jump that needs to be decomposed into two basic blocks. */
1888 if (false_edge->dest == bb->next_bb)
1890 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1891 true_edge->probability);
1892 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1893 if (true_edge->goto_locus)
1895 set_curr_insn_source_location (true_edge->goto_locus);
1896 set_curr_insn_block (true_edge->goto_block);
1897 true_edge->goto_locus = curr_insn_locator ();
1899 true_edge->goto_block = NULL;
1900 false_edge->flags |= EDGE_FALLTHRU;
1901 maybe_cleanup_end_of_block (false_edge, last);
1902 return NULL;
1904 if (true_edge->dest == bb->next_bb)
1906 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1907 false_edge->probability);
1908 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1909 if (false_edge->goto_locus)
1911 set_curr_insn_source_location (false_edge->goto_locus);
1912 set_curr_insn_block (false_edge->goto_block);
1913 false_edge->goto_locus = curr_insn_locator ();
1915 false_edge->goto_block = NULL;
1916 true_edge->flags |= EDGE_FALLTHRU;
1917 maybe_cleanup_end_of_block (true_edge, last);
1918 return NULL;
1921 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1922 true_edge->probability);
1923 last = get_last_insn ();
1924 if (false_edge->goto_locus)
1926 set_curr_insn_source_location (false_edge->goto_locus);
1927 set_curr_insn_block (false_edge->goto_block);
1928 false_edge->goto_locus = curr_insn_locator ();
1930 false_edge->goto_block = NULL;
1931 emit_jump (label_rtx_for_bb (false_edge->dest));
1933 BB_END (bb) = last;
1934 if (BARRIER_P (BB_END (bb)))
1935 BB_END (bb) = PREV_INSN (BB_END (bb));
1936 update_bb_for_insn (bb);
1938 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1939 dest = false_edge->dest;
1940 redirect_edge_succ (false_edge, new_bb);
1941 false_edge->flags |= EDGE_FALLTHRU;
1942 new_bb->count = false_edge->count;
1943 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1944 if (current_loops && bb->loop_father)
1945 add_bb_to_loop (new_bb, bb->loop_father);
1946 new_edge = make_edge (new_bb, dest, 0);
1947 new_edge->probability = REG_BR_PROB_BASE;
1948 new_edge->count = new_bb->count;
1949 if (BARRIER_P (BB_END (new_bb)))
1950 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1951 update_bb_for_insn (new_bb);
1953 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1955 if (true_edge->goto_locus)
1957 set_curr_insn_source_location (true_edge->goto_locus);
1958 set_curr_insn_block (true_edge->goto_block);
1959 true_edge->goto_locus = curr_insn_locator ();
1961 true_edge->goto_block = NULL;
1963 return new_bb;
1966 /* Mark all calls that can have a transaction restart. */
1968 static void
1969 mark_transaction_restart_calls (gimple stmt)
1971 struct tm_restart_node dummy;
1972 void **slot;
1974 if (!cfun->gimple_df->tm_restart)
1975 return;
1977 dummy.stmt = stmt;
1978 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1979 if (slot)
1981 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1982 tree list = n->label_or_list;
1983 rtx insn;
1985 for (insn = next_real_insn (get_last_insn ());
1986 !CALL_P (insn);
1987 insn = next_real_insn (insn))
1988 continue;
1990 if (TREE_CODE (list) == LABEL_DECL)
1991 add_reg_note (insn, REG_TM, label_rtx (list));
1992 else
1993 for (; list ; list = TREE_CHAIN (list))
1994 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1998 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1999 statement STMT. */
2001 static void
2002 expand_call_stmt (gimple stmt)
2004 tree exp, decl, lhs;
2005 bool builtin_p;
2006 size_t i;
2008 if (gimple_call_internal_p (stmt))
2010 expand_internal_call (stmt);
2011 return;
2014 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2016 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2017 decl = gimple_call_fndecl (stmt);
2018 builtin_p = decl && DECL_BUILT_IN (decl);
2020 /* If this is not a builtin function, the function type through which the
2021 call is made may be different from the type of the function. */
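  /* A hypothetical example of such a mismatch: a call "foo (42)" to a
     function declared with an old-style "int foo ();" declaration gives
     a call-site type (gimple_call_fntype) that records the promoted
     argument types, which need not match TREE_TYPE (decl); the callee
     address is therefore converted to a pointer to the call-site type
     here.  */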
2022 if (!builtin_p)
2023 CALL_EXPR_FN (exp)
2024 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2025 CALL_EXPR_FN (exp));
2027 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2028 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2030 for (i = 0; i < gimple_call_num_args (stmt); i++)
2032 tree arg = gimple_call_arg (stmt, i);
2033 gimple def;
2034 /* Using TER, forward addresses into arguments of builtin functions so we have a
2035 chance to infer more correct alignment information. See PR39954. */
2036 if (builtin_p
2037 && TREE_CODE (arg) == SSA_NAME
2038 && (def = get_gimple_for_ssa_name (arg))
2039 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2040 arg = gimple_assign_rhs1 (def);
2041 CALL_EXPR_ARG (exp, i) = arg;
2044 if (gimple_has_side_effects (stmt))
2045 TREE_SIDE_EFFECTS (exp) = 1;
2047 if (gimple_call_nothrow_p (stmt))
2048 TREE_NOTHROW (exp) = 1;
2050 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2051 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2052 if (decl
2053 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2054 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2055 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2056 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2057 else
2058 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2059 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2060 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2061 TREE_BLOCK (exp) = gimple_block (stmt);
2063 /* Ensure RTL is created for debug args. */
2064 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2066 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2067 unsigned int ix;
2068 tree dtemp;
2070 if (debug_args)
2071 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2073 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2074 expand_debug_expr (dtemp);
2078 lhs = gimple_call_lhs (stmt);
2079 if (lhs)
2080 expand_assignment (lhs, exp, false);
2081 else
2082 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2084 mark_transaction_restart_calls (stmt);
2087 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2088 STMT that doesn't require special handling for outgoing edges. That
2089 is no tailcalls and no GIMPLE_COND. */
2091 static void
2092 expand_gimple_stmt_1 (gimple stmt)
2094 tree op0;
2096 set_curr_insn_source_location (gimple_location (stmt));
2097 set_curr_insn_block (gimple_block (stmt));
2099 switch (gimple_code (stmt))
2101 case GIMPLE_GOTO:
2102 op0 = gimple_goto_dest (stmt);
2103 if (TREE_CODE (op0) == LABEL_DECL)
2104 expand_goto (op0);
2105 else
2106 expand_computed_goto (op0);
2107 break;
2108 case GIMPLE_LABEL:
2109 expand_label (gimple_label_label (stmt));
2110 break;
2111 case GIMPLE_NOP:
2112 case GIMPLE_PREDICT:
2113 break;
2114 case GIMPLE_SWITCH:
2115 expand_case (stmt);
2116 break;
2117 case GIMPLE_ASM:
2118 expand_asm_stmt (stmt);
2119 break;
2120 case GIMPLE_CALL:
2121 expand_call_stmt (stmt);
2122 break;
2124 case GIMPLE_RETURN:
2125 op0 = gimple_return_retval (stmt);
2127 if (op0 && op0 != error_mark_node)
2129 tree result = DECL_RESULT (current_function_decl);
2131 /* If we are not returning the current function's RESULT_DECL,
2132 build an assignment to it. */
2133 if (op0 != result)
2135 /* I believe that a function's RESULT_DECL is unique. */
2136 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2138 /* ??? We'd like to use simply expand_assignment here,
2139 but this fails if the value is of BLKmode but the return
2140 decl is a register. expand_return has special handling
2141 for this combination, which eventually should move
2142 to common code. See comments there. Until then, let's
2143 build a modify expression :-/ */
2144 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2145 result, op0);
2148 if (!op0)
2149 expand_null_return ();
2150 else
2151 expand_return (op0);
2152 break;
2154 case GIMPLE_ASSIGN:
2156 tree lhs = gimple_assign_lhs (stmt);
2158 /* Tree expand used to fiddle with |= and &= of two bitfield
2159 COMPONENT_REFs here. This can't happen with gimple, the LHS
2160 of binary assigns must be a gimple reg. */
2162 if (TREE_CODE (lhs) != SSA_NAME
2163 || get_gimple_rhs_class (gimple_expr_code (stmt))
2164 == GIMPLE_SINGLE_RHS)
2166 tree rhs = gimple_assign_rhs1 (stmt);
2167 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2168 == GIMPLE_SINGLE_RHS);
2169 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2170 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2171 if (TREE_CLOBBER_P (rhs))
2172 /* This is a clobber to mark the going out of scope for
2173 this LHS. */
2175 else
2176 expand_assignment (lhs, rhs,
2177 gimple_assign_nontemporal_move_p (stmt));
2179 else
2181 rtx target, temp;
2182 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2183 struct separate_ops ops;
2184 bool promoted = false;
2186 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2187 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2188 promoted = true;
2190 ops.code = gimple_assign_rhs_code (stmt);
2191 ops.type = TREE_TYPE (lhs);
2192 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2194 case GIMPLE_TERNARY_RHS:
2195 ops.op2 = gimple_assign_rhs3 (stmt);
2196 /* Fallthru */
2197 case GIMPLE_BINARY_RHS:
2198 ops.op1 = gimple_assign_rhs2 (stmt);
2199 /* Fallthru */
2200 case GIMPLE_UNARY_RHS:
2201 ops.op0 = gimple_assign_rhs1 (stmt);
2202 break;
2203 default:
2204 gcc_unreachable ();
2206 ops.location = gimple_location (stmt);
2208 /* If we want to use a nontemporal store, force the value to
2209 register first. If we store into a promoted register,
2210 don't directly expand to target. */
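      /* For instance (a sketch, not from the sources): if the LHS lives
	 in a register promoted from QImode to SImode, TARGET is a
	 promoted subreg and the RHS must not be expanded straight into
	 it, because the implicit extension to SImode would be lost; the
	 convert_move below re-applies it on SUBREG_REG (target).  */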
2211 temp = nontemporal || promoted ? NULL_RTX : target;
2212 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2213 EXPAND_NORMAL);
2215 if (temp == target)
2217 else if (promoted)
2219 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2220 /* If TEMP is a VOIDmode constant, use convert_modes to make
2221 sure that we properly convert it. */
2222 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2224 temp = convert_modes (GET_MODE (target),
2225 TYPE_MODE (ops.type),
2226 temp, unsignedp);
2227 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2228 GET_MODE (target), temp, unsignedp);
2231 convert_move (SUBREG_REG (target), temp, unsignedp);
2233 else if (nontemporal && emit_storent_insn (target, temp))
2235 else
2237 temp = force_operand (temp, target);
2238 if (temp != target)
2239 emit_move_insn (target, temp);
2243 break;
2245 default:
2246 gcc_unreachable ();
2250 /* Expand one gimple statement STMT and return the last RTL instruction
2251 before any of the newly generated ones.
2253 In addition to generating the necessary RTL instructions this also
2254 sets REG_EH_REGION notes if necessary and sets the current source
2255 location for diagnostics. */
2257 static rtx
2258 expand_gimple_stmt (gimple stmt)
2260 location_t saved_location = input_location;
2261 rtx last = get_last_insn ();
2262 int lp_nr;
2264 gcc_assert (cfun);
2266 /* We need to save and restore the current source location so that errors
2267 discovered during expansion are emitted with the right location. But
2268 it would be better if the diagnostic routines used the source location
2269 embedded in the tree nodes rather than globals. */
2270 if (gimple_has_location (stmt))
2271 input_location = gimple_location (stmt);
2273 expand_gimple_stmt_1 (stmt);
2275 /* Free any temporaries used to evaluate this statement. */
2276 free_temp_slots ();
2278 input_location = saved_location;
2280 /* Mark all insns that may trap. */
2281 lp_nr = lookup_stmt_eh_lp (stmt);
2282 if (lp_nr)
2284 rtx insn;
2285 for (insn = next_real_insn (last); insn;
2286 insn = next_real_insn (insn))
2288 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2289 /* If we want exceptions for non-call insns, any
2290 may_trap_p instruction may throw. */
2291 && GET_CODE (PATTERN (insn)) != CLOBBER
2292 && GET_CODE (PATTERN (insn)) != USE
2293 && insn_could_throw_p (insn))
2294 make_reg_eh_region_note (insn, 0, lp_nr);
2298 return last;
2301 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2302 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2303 generated a tail call (something that might be denied by the ABI
2304 rules governing the call; see calls.c).
2306 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2307 can still reach the rest of BB. The case here is __builtin_sqrt,
2308 where the NaN result goes through the external function (with a
2309 tailcall) and the normal result happens via a sqrt instruction. */
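/* A minimal sketch of that situation, assuming a target with a square
   root instruction and math functions that may set errno:

       double f (double x) { return sqrt (x); }

   expands to the inline sqrt insn for the common case plus a sibcall to
   the library sqrt on the path where errno must be set, so control can
   still fall through into the rest of BB.  */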
2311 static basic_block
2312 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2314 rtx last2, last;
2315 edge e;
2316 edge_iterator ei;
2317 int probability;
2318 gcov_type count;
2320 last2 = last = expand_gimple_stmt (stmt);
2322 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2323 if (CALL_P (last) && SIBLING_CALL_P (last))
2324 goto found;
2326 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2328 *can_fallthru = true;
2329 return NULL;
2331 found:
2332 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2333 Any instructions emitted here are about to be deleted. */
2334 do_pending_stack_adjust ();
2336 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2337 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2338 EH or abnormal edges, we shouldn't have created a tail call in
2339 the first place. So it seems to me we should just be removing
2340 all edges here, or redirecting the existing fallthru edge to
2341 the exit block. */
2343 probability = 0;
2344 count = 0;
2346 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2348 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2350 if (e->dest != EXIT_BLOCK_PTR)
2352 e->dest->count -= e->count;
2353 e->dest->frequency -= EDGE_FREQUENCY (e);
2354 if (e->dest->count < 0)
2355 e->dest->count = 0;
2356 if (e->dest->frequency < 0)
2357 e->dest->frequency = 0;
2359 count += e->count;
2360 probability += e->probability;
2361 remove_edge (e);
2363 else
2364 ei_next (&ei);
2367 /* This is somewhat ugly: the call_expr expander often emits instructions
2368 after the sibcall (to perform the function return). These confuse the
2369 find_many_sub_basic_blocks code, so we need to get rid of them. */
2370 last = NEXT_INSN (last);
2371 gcc_assert (BARRIER_P (last));
2373 *can_fallthru = false;
2374 while (NEXT_INSN (last))
2376 /* For instance the sqrt builtin expander expands an if with a
2377 sibcall in the then-branch and a label for the else-branch. */
2378 if (LABEL_P (NEXT_INSN (last)))
2380 *can_fallthru = true;
2381 break;
2383 delete_insn (NEXT_INSN (last));
2386 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2387 e->probability += probability;
2388 e->count += count;
2389 BB_END (bb) = last;
2390 update_bb_for_insn (bb);
2392 if (NEXT_INSN (last))
2394 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2396 last = BB_END (bb);
2397 if (BARRIER_P (last))
2398 BB_END (bb) = PREV_INSN (last);
2401 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2403 return bb;
2406 /* Return the difference between the floor and the truncated result of
2407 a signed division by OP1 with remainder MOD. */
2408 static rtx
2409 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2411 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2412 return gen_rtx_IF_THEN_ELSE
2413 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2414 gen_rtx_IF_THEN_ELSE
2415 (mode, gen_rtx_LT (BImode,
2416 gen_rtx_DIV (mode, op1, mod),
2417 const0_rtx),
2418 constm1_rtx, const0_rtx),
2419 const0_rtx);
2422 /* Return the difference between the ceil and the truncated result of
2423 a signed division by OP1 with remainder MOD. */
2424 static rtx
2425 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2427 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2428 return gen_rtx_IF_THEN_ELSE
2429 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2430 gen_rtx_IF_THEN_ELSE
2431 (mode, gen_rtx_GT (BImode,
2432 gen_rtx_DIV (mode, op1, mod),
2433 const0_rtx),
2434 const1_rtx, const0_rtx),
2435 const0_rtx);
2438 /* Return the difference between the ceil and the truncated result of
2439 an unsigned division by OP1 with remainder MOD. */
2440 static rtx
2441 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2443 /* (mod != 0 ? 1 : 0) */
2444 return gen_rtx_IF_THEN_ELSE
2445 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2446 const1_rtx, const0_rtx);
2449 /* Return the difference between the rounded and the truncated result
2450 of a signed division by OP1 with remainder MOD. Halfway cases are
2451 rounded away from zero, rather than to the nearest even number. */
2452 static rtx
2453 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2455 /* (abs (mod) >= abs (op1) - abs (mod)
2456 ? (op1 / mod > 0 ? 1 : -1)
2457 : 0) */
2458 return gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2460 gen_rtx_MINUS (mode,
2461 gen_rtx_ABS (mode, op1),
2462 gen_rtx_ABS (mode, mod))),
2463 gen_rtx_IF_THEN_ELSE
2464 (mode, gen_rtx_GT (BImode,
2465 gen_rtx_DIV (mode, op1, mod),
2466 const0_rtx),
2467 const1_rtx, constm1_rtx),
2468 const0_rtx);
2471 /* Return the difference between the rounded and the truncated result
2472 of an unsigned division by OP1 with remainder MOD. Halfway cases
2473 are rounded away from zero, rather than to the nearest even
2474 number. */
2475 static rtx
2476 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2478 /* (mod >= op1 - mod ? 1 : 0) */
2479 return gen_rtx_IF_THEN_ELSE
2480 (mode, gen_rtx_GE (BImode, mod,
2481 gen_rtx_MINUS (mode, op1, mod)),
2482 const1_rtx, const0_rtx);
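/* A self-contained sketch (plain C on host integers, not used by the
   compiler) of the signed adjustments built above, with the target's
   truncating DIV and MOD written as the C operators / and %.  */
#if 0
static long
floor_div (long a, long b)
{
  long q = a / b, r = a % b;
  /* floor_sdiv_adjust: -1 when the remainder is nonzero and the
     operands have opposite signs.  */
  return q + ((r != 0 && b / r < 0) ? -1 : 0);
}

static long
ceil_div (long a, long b)
{
  long q = a / b, r = a % b;
  /* ceil_sdiv_adjust: +1 when the remainder is nonzero and the
     operands have the same sign.  */
  return q + ((r != 0 && b / r > 0) ? 1 : 0);
}

static long
round_div (long a, long b)
{
  long q = a / b, r = a % b;
  long absr = r < 0 ? -r : r, absb = b < 0 ? -b : b;
  /* round_sdiv_adjust: step away from zero when the remainder is at
     least half of the divisor (halfway cases round away from zero).  */
  return q + (absr >= absb - absr ? (b / r > 0 ? 1 : -1) : 0);
}
#endif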
2485 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2486 any rtl. */
2488 static rtx
2489 convert_debug_memory_address (enum machine_mode mode, rtx x,
2490 addr_space_t as)
2492 enum machine_mode xmode = GET_MODE (x);
2494 #ifndef POINTERS_EXTEND_UNSIGNED
2495 gcc_assert (mode == Pmode
2496 || mode == targetm.addr_space.address_mode (as));
2497 gcc_assert (xmode == mode || xmode == VOIDmode);
2498 #else
2499 rtx temp;
2501 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2503 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2504 return x;
2506 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2507 x = simplify_gen_subreg (mode, x, xmode,
2508 subreg_lowpart_offset
2509 (mode, xmode));
2510 else if (POINTERS_EXTEND_UNSIGNED > 0)
2511 x = gen_rtx_ZERO_EXTEND (mode, x);
2512 else if (!POINTERS_EXTEND_UNSIGNED)
2513 x = gen_rtx_SIGN_EXTEND (mode, x);
2514 else
2516 switch (GET_CODE (x))
2518 case SUBREG:
2519 if ((SUBREG_PROMOTED_VAR_P (x)
2520 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2521 || (GET_CODE (SUBREG_REG (x)) == PLUS
2522 && REG_P (XEXP (SUBREG_REG (x), 0))
2523 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2524 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2525 && GET_MODE (SUBREG_REG (x)) == mode)
2526 return SUBREG_REG (x);
2527 break;
2528 case LABEL_REF:
2529 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2530 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2531 return temp;
2532 case SYMBOL_REF:
2533 temp = shallow_copy_rtx (x);
2534 PUT_MODE (temp, mode);
2535 return temp;
2536 case CONST:
2537 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2538 if (temp)
2539 temp = gen_rtx_CONST (mode, temp);
2540 return temp;
2541 case PLUS:
2542 case MINUS:
2543 if (CONST_INT_P (XEXP (x, 1)))
2545 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2546 if (temp)
2547 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2549 break;
2550 default:
2551 break;
2553 /* Don't know how to express ptr_extend as operation in debug info. */
2554 return NULL;
2556 #endif /* POINTERS_EXTEND_UNSIGNED */
2558 return x;
2561 /* Return an RTX equivalent to the value of the parameter DECL. */
2563 static rtx
2564 expand_debug_parm_decl (tree decl)
2566 rtx incoming = DECL_INCOMING_RTL (decl);
2568 if (incoming
2569 && GET_MODE (incoming) != BLKmode
2570 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2571 || (MEM_P (incoming)
2572 && REG_P (XEXP (incoming, 0))
2573 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2575 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2577 #ifdef HAVE_window_save
2578 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2579 If the target machine has an explicit window save instruction, the
2580 actual entry value is the corresponding OUTGOING_REGNO instead. */
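      /* On a register-window target such as SPARC, for instance, a
	 parameter whose DECL_INCOMING_RTL is the callee-window register
	 %i0 was in the caller's outgoing register %o0 at the function
	 entry point, so the ENTRY_VALUE built here has to name %o0
	 (SPARC is used only as an illustration).  */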
2581 if (REG_P (incoming)
2582 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2583 incoming
2584 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2585 OUTGOING_REGNO (REGNO (incoming)), 0);
2586 else if (MEM_P (incoming))
2588 rtx reg = XEXP (incoming, 0);
2589 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2591 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2592 incoming = replace_equiv_address_nv (incoming, reg);
2595 #endif
2597 ENTRY_VALUE_EXP (rtl) = incoming;
2598 return rtl;
2601 if (incoming
2602 && GET_MODE (incoming) != BLKmode
2603 && !TREE_ADDRESSABLE (decl)
2604 && MEM_P (incoming)
2605 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2606 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2607 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2608 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2609 return incoming;
2611 return NULL_RTX;
2614 /* Return an RTX equivalent to the value of the tree expression EXP. */
2616 static rtx
2617 expand_debug_expr (tree exp)
2619 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2620 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2621 enum machine_mode inner_mode = VOIDmode;
2622 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2623 addr_space_t as;
2625 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2627 case tcc_expression:
2628 switch (TREE_CODE (exp))
2630 case COND_EXPR:
2631 case DOT_PROD_EXPR:
2632 case WIDEN_MULT_PLUS_EXPR:
2633 case WIDEN_MULT_MINUS_EXPR:
2634 case FMA_EXPR:
2635 goto ternary;
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 case TRUTH_AND_EXPR:
2640 case TRUTH_OR_EXPR:
2641 case TRUTH_XOR_EXPR:
2642 goto binary;
2644 case TRUTH_NOT_EXPR:
2645 goto unary;
2647 default:
2648 break;
2650 break;
2652 ternary:
2653 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2654 if (!op2)
2655 return NULL_RTX;
2656 /* Fall through. */
2658 binary:
2659 case tcc_binary:
2660 case tcc_comparison:
2661 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2662 if (!op1)
2663 return NULL_RTX;
2664 /* Fall through. */
2666 unary:
2667 case tcc_unary:
2668 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2669 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2670 if (!op0)
2671 return NULL_RTX;
2672 break;
2674 case tcc_type:
2675 case tcc_statement:
2676 gcc_unreachable ();
2678 case tcc_constant:
2679 case tcc_exceptional:
2680 case tcc_declaration:
2681 case tcc_reference:
2682 case tcc_vl_exp:
2683 break;
2686 switch (TREE_CODE (exp))
2688 case STRING_CST:
2689 if (!lookup_constant_def (exp))
2691 if (strlen (TREE_STRING_POINTER (exp)) + 1
2692 != (size_t) TREE_STRING_LENGTH (exp))
2693 return NULL_RTX;
2694 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2695 op0 = gen_rtx_MEM (BLKmode, op0);
2696 set_mem_attributes (op0, exp, 0);
2697 return op0;
2699 /* Fall through... */
2701 case INTEGER_CST:
2702 case REAL_CST:
2703 case FIXED_CST:
2704 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2705 return op0;
2707 case COMPLEX_CST:
2708 gcc_assert (COMPLEX_MODE_P (mode));
2709 op0 = expand_debug_expr (TREE_REALPART (exp));
2710 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2711 return gen_rtx_CONCAT (mode, op0, op1);
2713 case DEBUG_EXPR_DECL:
2714 op0 = DECL_RTL_IF_SET (exp);
2716 if (op0)
2717 return op0;
2719 op0 = gen_rtx_DEBUG_EXPR (mode);
2720 DEBUG_EXPR_TREE_DECL (op0) = exp;
2721 SET_DECL_RTL (exp, op0);
2723 return op0;
2725 case VAR_DECL:
2726 case PARM_DECL:
2727 case FUNCTION_DECL:
2728 case LABEL_DECL:
2729 case CONST_DECL:
2730 case RESULT_DECL:
2731 op0 = DECL_RTL_IF_SET (exp);
2733 /* This decl was probably optimized away. */
2734 if (!op0)
2736 if (TREE_CODE (exp) != VAR_DECL
2737 || DECL_EXTERNAL (exp)
2738 || !TREE_STATIC (exp)
2739 || !DECL_NAME (exp)
2740 || DECL_HARD_REGISTER (exp)
2741 || DECL_IN_CONSTANT_POOL (exp)
2742 || mode == VOIDmode)
2743 return NULL;
2745 op0 = make_decl_rtl_for_debug (exp);
2746 if (!MEM_P (op0)
2747 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2748 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2749 return NULL;
2751 else
2752 op0 = copy_rtx (op0);
2754 if (GET_MODE (op0) == BLKmode
2755 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2756 below would ICE. While it is likely a FE bug,
2757 try to be robust here. See PR43166. */
2758 || mode == BLKmode
2759 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2761 gcc_assert (MEM_P (op0));
2762 op0 = adjust_address_nv (op0, mode, 0);
2763 return op0;
2766 /* Fall through. */
2768 adjust_mode:
2769 case PAREN_EXPR:
2770 case NOP_EXPR:
2771 case CONVERT_EXPR:
2773 inner_mode = GET_MODE (op0);
2775 if (mode == inner_mode)
2776 return op0;
2778 if (inner_mode == VOIDmode)
2780 if (TREE_CODE (exp) == SSA_NAME)
2781 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2782 else
2783 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2784 if (mode == inner_mode)
2785 return op0;
2788 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2790 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2791 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2792 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2793 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2794 else
2795 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2797 else if (FLOAT_MODE_P (mode))
2799 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2801 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2802 else
2803 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2805 else if (FLOAT_MODE_P (inner_mode))
2807 if (unsignedp)
2808 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2809 else
2810 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2812 else if (CONSTANT_P (op0)
2813 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2814 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2815 subreg_lowpart_offset (mode,
2816 inner_mode));
2817 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2818 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2819 : unsignedp)
2820 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2821 else
2822 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2824 return op0;
2827 case MEM_REF:
2828 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2830 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2831 TREE_OPERAND (exp, 0),
2832 TREE_OPERAND (exp, 1));
2833 if (newexp)
2834 return expand_debug_expr (newexp);
2836 /* FALLTHROUGH */
2837 case INDIRECT_REF:
2838 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2839 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2840 if (!op0)
2841 return NULL;
2843 if (TREE_CODE (exp) == MEM_REF)
2845 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2846 || (GET_CODE (op0) == PLUS
2847 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2848 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2849 Instead just use get_inner_reference. */
2850 goto component_ref;
2852 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2853 if (!op1 || !CONST_INT_P (op1))
2854 return NULL;
2856 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2859 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2860 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2861 else
2862 as = ADDR_SPACE_GENERIC;
2864 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2865 op0, as);
2866 if (op0 == NULL_RTX)
2867 return NULL;
2869 op0 = gen_rtx_MEM (mode, op0);
2870 set_mem_attributes (op0, exp, 0);
2871 if (TREE_CODE (exp) == MEM_REF
2872 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2873 set_mem_expr (op0, NULL_TREE);
2874 set_mem_addr_space (op0, as);
2876 return op0;
2878 case TARGET_MEM_REF:
2879 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2880 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2881 return NULL;
2883 op0 = expand_debug_expr
2884 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2885 if (!op0)
2886 return NULL;
2888 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2889 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2890 else
2891 as = ADDR_SPACE_GENERIC;
2893 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2894 op0, as);
2895 if (op0 == NULL_RTX)
2896 return NULL;
2898 op0 = gen_rtx_MEM (mode, op0);
2900 set_mem_attributes (op0, exp, 0);
2901 set_mem_addr_space (op0, as);
2903 return op0;
2905 component_ref:
2906 case ARRAY_REF:
2907 case ARRAY_RANGE_REF:
2908 case COMPONENT_REF:
2909 case BIT_FIELD_REF:
2910 case REALPART_EXPR:
2911 case IMAGPART_EXPR:
2912 case VIEW_CONVERT_EXPR:
2914 enum machine_mode mode1;
2915 HOST_WIDE_INT bitsize, bitpos;
2916 tree offset;
2917 int volatilep = 0;
2918 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2919 &mode1, &unsignedp, &volatilep, false);
2920 rtx orig_op0;
2922 if (bitsize == 0)
2923 return NULL;
2925 orig_op0 = op0 = expand_debug_expr (tem);
2927 if (!op0)
2928 return NULL;
2930 if (offset)
2932 enum machine_mode addrmode, offmode;
2934 if (!MEM_P (op0))
2935 return NULL;
2937 op0 = XEXP (op0, 0);
2938 addrmode = GET_MODE (op0);
2939 if (addrmode == VOIDmode)
2940 addrmode = Pmode;
2942 op1 = expand_debug_expr (offset);
2943 if (!op1)
2944 return NULL;
2946 offmode = GET_MODE (op1);
2947 if (offmode == VOIDmode)
2948 offmode = TYPE_MODE (TREE_TYPE (offset));
2950 if (addrmode != offmode)
2951 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2952 subreg_lowpart_offset (addrmode,
2953 offmode));
2955 /* Don't use offset_address here, we don't need a
2956 recognizable address, and we don't want to generate
2957 code. */
2958 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2959 op0, op1));
2962 if (MEM_P (op0))
2964 if (mode1 == VOIDmode)
2965 /* Bitfield. */
2966 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2967 if (bitpos >= BITS_PER_UNIT)
2969 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2970 bitpos %= BITS_PER_UNIT;
2972 else if (bitpos < 0)
2974 HOST_WIDE_INT units
2975 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2976 op0 = adjust_address_nv (op0, mode1, units);
2977 bitpos += units * BITS_PER_UNIT;
2979 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2980 op0 = adjust_address_nv (op0, mode, 0);
2981 else if (GET_MODE (op0) != mode1)
2982 op0 = adjust_address_nv (op0, mode1, 0);
2983 else
2984 op0 = copy_rtx (op0);
2985 if (op0 == orig_op0)
2986 op0 = shallow_copy_rtx (op0);
2987 set_mem_attributes (op0, exp, 0);
2990 if (bitpos == 0 && mode == GET_MODE (op0))
2991 return op0;
2993 if (bitpos < 0)
2994 return NULL;
2996 if (GET_MODE (op0) == BLKmode)
2997 return NULL;
2999 if ((bitpos % BITS_PER_UNIT) == 0
3000 && bitsize == GET_MODE_BITSIZE (mode1))
3002 enum machine_mode opmode = GET_MODE (op0);
3004 if (opmode == VOIDmode)
3005 opmode = TYPE_MODE (TREE_TYPE (tem));
3007 /* This condition may hold if we're expanding the address
3008 right past the end of an array that turned out not to
3009 be addressable (i.e., the address was only computed in
3010 debug stmts). The gen_subreg below would rightfully
3011 crash, and the address doesn't really exist, so just
3012 drop it. */
3013 if (bitpos >= GET_MODE_BITSIZE (opmode))
3014 return NULL;
3016 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3017 return simplify_gen_subreg (mode, op0, opmode,
3018 bitpos / BITS_PER_UNIT);
3021 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3022 && TYPE_UNSIGNED (TREE_TYPE (exp))
3023 ? SIGN_EXTRACT
3024 : ZERO_EXTRACT, mode,
3025 GET_MODE (op0) != VOIDmode
3026 ? GET_MODE (op0)
3027 : TYPE_MODE (TREE_TYPE (tem)),
3028 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3031 case ABS_EXPR:
3032 return simplify_gen_unary (ABS, mode, op0, mode);
3034 case NEGATE_EXPR:
3035 return simplify_gen_unary (NEG, mode, op0, mode);
3037 case BIT_NOT_EXPR:
3038 return simplify_gen_unary (NOT, mode, op0, mode);
3040 case FLOAT_EXPR:
3041 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3042 0)))
3043 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3044 inner_mode);
3046 case FIX_TRUNC_EXPR:
3047 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3048 inner_mode);
3050 case POINTER_PLUS_EXPR:
3051 /* For the rare target where pointers are not the same size as
3052 size_t, we need to check for mis-matched modes and correct
3053 the addend. */
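      /* A hypothetical illustration: with a 16-bit sizetype but a wider
	 pointer mode, an offset of (size_t) -1 arrives as 0xffff and has
	 to be sign-extended so that the debug expression still means
	 "pointer minus one byte"; hence the unconditional SIGN_EXTEND
	 below.  */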
3054 if (op0 && op1
3055 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3056 && GET_MODE (op0) != GET_MODE (op1))
3058 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3059 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3060 GET_MODE (op1));
3061 else
3062 /* We always sign-extend, regardless of the signedness of
3063 the operand, because the operand is always unsigned
3064 here even if the original C expression is signed. */
3065 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3066 GET_MODE (op1));
3068 /* Fall through. */
3069 case PLUS_EXPR:
3070 return simplify_gen_binary (PLUS, mode, op0, op1);
3072 case MINUS_EXPR:
3073 return simplify_gen_binary (MINUS, mode, op0, op1);
3075 case MULT_EXPR:
3076 return simplify_gen_binary (MULT, mode, op0, op1);
3078 case RDIV_EXPR:
3079 case TRUNC_DIV_EXPR:
3080 case EXACT_DIV_EXPR:
3081 if (unsignedp)
3082 return simplify_gen_binary (UDIV, mode, op0, op1);
3083 else
3084 return simplify_gen_binary (DIV, mode, op0, op1);
3086 case TRUNC_MOD_EXPR:
3087 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3089 case FLOOR_DIV_EXPR:
3090 if (unsignedp)
3091 return simplify_gen_binary (UDIV, mode, op0, op1);
3092 else
3094 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3095 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3096 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3097 return simplify_gen_binary (PLUS, mode, div, adj);
3100 case FLOOR_MOD_EXPR:
3101 if (unsignedp)
3102 return simplify_gen_binary (UMOD, mode, op0, op1);
3103 else
3105 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3106 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3107 adj = simplify_gen_unary (NEG, mode,
3108 simplify_gen_binary (MULT, mode, adj, op1),
3109 mode);
3110 return simplify_gen_binary (PLUS, mode, mod, adj);
3113 case CEIL_DIV_EXPR:
3114 if (unsignedp)
3116 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3117 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3118 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3119 return simplify_gen_binary (PLUS, mode, div, adj);
3121 else
3123 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3124 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3125 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3126 return simplify_gen_binary (PLUS, mode, div, adj);
3129 case CEIL_MOD_EXPR:
3130 if (unsignedp)
3132 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3133 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3134 adj = simplify_gen_unary (NEG, mode,
3135 simplify_gen_binary (MULT, mode, adj, op1),
3136 mode);
3137 return simplify_gen_binary (PLUS, mode, mod, adj);
3139 else
3141 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3142 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3143 adj = simplify_gen_unary (NEG, mode,
3144 simplify_gen_binary (MULT, mode, adj, op1),
3145 mode);
3146 return simplify_gen_binary (PLUS, mode, mod, adj);
3149 case ROUND_DIV_EXPR:
3150 if (unsignedp)
3152 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3153 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3154 rtx adj = round_udiv_adjust (mode, mod, op1);
3155 return simplify_gen_binary (PLUS, mode, div, adj);
3157 else
3159 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3160 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3161 rtx adj = round_sdiv_adjust (mode, mod, op1);
3162 return simplify_gen_binary (PLUS, mode, div, adj);
3165 case ROUND_MOD_EXPR:
3166 if (unsignedp)
3168 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3169 rtx adj = round_udiv_adjust (mode, mod, op1);
3170 adj = simplify_gen_unary (NEG, mode,
3171 simplify_gen_binary (MULT, mode, adj, op1),
3172 mode);
3173 return simplify_gen_binary (PLUS, mode, mod, adj);
3175 else
3177 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3178 rtx adj = round_sdiv_adjust (mode, mod, op1);
3179 adj = simplify_gen_unary (NEG, mode,
3180 simplify_gen_binary (MULT, mode, adj, op1),
3181 mode);
3182 return simplify_gen_binary (PLUS, mode, mod, adj);
3185 case LSHIFT_EXPR:
3186 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3188 case RSHIFT_EXPR:
3189 if (unsignedp)
3190 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3191 else
3192 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3194 case LROTATE_EXPR:
3195 return simplify_gen_binary (ROTATE, mode, op0, op1);
3197 case RROTATE_EXPR:
3198 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3200 case MIN_EXPR:
3201 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3203 case MAX_EXPR:
3204 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3206 case BIT_AND_EXPR:
3207 case TRUTH_AND_EXPR:
3208 return simplify_gen_binary (AND, mode, op0, op1);
3210 case BIT_IOR_EXPR:
3211 case TRUTH_OR_EXPR:
3212 return simplify_gen_binary (IOR, mode, op0, op1);
3214 case BIT_XOR_EXPR:
3215 case TRUTH_XOR_EXPR:
3216 return simplify_gen_binary (XOR, mode, op0, op1);
3218 case TRUTH_ANDIF_EXPR:
3219 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3221 case TRUTH_ORIF_EXPR:
3222 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3224 case TRUTH_NOT_EXPR:
3225 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3227 case LT_EXPR:
3228 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3229 op0, op1);
3231 case LE_EXPR:
3232 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3233 op0, op1);
3235 case GT_EXPR:
3236 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3237 op0, op1);
3239 case GE_EXPR:
3240 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3241 op0, op1);
3243 case EQ_EXPR:
3244 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3246 case NE_EXPR:
3247 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3249 case UNORDERED_EXPR:
3250 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3252 case ORDERED_EXPR:
3253 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3255 case UNLT_EXPR:
3256 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3258 case UNLE_EXPR:
3259 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3261 case UNGT_EXPR:
3262 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3264 case UNGE_EXPR:
3265 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3267 case UNEQ_EXPR:
3268 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3270 case LTGT_EXPR:
3271 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3273 case COND_EXPR:
3274 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3276 case COMPLEX_EXPR:
3277 gcc_assert (COMPLEX_MODE_P (mode));
3278 if (GET_MODE (op0) == VOIDmode)
3279 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3280 if (GET_MODE (op1) == VOIDmode)
3281 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3282 return gen_rtx_CONCAT (mode, op0, op1);
3284 case CONJ_EXPR:
3285 if (GET_CODE (op0) == CONCAT)
3286 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3287 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3288 XEXP (op0, 1),
3289 GET_MODE_INNER (mode)));
3290 else
3292 enum machine_mode imode = GET_MODE_INNER (mode);
3293 rtx re, im;
3295 if (MEM_P (op0))
3297 re = adjust_address_nv (op0, imode, 0);
3298 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3300 else
3302 enum machine_mode ifmode = int_mode_for_mode (mode);
3303 enum machine_mode ihmode = int_mode_for_mode (imode);
3304 rtx halfsize;
3305 if (ifmode == BLKmode || ihmode == BLKmode)
3306 return NULL;
3307 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3308 re = op0;
3309 if (mode != ifmode)
3310 re = gen_rtx_SUBREG (ifmode, re, 0);
3311 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3312 if (imode != ihmode)
3313 re = gen_rtx_SUBREG (imode, re, 0);
3314 im = copy_rtx (op0);
3315 if (mode != ifmode)
3316 im = gen_rtx_SUBREG (ifmode, im, 0);
3317 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3318 if (imode != ihmode)
3319 im = gen_rtx_SUBREG (imode, im, 0);
3321 im = gen_rtx_NEG (imode, im);
3322 return gen_rtx_CONCAT (mode, re, im);
3325 case ADDR_EXPR:
3326 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3327 if (!op0 || !MEM_P (op0))
3329 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3330 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3331 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3332 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3333 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3334 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3336 if (handled_component_p (TREE_OPERAND (exp, 0)))
3338 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3339 tree decl
3340 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3341 &bitoffset, &bitsize, &maxsize);
3342 if ((TREE_CODE (decl) == VAR_DECL
3343 || TREE_CODE (decl) == PARM_DECL
3344 || TREE_CODE (decl) == RESULT_DECL)
3345 && (!TREE_ADDRESSABLE (decl)
3346 || target_for_debug_bind (decl))
3347 && (bitoffset % BITS_PER_UNIT) == 0
3348 && bitsize > 0
3349 && bitsize == maxsize)
3351 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3352 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3356 return NULL;
3359 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3360 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3362 return op0;
3364 case VECTOR_CST:
3366 unsigned i;
3368 op0 = gen_rtx_CONCATN
3369 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3371 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3373 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3374 if (!op1)
3375 return NULL;
3376 XVECEXP (op0, 0, i) = op1;
3379 return op0;
3382 case CONSTRUCTOR:
3383 if (TREE_CLOBBER_P (exp))
3384 return NULL;
3385 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3387 unsigned i;
3388 tree val;
3390 op0 = gen_rtx_CONCATN
3391 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3393 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3395 op1 = expand_debug_expr (val);
3396 if (!op1)
3397 return NULL;
3398 XVECEXP (op0, 0, i) = op1;
3401 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3403 op1 = expand_debug_expr
3404 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3406 if (!op1)
3407 return NULL;
3409 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3410 XVECEXP (op0, 0, i) = op1;
3413 return op0;
3415 else
3416 goto flag_unsupported;
3418 case CALL_EXPR:
3419 /* ??? Maybe handle some builtins? */
3420 return NULL;
3422 case SSA_NAME:
3424 gimple g = get_gimple_for_ssa_name (exp);
3425 if (g)
3427 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3428 if (!op0)
3429 return NULL;
3431 else
3433 int part = var_to_partition (SA.map, exp);
3435 if (part == NO_PARTITION)
3437 /* If this is a reference to the incoming value of a parameter
3438 that is never used in the code, or where the incoming
3439 value itself is never used in the code, use the PARM_DECL's
3440 DECL_RTL if set.
3441 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3442 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3444 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3445 if (op0)
3446 goto adjust_mode;
3447 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3448 if (op0)
3449 goto adjust_mode;
3451 return NULL;
3454 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3456 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3458 goto adjust_mode;
3461 case ERROR_MARK:
3462 return NULL;
3464 /* Vector stuff. For most of the codes we don't have rtl codes. */
3465 case REALIGN_LOAD_EXPR:
3466 case REDUC_MAX_EXPR:
3467 case REDUC_MIN_EXPR:
3468 case REDUC_PLUS_EXPR:
3469 case VEC_COND_EXPR:
3470 case VEC_LSHIFT_EXPR:
3471 case VEC_PACK_FIX_TRUNC_EXPR:
3472 case VEC_PACK_SAT_EXPR:
3473 case VEC_PACK_TRUNC_EXPR:
3474 case VEC_RSHIFT_EXPR:
3475 case VEC_UNPACK_FLOAT_HI_EXPR:
3476 case VEC_UNPACK_FLOAT_LO_EXPR:
3477 case VEC_UNPACK_HI_EXPR:
3478 case VEC_UNPACK_LO_EXPR:
3479 case VEC_WIDEN_MULT_HI_EXPR:
3480 case VEC_WIDEN_MULT_LO_EXPR:
3481 case VEC_WIDEN_LSHIFT_HI_EXPR:
3482 case VEC_WIDEN_LSHIFT_LO_EXPR:
3483 case VEC_PERM_EXPR:
3484 return NULL;
3486 /* Misc codes. */
3487 case ADDR_SPACE_CONVERT_EXPR:
3488 case FIXED_CONVERT_EXPR:
3489 case OBJ_TYPE_REF:
3490 case WITH_SIZE_EXPR:
3491 return NULL;
3493 case DOT_PROD_EXPR:
3494 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3495 && SCALAR_INT_MODE_P (mode))
3498 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3499 0)))
3500 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3501 inner_mode);
3503 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3504 1)))
3505 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3506 inner_mode);
3507 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3508 return simplify_gen_binary (PLUS, mode, op0, op2);
3510 return NULL;
3512 case WIDEN_MULT_EXPR:
3513 case WIDEN_MULT_PLUS_EXPR:
3514 case WIDEN_MULT_MINUS_EXPR:
3515 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3516 && SCALAR_INT_MODE_P (mode))
3518 inner_mode = GET_MODE (op0);
3519 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3520 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3521 else
3522 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3523 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3524 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3525 else
3526 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3527 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3528 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3529 return op0;
3530 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3531 return simplify_gen_binary (PLUS, mode, op0, op2);
3532 else
3533 return simplify_gen_binary (MINUS, mode, op2, op0);
3535 return NULL;
3537 case WIDEN_SUM_EXPR:
3538 case WIDEN_LSHIFT_EXPR:
3539 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3540 && SCALAR_INT_MODE_P (mode))
3543 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3544 0)))
3545 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3546 inner_mode);
3547 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3548 ? ASHIFT : PLUS, mode, op0, op1);
3550 return NULL;
3552 case FMA_EXPR:
3553 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3555 default:
3556 flag_unsupported:
3557 #ifdef ENABLE_CHECKING
3558 debug_tree (exp);
3559 gcc_unreachable ();
3560 #else
3561 return NULL;
3562 #endif
3566 /* Return an RTX equivalent to the source bind value of the tree expression
3567 EXP. */
3569 static rtx
3570 expand_debug_source_expr (tree exp)
3572 rtx op0 = NULL_RTX;
3573 enum machine_mode mode = VOIDmode, inner_mode;
3575 switch (TREE_CODE (exp))
3577 case PARM_DECL:
3579 mode = DECL_MODE (exp);
3580 op0 = expand_debug_parm_decl (exp);
3581 if (op0)
3582 break;
3583 /* See if this isn't an argument that has been completely
3584 optimized out. */
3585 if (!DECL_RTL_SET_P (exp)
3586 && !DECL_INCOMING_RTL (exp)
3587 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3589 tree aexp = exp;
3590 if (DECL_ABSTRACT_ORIGIN (exp))
3591 aexp = DECL_ABSTRACT_ORIGIN (exp);
3592 if (DECL_CONTEXT (aexp)
3593 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3595 VEC(tree, gc) **debug_args;
3596 unsigned int ix;
3597 tree ddecl;
3598 #ifdef ENABLE_CHECKING
3599 tree parm;
3600 for (parm = DECL_ARGUMENTS (current_function_decl);
3601 parm; parm = DECL_CHAIN (parm))
3602 gcc_assert (parm != exp
3603 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3604 #endif
3605 debug_args = decl_debug_args_lookup (current_function_decl);
3606 if (debug_args != NULL)
3608 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3609 ix += 2)
3610 if (ddecl == aexp)
3611 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3615 break;
3617 default:
3618 break;
3621 if (op0 == NULL_RTX)
3622 return NULL_RTX;
3624 inner_mode = GET_MODE (op0);
3625 if (mode == inner_mode)
3626 return op0;
3628 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3630 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3631 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3632 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3633 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3634 else
3635 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3637 else if (FLOAT_MODE_P (mode))
3638 gcc_unreachable ();
3639 else if (FLOAT_MODE_P (inner_mode))
3641 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3642 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3643 else
3644 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3646 else if (CONSTANT_P (op0)
3647 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3648 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3649 subreg_lowpart_offset (mode, inner_mode));
3650 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3651 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3652 else
3653 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3655 return op0;
3658 /* Expand the _LOCs in debug insns. We run this after expanding all
3659 regular insns, so that any variables referenced in the function
3660 will have their DECL_RTLs set. */
3662 static void
3663 expand_debug_locations (void)
3665 rtx insn;
3666 rtx last = get_last_insn ();
3667 int save_strict_alias = flag_strict_aliasing;
3669 /* New alias sets while setting up memory attributes cause
3670 -fcompare-debug failures, even though they don't bring about any
3671 codegen changes. */
3672 flag_strict_aliasing = 0;
3674 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3675 if (DEBUG_INSN_P (insn))
3677 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3678 rtx val;
3679 enum machine_mode mode;
3681 if (value == NULL_TREE)
3682 val = NULL_RTX;
3683 else
3685 if (INSN_VAR_LOCATION_STATUS (insn)
3686 == VAR_INIT_STATUS_UNINITIALIZED)
3687 val = expand_debug_source_expr (value);
3688 else
3689 val = expand_debug_expr (value);
3690 gcc_assert (last == get_last_insn ());
3693 if (!val)
3694 val = gen_rtx_UNKNOWN_VAR_LOC ();
3695 else
3697 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3699 gcc_assert (mode == GET_MODE (val)
3700 || (GET_MODE (val) == VOIDmode
3701 && (CONST_INT_P (val)
3702 || GET_CODE (val) == CONST_FIXED
3703 || GET_CODE (val) == CONST_DOUBLE
3704 || GET_CODE (val) == LABEL_REF)));
3707 INSN_VAR_LOCATION_LOC (insn) = val;
3710 flag_strict_aliasing = save_strict_alias;
3713 /* Expand basic block BB from GIMPLE trees to RTL. */
3715 static basic_block
3716 expand_gimple_basic_block (basic_block bb)
3718 gimple_stmt_iterator gsi;
3719 gimple_seq stmts;
3720 gimple stmt = NULL;
3721 rtx note, last;
3722 edge e;
3723 edge_iterator ei;
3724 void **elt;
3726 if (dump_file)
3727 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3728 bb->index);
3730 /* Note that since we are now transitioning from GIMPLE to RTL, we
3731 cannot use the gsi_*_bb() routines because they expect the basic
3732 block to be in GIMPLE, instead of RTL. Therefore, we need to
3733 access the BB sequence directly. */
3734 stmts = bb_seq (bb);
3735 bb->il.gimple.seq = NULL;
3736 bb->il.gimple.phi_nodes = NULL;
3737 rtl_profile_for_bb (bb);
3738 init_rtl_bb_info (bb);
3739 bb->flags |= BB_RTL;
3741 /* Remove the RETURN_EXPR if we may fall though to the exit
3742 instead. */
3743 gsi = gsi_last (stmts);
3744 if (!gsi_end_p (gsi)
3745 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3747 gimple ret_stmt = gsi_stmt (gsi);
3749 gcc_assert (single_succ_p (bb));
3750 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3752 if (bb->next_bb == EXIT_BLOCK_PTR
3753 && !gimple_return_retval (ret_stmt))
3755 gsi_remove (&gsi, false);
3756 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3760 gsi = gsi_start (stmts);
3761 if (!gsi_end_p (gsi))
3763 stmt = gsi_stmt (gsi);
3764 if (gimple_code (stmt) != GIMPLE_LABEL)
3765 stmt = NULL;
3768 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3770 if (stmt || elt)
3772 last = get_last_insn ();
3774 if (stmt)
3776 expand_gimple_stmt (stmt);
3777 gsi_next (&gsi);
3780 if (elt)
3781 emit_label ((rtx) *elt);
3783 /* Java emits line number notes at the top of labels.
3784 ??? Make this go away once line number notes are obsoleted. */
3785 BB_HEAD (bb) = NEXT_INSN (last);
3786 if (NOTE_P (BB_HEAD (bb)))
3787 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3788 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3790 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3792 else
3793 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3795 NOTE_BASIC_BLOCK (note) = bb;
3797 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3799 basic_block new_bb;
3801 stmt = gsi_stmt (gsi);
3803 /* If this statement is a non-debug one, and we generate debug
3804 insns, then this one might be the last real use of a TERed
3805 SSA_NAME, but where there are still some debug uses further
3806 down. Expanding the current SSA name in such further debug
3807 uses by their RHS might lead to wrong debug info, as coalescing
3808 might make the operands of such RHS be placed into the same
3809 pseudo as something else. Like so:
3810 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3811 use(a_1);
3812 a_2 = ...
3813 #DEBUG ... => a_1
3814 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3815 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3816 the write to a_2 would actually have clobbered the place which
3817 formerly held a_0.
3819 So, instead of that, we recognize the situation, and generate
3820 debug temporaries at the last real use of TERed SSA names:
3821 a_1 = a_0 + 1;
3822 #DEBUG #D1 => a_1
3823 use(a_1);
3824 a_2 = ...
3825 #DEBUG ... => #D1
3827 if (MAY_HAVE_DEBUG_INSNS
3828 && SA.values
3829 && !is_gimple_debug (stmt))
3831 ssa_op_iter iter;
3832 tree op;
3833 gimple def;
3835 location_t sloc = get_curr_insn_source_location ();
3836 tree sblock = get_curr_insn_block ();
3838 /* Look for SSA names that have their last use here (TERed
3839 names always have only one real use). */
3840 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3841 if ((def = get_gimple_for_ssa_name (op)))
3843 imm_use_iterator imm_iter;
3844 use_operand_p use_p;
3845 bool have_debug_uses = false;
3847 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3849 if (gimple_debug_bind_p (USE_STMT (use_p)))
3851 have_debug_uses = true;
3852 break;
3856 if (have_debug_uses)
3858 /* OP is a TERed SSA name, with DEF its defining
3859 statement, and where OP is used in further debug
3860 instructions. Generate a debug temporary, and
3861 replace all uses of OP in debug insns with that
3862 temporary. */
3863 gimple debugstmt;
3864 tree value = gimple_assign_rhs_to_tree (def);
3865 tree vexpr = make_node (DEBUG_EXPR_DECL);
3866 rtx val;
3867 enum machine_mode mode;
3869 set_curr_insn_source_location (gimple_location (def));
3870 set_curr_insn_block (gimple_block (def));
3872 DECL_ARTIFICIAL (vexpr) = 1;
3873 TREE_TYPE (vexpr) = TREE_TYPE (value);
3874 if (DECL_P (value))
3875 mode = DECL_MODE (value);
3876 else
3877 mode = TYPE_MODE (TREE_TYPE (value));
3878 DECL_MODE (vexpr) = mode;
3880 val = gen_rtx_VAR_LOCATION
3881 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3883 emit_debug_insn (val);
3885 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3887 if (!gimple_debug_bind_p (debugstmt))
3888 continue;
3890 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3891 SET_USE (use_p, vexpr);
3893 update_stmt (debugstmt);
3897 set_curr_insn_source_location (sloc);
3898 set_curr_insn_block (sblock);
3901 currently_expanding_gimple_stmt = stmt;
3903 /* Expand this statement, then evaluate the resulting RTL and
3904 fixup the CFG accordingly. */
3905 if (gimple_code (stmt) == GIMPLE_COND)
3907 new_bb = expand_gimple_cond (bb, stmt);
3908 if (new_bb)
3909 return new_bb;
3911 else if (gimple_debug_bind_p (stmt))
3913 location_t sloc = get_curr_insn_source_location ();
3914 tree sblock = get_curr_insn_block ();
3915 gimple_stmt_iterator nsi = gsi;
3917 for (;;)
3919 tree var = gimple_debug_bind_get_var (stmt);
3920 tree value;
3921 rtx val;
3922 enum machine_mode mode;
3924 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3925 && TREE_CODE (var) != LABEL_DECL
3926 && !target_for_debug_bind (var))
3927 goto delink_debug_stmt;
3929 if (gimple_debug_bind_has_value_p (stmt))
3930 value = gimple_debug_bind_get_value (stmt);
3931 else
3932 value = NULL_TREE;
3934 last = get_last_insn ();
3936 set_curr_insn_source_location (gimple_location (stmt));
3937 set_curr_insn_block (gimple_block (stmt));
3939 if (DECL_P (var))
3940 mode = DECL_MODE (var);
3941 else
3942 mode = TYPE_MODE (TREE_TYPE (var));
3944 val = gen_rtx_VAR_LOCATION
3945 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3947 emit_debug_insn (val);
3949 if (dump_file && (dump_flags & TDF_DETAILS))
3951 /* We can't dump the insn with a TREE where an RTX
3952 is expected. */
3953 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3954 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3955 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3958 delink_debug_stmt:
3959 /* In order not to generate too many debug temporaries,
3960 we delink all uses of debug statements we already expanded.
3961 Therefore debug statements between definition and real
3962 use of TERed SSA names will continue to use the SSA name,
3963 and not be replaced with debug temps. */
3964 delink_stmt_imm_use (stmt);
3966 gsi = nsi;
3967 gsi_next (&nsi);
3968 if (gsi_end_p (nsi))
3969 break;
3970 stmt = gsi_stmt (nsi);
3971 if (!gimple_debug_bind_p (stmt))
3972 break;
3975 set_curr_insn_source_location (sloc);
3976 set_curr_insn_block (sblock);
3978 else if (gimple_debug_source_bind_p (stmt))
3980 location_t sloc = get_curr_insn_source_location ();
3981 tree sblock = get_curr_insn_block ();
3982 tree var = gimple_debug_source_bind_get_var (stmt);
3983 tree value = gimple_debug_source_bind_get_value (stmt);
3984 rtx val;
3985 enum machine_mode mode;
3987 last = get_last_insn ();
3989 set_curr_insn_source_location (gimple_location (stmt));
3990 set_curr_insn_block (gimple_block (stmt));
3992 mode = DECL_MODE (var);
3994 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3995 VAR_INIT_STATUS_UNINITIALIZED);
3997 emit_debug_insn (val);
3999 if (dump_file && (dump_flags & TDF_DETAILS))
4001 /* We can't dump the insn with a TREE where an RTX
4002 is expected. */
4003 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4004 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4005 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4008 set_curr_insn_source_location (sloc);
4009 set_curr_insn_block (sblock);
4011 else
4013 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4015 bool can_fallthru;
4016 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4017 if (new_bb)
4019 if (can_fallthru)
4020 bb = new_bb;
4021 else
4022 return new_bb;
4025 else
4027 def_operand_p def_p;
4028 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4030 if (def_p != NULL)
4032 /* Ignore this stmt if it is in the list of
4033 replaceable expressions. */
4034 if (SA.values
4035 && bitmap_bit_p (SA.values,
4036 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4037 continue;
4039 last = expand_gimple_stmt (stmt);
4040 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4045 currently_expanding_gimple_stmt = NULL;
4047 /* Expand implicit goto and convert goto_locus. */
4048 FOR_EACH_EDGE (e, ei, bb->succs)
4050 if (e->goto_locus && e->goto_block)
4052 set_curr_insn_source_location (e->goto_locus);
4053 set_curr_insn_block (e->goto_block);
4054 e->goto_locus = curr_insn_locator ();
4056 e->goto_block = NULL;
4057 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4059 emit_jump (label_rtx_for_bb (e->dest));
4060 e->flags &= ~EDGE_FALLTHRU;
4064 /* Expanded RTL can create a jump in the last instruction of the block.
4065 This jump might later be assumed to be a jump to the successor and break edge insertion.
4066 We need to insert a dummy move to prevent this.  PR41440. */
4067 if (single_succ_p (bb)
4068 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4069 && (last = get_last_insn ())
4070 && JUMP_P (last))
4072 rtx dummy = gen_reg_rtx (SImode);
4073 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
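   /* Editorial note, illustrative only (not from the source): the dummy
      move emitted above is a self-move along the lines of

        (insn (set (reg:SI 90) (reg:SI 90)))

      placed after the trailing jump, so the block no longer ends in a
      jump insn and later edge insertion has a safe place to append code.
      The register number is hypothetical.  */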
4076 do_pending_stack_adjust ();
4078 /* Find the block tail. The last insn in the block is the insn
4079 before a barrier and/or table jump insn. */
4080 last = get_last_insn ();
4081 if (BARRIER_P (last))
4082 last = PREV_INSN (last);
4083 if (JUMP_TABLE_DATA_P (last))
4084 last = PREV_INSN (PREV_INSN (last));
4085 BB_END (bb) = last;
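   /* Editorial note, assumed insn layout (not from the source): for a
      tablejump the stream typically ends with

        jump_insn -> code_label -> addr_vec (jump table) -> barrier

      so after skipping the barrier, stepping back two insns from the
      table lands on the jump_insn, which is the real block end.  */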
4087 update_bb_for_insn (bb);
4089 return bb;
4093 /* Create a basic block for initialization code. */
4095 static basic_block
4096 construct_init_block (void)
4098 basic_block init_block, first_block;
4099 edge e = NULL;
4100 int flags;
4102 /* Multiple entry points not supported yet. */
4103 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4104 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4105 init_rtl_bb_info (EXIT_BLOCK_PTR);
4106 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4107 EXIT_BLOCK_PTR->flags |= BB_RTL;
4109 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4111 /* When the entry edge points to the first basic block, we don't need a jump;
4112 otherwise we have to jump to the proper target. */
4113 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4115 tree label = gimple_block_label (e->dest);
4117 emit_jump (label_rtx (label));
4118 flags = 0;
4120 else
4121 flags = EDGE_FALLTHRU;
4123 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4124 get_last_insn (),
4125 ENTRY_BLOCK_PTR);
4126 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4127 init_block->count = ENTRY_BLOCK_PTR->count;
4128 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4129 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4130 if (e)
4132 first_block = e->dest;
4133 redirect_edge_succ (e, init_block);
4134 e = make_edge (init_block, first_block, flags);
4136 else
4137 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4138 e->probability = REG_BR_PROB_BASE;
4139 e->count = ENTRY_BLOCK_PTR->count;
4141 update_bb_for_insn (init_block);
4142 return init_block;
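/* Editorial sketch, illustrative only: after construct_init_block the
   CFG looks roughly like

     ENTRY --> init_block --> first user block
               (everything emitted so far during expansion set-up)

   where the second edge is a fallthru if the entry edge already pointed
   at ENTRY's next_bb, and an explicit jump otherwise.  */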
4145 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4146 found in the block tree. */
4148 static void
4149 set_block_levels (tree block, int level)
4151 while (block)
4153 BLOCK_NUMBER (block) = level;
4154 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4155 block = BLOCK_CHAIN (block);
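/* Editorial sketch, illustrative only: for a nest of lexical blocks

     DECL_INITIAL (fndecl)        BLOCK_NUMBER = 0
       { int i; ... }             BLOCK_NUMBER = 1
         { int j; ... }           BLOCK_NUMBER = 2

   the depth numbers let change_scope (see the comment near the
   set_block_levels call later in this file) find the common parent of
   two blocks by walking the deeper one up first.  */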
4159 /* Create a block containing landing pads and similar stuff. */
4161 static void
4162 construct_exit_block (void)
4164 rtx head = get_last_insn ();
4165 rtx end;
4166 basic_block exit_block;
4167 edge e, e2;
4168 unsigned ix;
4169 edge_iterator ei;
4170 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4172 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4174 /* Make sure the locus is set to the end of the function, so that
4175 epilogue line numbers and warnings are set properly. */
4176 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4177 input_location = cfun->function_end_locus;
4179 /* The following insns belong to the top scope. */
4180 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4182 /* Generate rtl for function exit. */
4183 expand_function_end ();
4185 end = get_last_insn ();
4186 if (head == end)
4187 return;
4188 /* While emitting the function end we could move the end of the last basic block.  */
4190 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4191 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4192 head = NEXT_INSN (head);
4193 exit_block = create_basic_block (NEXT_INSN (head), end,
4194 EXIT_BLOCK_PTR->prev_bb);
4195 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4196 exit_block->count = EXIT_BLOCK_PTR->count;
4197 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4198 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4200 ix = 0;
4201 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4203 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4204 if (!(e->flags & EDGE_ABNORMAL))
4205 redirect_edge_succ (e, exit_block);
4206 else
4207 ix++;
4210 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4211 e->probability = REG_BR_PROB_BASE;
4212 e->count = EXIT_BLOCK_PTR->count;
4213 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4214 if (e2 != e)
4216 e->count -= e2->count;
4217 exit_block->count -= e2->count;
4218 exit_block->frequency -= EDGE_FREQUENCY (e2);
4220 if (e->count < 0)
4221 e->count = 0;
4222 if (exit_block->count < 0)
4223 exit_block->count = 0;
4224 if (exit_block->frequency < 0)
4225 exit_block->frequency = 0;
4226 update_bb_for_insn (exit_block);
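/* Editorial note, assumed numbers for illustration: if EXIT originally
   had count 100 and one abnormal predecessor with count 30 remains, the
   loop above leaves the new fallthru edge and exit_block with count
   100 - 30 = 70; negative results from inconsistent profiles are
   clamped to 0.  */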
4229 /* Helper function for discover_nonconstant_array_refs.
4230 Look for ARRAY_REF nodes with non-constant indexes and mark their
4231 base variables addressable. */
4233 static tree
4234 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4235 void *data ATTRIBUTE_UNUSED)
4237 tree t = *tp;
4239 if (IS_TYPE_OR_DECL_P (t))
4240 *walk_subtrees = 0;
4241 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4243 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4244 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4245 && (!TREE_OPERAND (t, 2)
4246 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4247 || (TREE_CODE (t) == COMPONENT_REF
4248 && (!TREE_OPERAND (t,2)
4249 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4250 || TREE_CODE (t) == BIT_FIELD_REF
4251 || TREE_CODE (t) == REALPART_EXPR
4252 || TREE_CODE (t) == IMAGPART_EXPR
4253 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4254 || CONVERT_EXPR_P (t))
4255 t = TREE_OPERAND (t, 0);
4257 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4259 t = get_base_address (t);
4260 if (t && DECL_P (t)
4261 && DECL_MODE (t) != BLKmode)
4262 TREE_ADDRESSABLE (t) = 1;
4265 *walk_subtrees = 0;
4268 return NULL_TREE;
4271 /* RTL expansion is not able to compile array references with variable
4272 offsets for arrays stored in a single register.  Discover such
4273 expressions and mark variables as addressable to avoid this
4274 scenario. */
4276 static void
4277 discover_nonconstant_array_refs (void)
4279 basic_block bb;
4280 gimple_stmt_iterator gsi;
4282 FOR_EACH_BB (bb)
4283 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4285 gimple stmt = gsi_stmt (gsi);
4286 if (!is_gimple_debug (stmt))
4287 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
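/* Editorial example, illustrative only (not from the source):

     int v[2];
     ...
     return v[i];   /+ i not known at compile time +/

   Without TREE_ADDRESSABLE, v could be allocated to a single register,
   and the variable-index reference could not be expanded; marking v
   addressable keeps it in memory so the access becomes an ordinary
   address computation.  */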
4291 /* This function sets crtl->args.internal_arg_pointer to a virtual
4292 register if DRAP is needed.  The local register allocator will replace
4293 virtual_incoming_args_rtx with the virtual register. */
4295 static void
4296 expand_stack_alignment (void)
4298 rtx drap_rtx;
4299 unsigned int preferred_stack_boundary;
4301 if (! SUPPORTS_STACK_ALIGNMENT)
4302 return;
4304 if (cfun->calls_alloca
4305 || cfun->has_nonlocal_label
4306 || crtl->has_nonlocal_goto)
4307 crtl->need_drap = true;
4309 /* Call update_stack_boundary here again to update incoming stack
4310 boundary. It may set incoming stack alignment to a different
4311 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4312 use the minimum incoming stack alignment to check if it is OK
4313 to perform sibcall optimization since sibcall optimization will
4314 only align the outgoing stack to incoming stack boundary. */
4315 if (targetm.calls.update_stack_boundary)
4316 targetm.calls.update_stack_boundary ();
4318 /* The incoming stack frame has to be aligned at least at
4319 parm_stack_boundary. */
4320 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4322 /* Update crtl->stack_alignment_estimated and use it later to align the
4323 stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
4324 exceptions since callgraph doesn't collect incoming stack alignment
4325 in this case. */
4326 if (cfun->can_throw_non_call_exceptions
4327 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4328 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4329 else
4330 preferred_stack_boundary = crtl->preferred_stack_boundary;
4331 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4332 crtl->stack_alignment_estimated = preferred_stack_boundary;
4333 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4334 crtl->stack_alignment_needed = preferred_stack_boundary;
4336 gcc_assert (crtl->stack_alignment_needed
4337 <= crtl->stack_alignment_estimated);
4339 crtl->stack_realign_needed
4340 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4341 crtl->stack_realign_tried = crtl->stack_realign_needed;
4343 crtl->stack_realign_processed = true;
4345 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4346 alignment. */
4347 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4348 drap_rtx = targetm.calls.get_drap_rtx ();
4350 /* stack_realign_drap and drap_rtx must match. */
4351 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4353 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4354 if (NULL != drap_rtx)
4356 crtl->args.internal_arg_pointer = drap_rtx;
4358 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4359 needed. */
4360 fixup_tail_calls ();
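   /* Editorial note, assumed background (not from the source): DRAP is
      the dynamic realign argument pointer.  When the stack must be
      realigned beyond the incoming boundary, incoming arguments can no
      longer be addressed relative to the realigned stack, so the target
      provides drap_rtx, which keeps pointing at the incoming argument
      area and becomes crtl->args.internal_arg_pointer above.  */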
4364 /* Translate the intermediate representation contained in the CFG
4365 from GIMPLE trees to RTL.
4367 We do conversion per basic block and preserve/update the tree CFG.
4368 This implies we have to do some magic as the CFG can simultaneously
4369 consist of basic blocks containing RTL and GIMPLE trees. This can
4370 confuse the CFG hooks, so be careful not to manipulate the CFG
4371 during expansion. */
4373 static unsigned int
4374 gimple_expand_cfg (void)
4376 basic_block bb, init_block;
4377 sbitmap blocks;
4378 edge_iterator ei;
4379 edge e;
4380 rtx var_seq;
4381 unsigned i;
4383 timevar_push (TV_OUT_OF_SSA);
4384 rewrite_out_of_ssa (&SA);
4385 timevar_pop (TV_OUT_OF_SSA);
4386 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4387 sizeof (rtx));
4389 /* Make sure all values used by the optimization passes have sane
4390 defaults. */
4391 reg_renumber = 0;
4393 /* Some backends want to know that we are expanding to RTL. */
4394 currently_expanding_to_rtl = 1;
4395 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4396 free_dominance_info (CDI_DOMINATORS);
4398 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4400 insn_locators_alloc ();
4401 if (!DECL_IS_BUILTIN (current_function_decl))
4403 /* Eventually, all FEs should explicitly set function_start_locus. */
4404 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4405 set_curr_insn_source_location
4406 (DECL_SOURCE_LOCATION (current_function_decl));
4407 else
4408 set_curr_insn_source_location (cfun->function_start_locus);
4410 else
4411 set_curr_insn_source_location (UNKNOWN_LOCATION);
4412 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4413 prologue_locator = curr_insn_locator ();
4415 #ifdef INSN_SCHEDULING
4416 init_sched_attrs ();
4417 #endif
4419 /* Make sure the first insn is a note, even if we don't want line numbers.
4420 This makes sure the first insn will never be deleted.
4421 Also, final expects a note to appear there. */
4422 emit_note (NOTE_INSN_DELETED);
4424 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4425 discover_nonconstant_array_refs ();
4427 targetm.expand_to_rtl_hook ();
4428 crtl->stack_alignment_needed = STACK_BOUNDARY;
4429 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4430 crtl->stack_alignment_estimated = 0;
4431 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4432 cfun->cfg->max_jumptable_ents = 0;
4434 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
4435 of the function section at expansion time to predict the distance of calls. */
4436 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4438 /* Expand the variables recorded during gimple lowering. */
4439 timevar_push (TV_VAR_EXPAND);
4440 start_sequence ();
4442 expand_used_vars ();
4444 var_seq = get_insns ();
4445 end_sequence ();
4446 timevar_pop (TV_VAR_EXPAND);
4448 /* Honor stack protection warnings. */
4449 if (warn_stack_protect)
4451 if (cfun->calls_alloca)
4452 warning (OPT_Wstack_protector,
4453 "stack protector not protecting local variables: "
4454 "variable length buffer");
4455 if (has_short_buffer && !crtl->stack_protect_guard)
4456 warning (OPT_Wstack_protector,
4457 "stack protector not protecting function: "
4458 "all local arrays are less than %d bytes long",
4459 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4462 /* Set up parameters and prepare for return, for the function. */
4463 expand_function_start (current_function_decl);
4465 /* If we emitted any instructions for setting up the variables,
4466 emit them before the FUNCTION_START note. */
4467 if (var_seq)
4469 emit_insn_before (var_seq, parm_birth_insn);
4471 /* In expand_function_end we'll insert the alloca save/restore
4472 before parm_birth_insn.  We've just inserted an alloca call.
4473 Adjust the pointer to match. */
4474 parm_birth_insn = var_seq;
4477 /* Now that we also have the parameter RTXs, copy them over to our
4478 partitions. */
4479 for (i = 0; i < SA.map->num_partitions; i++)
4481 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4483 if (TREE_CODE (var) != VAR_DECL
4484 && !SA.partition_to_pseudo[i])
4485 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4486 gcc_assert (SA.partition_to_pseudo[i]);
4488 /* If this decl was marked as living in multiple places, reset
4489 this now to NULL. */
4490 if (DECL_RTL_IF_SET (var) == pc_rtx)
4491 SET_DECL_RTL (var, NULL);
4493 /* Some RTL parts really want to look at DECL_RTL(x) when x
4494 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4495 SET_DECL_RTL here to make this available, but that would mean
4496 selecting one of the potentially many RTLs for one DECL.  Instead
4497 of doing that we simply reset the MEM_EXPR of the RTL in question,
4498 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4499 if (!DECL_RTL_SET_P (var))
4501 if (MEM_P (SA.partition_to_pseudo[i]))
4502 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4506 /* If a partition contains differently aligned pointers
4507 we need to merge those into the corresponding RTL pointer
4508 alignment. */
4509 for (i = 1; i < num_ssa_names; i++)
4511 tree name = ssa_name (i);
4512 int part;
4513 rtx r;
4515 if (!name
4516 || !POINTER_TYPE_P (TREE_TYPE (name))
4517 /* We might have generated new SSA names in
4518 update_alias_info_with_stack_vars.  They will have a NULL
4519 defining statement and won't be part of the partitioning,
4520 so ignore those. */
4521 || !SSA_NAME_DEF_STMT (name))
4522 continue;
4523 part = var_to_partition (SA.map, name);
4524 if (part == NO_PARTITION)
4525 continue;
4526 r = SA.partition_to_pseudo[part];
4527 if (REG_P (r))
4528 mark_reg_pointer (r, get_pointer_alignment (name));
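/* Editorial note, assumed behavior for illustration: if two pointer SSA
   names with different known alignments, say p_1 (8 bytes) and p_2
   (4 bytes), were coalesced into the same partition, the shared pseudo
   should only be assumed 4-byte aligned; repeated calls to
   mark_reg_pointer keep the smaller recorded alignment.  */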
4531 /* If this function is `main', emit a call to `__main'
4532 to run global initializers, etc. */
4533 if (DECL_NAME (current_function_decl)
4534 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4535 && DECL_FILE_SCOPE_P (current_function_decl))
4536 expand_main_function ();
4538 /* Initialize the stack_protect_guard field. This must happen after the
4539 call to __main (if any) so that the external decl is initialized. */
4540 if (crtl->stack_protect_guard)
4541 stack_protect_prologue ();
4543 expand_phi_nodes (&SA);
4545 /* Register rtl specific functions for cfg. */
4546 rtl_register_cfg_hooks ();
4548 init_block = construct_init_block ();
4550 /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
4551 remaining edges later. */
4552 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4553 e->flags &= ~EDGE_EXECUTABLE;
4555 lab_rtx_for_bb = pointer_map_create ();
4556 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4557 bb = expand_gimple_basic_block (bb);
4559 if (MAY_HAVE_DEBUG_INSNS)
4560 expand_debug_locations ();
4562 /* Free stuff we no longer need after GIMPLE optimizations. */
4563 free_dominance_info (CDI_DOMINATORS);
4564 free_dominance_info (CDI_POST_DOMINATORS);
4565 delete_tree_cfg_annotations ();
4567 timevar_push (TV_OUT_OF_SSA);
4568 finish_out_of_ssa (&SA);
4569 timevar_pop (TV_OUT_OF_SSA);
4571 timevar_push (TV_POST_EXPAND);
4572 /* We are no longer in SSA form. */
4573 cfun->gimple_df->in_ssa_p = false;
4574 if (current_loops)
4575 loops_state_clear (LOOP_CLOSED_SSA);
4577 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4578 conservatively to true until they are all profile aware. */
4579 pointer_map_destroy (lab_rtx_for_bb);
4580 free_histograms ();
4582 construct_exit_block ();
4583 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4584 insn_locators_finalize ();
4586 /* Zap the tree EH table. */
4587 set_eh_throw_stmt_table (cfun, NULL);
4589 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4590 to split edges, which edge insertions might do. */
4591 rebuild_jump_labels (get_insns ());
4593 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4595 edge e;
4596 edge_iterator ei;
4597 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4599 if (e->insns.r)
4601 rebuild_jump_labels_chain (e->insns.r);
4602 /* Avoid putting insns before parm_birth_insn. */
4603 if (e->src == ENTRY_BLOCK_PTR
4604 && single_succ_p (ENTRY_BLOCK_PTR)
4605 && parm_birth_insn)
4607 rtx insns = e->insns.r;
4608 e->insns.r = NULL_RTX;
4609 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4611 else
4612 commit_one_edge_insertion (e);
4614 else
4615 ei_next (&ei);
4619 /* We're done expanding trees to RTL. */
4620 currently_expanding_to_rtl = 0;
4622 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4624 edge e;
4625 edge_iterator ei;
4626 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4628 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4629 e->flags &= ~EDGE_EXECUTABLE;
4631 /* At the moment not all abnormal edges match the RTL
4632 representation. It is safe to remove them here as
4633 find_many_sub_basic_blocks will rediscover them.
4634 In the future we should get this fixed properly. */
4635 if ((e->flags & EDGE_ABNORMAL)
4636 && !(e->flags & EDGE_SIBCALL))
4637 remove_edge (e);
4638 else
4639 ei_next (&ei);
4643 blocks = sbitmap_alloc (last_basic_block);
4644 sbitmap_ones (blocks);
4645 find_many_sub_basic_blocks (blocks);
4646 sbitmap_free (blocks);
4647 purge_all_dead_edges ();
4649 expand_stack_alignment ();
4651 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4652 function. */
4653 if (crtl->tail_call_emit)
4654 fixup_tail_calls ();
4656 /* After initial rtl generation, call back to finish generating
4657 exception support code. We need to do this before cleaning up
4658 the CFG as the code does not expect dead landing pads. */
4659 if (cfun->eh->region_tree != NULL)
4660 finish_eh_generation ();
4662 /* Remove unreachable blocks; otherwise we cannot compute dominators
4663 which are needed for loop state verification. As a side-effect
4664 this also compacts blocks.
4665 ??? We cannot remove trivially dead insns here as for example
4666 the DRAP reg on i?86 is not magically live at this point.
4667 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4668 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4670 #ifdef ENABLE_CHECKING
4671 verify_flow_info ();
4672 #endif
4674 /* Initialize pseudos allocated for hard registers. */
4675 emit_initial_value_sets ();
4677 /* And finally unshare all RTL. */
4678 unshare_all_rtl ();
4680 /* There's no need to defer outputting this function any more; we
4681 know we want to output it. */
4682 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4684 /* Now that we're done expanding trees to RTL, we shouldn't have any
4685 more CONCATs anywhere. */
4686 generating_concat_p = 0;
4688 if (dump_file)
4690 fprintf (dump_file,
4691 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4692 /* And the pass manager will dump RTL for us. */
4695 /* If we're emitting a nested function, make sure its parent gets
4696 emitted as well. Doing otherwise confuses debug info. */
4698 tree parent;
4699 for (parent = DECL_CONTEXT (current_function_decl);
4700 parent != NULL_TREE;
4701 parent = get_containing_scope (parent))
4702 if (TREE_CODE (parent) == FUNCTION_DECL)
4703 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4706 /* We are now committed to emitting code for this function. Do any
4707 preparation, such as emitting abstract debug info for the inline
4708 function before it gets mangled by optimization. */
4709 if (cgraph_function_possibly_inlined_p (current_function_decl))
4710 (*debug_hooks->outlining_inline_function) (current_function_decl);
4712 TREE_ASM_WRITTEN (current_function_decl) = 1;
4714 /* After expanding, the return labels are no longer needed. */
4715 return_label = NULL;
4716 naked_return_label = NULL;
4718 /* After expanding, the tm_restart map is no longer needed. */
4719 if (cfun->gimple_df->tm_restart)
4721 htab_delete (cfun->gimple_df->tm_restart);
4722 cfun->gimple_df->tm_restart = NULL;
4725 /* Tag the blocks with a depth number so that change_scope can find
4726 the common parent easily. */
4727 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4728 default_rtl_profile ();
4730 timevar_pop (TV_POST_EXPAND);
4732 return 0;
4735 struct rtl_opt_pass pass_expand =
4738 RTL_PASS,
4739 "expand", /* name */
4740 NULL, /* gate */
4741 gimple_expand_cfg, /* execute */
4742 NULL, /* sub */
4743 NULL, /* next */
4744 0, /* static_pass_number */
4745 TV_EXPAND, /* tv_id */
4746 PROP_ssa | PROP_gimple_leh | PROP_cfg
4747 | PROP_gimple_lcx, /* properties_required */
4748 PROP_rtl, /* properties_provided */
4749 PROP_ssa | PROP_trees, /* properties_destroyed */
4750 TODO_verify_ssa | TODO_verify_flow
4751 | TODO_verify_stmts, /* todo_flags_start */
4752 TODO_ggc_collect /* todo_flags_finish */