/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */

struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
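
/* Worked example (hypothetical target values, for illustration only):
   with PREFERRED_STACK_BOUNDARY == 128 bits (a 16-byte boundary) and
   STARTING_FRAME_OFFSET == 8, expand_used_vars computes
   off = 8 % 16 == 8 and frame_phase = 16 - 8 == 8, so that
   (frame_offset + frame_phase) % 16 == 0 holds at allocation
   boundaries, as the comment above describes.  */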
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
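
/* Worked example (hypothetical values, for illustration only): on a
   FRAME_GROWS_DOWNWARD target with frame_phase == 0 and
   frame_offset == -4, a request for size == 8 at align == 8 computes
   new_frame_offset = (-4 - 8) & -8 == -16; the slot occupies
   [-16, -8) and both the returned offset and the updated frame_offset
   are -16.  */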
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  unsigned j;
                  bitmap_iterator bj;
                  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
                    add_stack_var_conflict (i, j);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
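
/* For illustration (hypothetical inputs): among small-alignment
   variables of sizes 32, 8 and 8 bytes, this comparator sorts the
   indices so the 32-byte variable comes first; any variable whose
   alignment exceeded MAX_SUPPORTED_STACK_ALIGNMENT would sort before
   all three, and the final ID comparison only serves to make the
   qsort order deterministic across runs.  */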
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
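
/* For illustration (hypothetical inputs): given sorted sizes
   {32, 16, 8} where only the 32- and 16-byte variables conflict, the
   outer loop keeps 32 as a representative, skips 16 because of the
   conflict, and unions the 8-byte variable into the 32-byte
   partition; 16 then forms its own partition, so the frame needs
   roughly 32 + 16 bytes rather than 32 + 16 + 8.  */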
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
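
/* For illustration (hypothetical case): at -O0 without
   -fstack-protector, a 16-byte block-scope local falls under the
   32-byte cutoff above and is allocated immediately (false), while a
   64-byte buffer is deferred into STACK_VARS (true); with
   -fstack-protector both would be deferred regardless of size.  */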
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
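
/* For illustration (hypothetical decls, default --param
   ssp-buffer-size=8): "char buf[64]" classifies as a large character
   array with no aggregate bits, so it lands in phase 1 both under
   -fstack-protector-all (flag_stack_protect == 2) and under plain
   -fstack-protector; a plain "int n" sets no array bits and stays in
   phase 0.  */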
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
1810 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1811 Returns a new basic block if we've terminated the current basic
1812 block and created a new one. */
1814 static basic_block
1815 expand_gimple_cond (basic_block bb, gimple stmt)
1817 basic_block new_bb, dest;
1818 edge new_edge;
1819 edge true_edge;
1820 edge false_edge;
1821 rtx last2, last;
1822 enum tree_code code;
1823 tree op0, op1;
1825 code = gimple_cond_code (stmt);
1826 op0 = gimple_cond_lhs (stmt);
1827 op1 = gimple_cond_rhs (stmt);
1828 /* We're sometimes presented with such code:
1829 D.123_1 = x < y;
1830 if (D.123_1 != 0)
1832 This would expand to two comparisons which then later might
1833 be cleaned up by combine. But some pattern matchers like if-conversion
1834 work better when there's only one compare, so make up for this
1835 here as special exception if TER would have made the same change. */
1836 if (gimple_cond_single_var_p (stmt)
1837 && SA.values
1838 && TREE_CODE (op0) == SSA_NAME
1839 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1841 gimple second = SSA_NAME_DEF_STMT (op0);
1842 if (gimple_code (second) == GIMPLE_ASSIGN)
1844 enum tree_code code2 = gimple_assign_rhs_code (second);
1845 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1847 code = code2;
1848 op0 = gimple_assign_rhs1 (second);
1849 op1 = gimple_assign_rhs2 (second);
1851 /* If jumps are cheap turn some more codes into
1852 jumpy sequences. */
1853 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1855 if ((code2 == BIT_AND_EXPR
1856 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1857 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1858 || code2 == TRUTH_AND_EXPR)
1860 code = TRUTH_ANDIF_EXPR;
1861 op0 = gimple_assign_rhs1 (second);
1862 op1 = gimple_assign_rhs2 (second);
1864 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1866 code = TRUTH_ORIF_EXPR;
1867 op0 = gimple_assign_rhs1 (second);
1868 op1 = gimple_assign_rhs2 (second);
1874 last2 = last = get_last_insn ();
1876 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1877 set_curr_insn_source_location (gimple_location (stmt));
1878 set_curr_insn_block (gimple_block (stmt));
1880 /* These flags have no purpose in RTL land. */
1881 true_edge->flags &= ~EDGE_TRUE_VALUE;
1882 false_edge->flags &= ~EDGE_FALSE_VALUE;
1884 /* We can either have a pure conditional jump with one fallthru edge or
1885 a two-way jump that needs to be decomposed into two basic blocks. */
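/* Concretely, three cases follow: the false edge falls through to the
   next block (emit just the conditional jump to the true destination),
   the true edge falls through (emit the inverted jump to the false
   destination), or neither falls through (emit the conditional jump
   plus an unconditional jump, and split off a new basic block).  */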
1886 if (false_edge->dest == bb->next_bb)
1888 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1889 true_edge->probability);
1890 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1891 if (true_edge->goto_locus)
1893 set_curr_insn_source_location (true_edge->goto_locus);
1894 set_curr_insn_block (true_edge->goto_block);
1895 true_edge->goto_locus = curr_insn_locator ();
1897 true_edge->goto_block = NULL;
1898 false_edge->flags |= EDGE_FALLTHRU;
1899 maybe_cleanup_end_of_block (false_edge, last);
1900 return NULL;
1902 if (true_edge->dest == bb->next_bb)
1904 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1905 false_edge->probability);
1906 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1907 if (false_edge->goto_locus)
1909 set_curr_insn_source_location (false_edge->goto_locus);
1910 set_curr_insn_block (false_edge->goto_block);
1911 false_edge->goto_locus = curr_insn_locator ();
1913 false_edge->goto_block = NULL;
1914 true_edge->flags |= EDGE_FALLTHRU;
1915 maybe_cleanup_end_of_block (true_edge, last);
1916 return NULL;
1919 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1920 true_edge->probability);
1921 last = get_last_insn ();
1922 if (false_edge->goto_locus)
1924 set_curr_insn_source_location (false_edge->goto_locus);
1925 set_curr_insn_block (false_edge->goto_block);
1926 false_edge->goto_locus = curr_insn_locator ();
1928 false_edge->goto_block = NULL;
1929 emit_jump (label_rtx_for_bb (false_edge->dest));
1931 BB_END (bb) = last;
1932 if (BARRIER_P (BB_END (bb)))
1933 BB_END (bb) = PREV_INSN (BB_END (bb));
1934 update_bb_for_insn (bb);
1936 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1937 dest = false_edge->dest;
1938 redirect_edge_succ (false_edge, new_bb);
1939 false_edge->flags |= EDGE_FALLTHRU;
1940 new_bb->count = false_edge->count;
1941 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1942 new_edge = make_edge (new_bb, dest, 0);
1943 new_edge->probability = REG_BR_PROB_BASE;
1944 new_edge->count = new_bb->count;
1945 if (BARRIER_P (BB_END (new_bb)))
1946 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1947 update_bb_for_insn (new_bb);
1949 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1951 if (true_edge->goto_locus)
1953 set_curr_insn_source_location (true_edge->goto_locus);
1954 set_curr_insn_block (true_edge->goto_block);
1955 true_edge->goto_locus = curr_insn_locator ();
1957 true_edge->goto_block = NULL;
1959 return new_bb;
1962 /* Mark all calls that can have a transaction restart. */
1964 static void
1965 mark_transaction_restart_calls (gimple stmt)
1967 struct tm_restart_node dummy;
1968 void **slot;
1970 if (!cfun->gimple_df->tm_restart)
1971 return;
1973 dummy.stmt = stmt;
1974 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1975 if (slot)
1977 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1978 tree list = n->label_or_list;
1979 rtx insn;
1981 for (insn = next_real_insn (get_last_insn ());
1982 !CALL_P (insn);
1983 insn = next_real_insn (insn))
1984 continue;
1986 if (TREE_CODE (list) == LABEL_DECL)
1987 add_reg_note (insn, REG_TM, label_rtx (list));
1988 else
1989 for (; list ; list = TREE_CHAIN (list))
1990 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1994 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1995 statement STMT. */
1997 static void
1998 expand_call_stmt (gimple stmt)
2000 tree exp, decl, lhs;
2001 bool builtin_p;
2002 size_t i;
2004 if (gimple_call_internal_p (stmt))
2006 expand_internal_call (stmt);
2007 return;
2010 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2012 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2013 decl = gimple_call_fndecl (stmt);
2014 builtin_p = decl && DECL_BUILT_IN (decl);
2016 /* If this is not a builtin function, the function type through which the
2017 call is made may be different from the type of the function. */
2018 if (!builtin_p)
2019 CALL_EXPR_FN (exp)
2020 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2021 CALL_EXPR_FN (exp));
2023 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2024 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2026 for (i = 0; i < gimple_call_num_args (stmt); i++)
2028 tree arg = gimple_call_arg (stmt, i);
2029 gimple def;
2030 /* TER substitutes addresses into arguments of builtin functions so we
2031 have a chance to infer more correct alignment information. See PR39954. */
2032 if (builtin_p
2033 && TREE_CODE (arg) == SSA_NAME
2034 && (def = get_gimple_for_ssa_name (arg))
2035 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2036 arg = gimple_assign_rhs1 (def);
2037 CALL_EXPR_ARG (exp, i) = arg;
2040 if (gimple_has_side_effects (stmt))
2041 TREE_SIDE_EFFECTS (exp) = 1;
2043 if (gimple_call_nothrow_p (stmt))
2044 TREE_NOTHROW (exp) = 1;
2046 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2047 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2048 if (decl
2049 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2050 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2051 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2052 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2053 else
2054 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2055 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2056 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2057 TREE_BLOCK (exp) = gimple_block (stmt);
2059 /* Ensure RTL is created for debug args. */
2060 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2062 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2063 unsigned int ix;
2064 tree dtemp;
2066 if (debug_args)
2067 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2069 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2070 expand_debug_expr (dtemp);
2074 lhs = gimple_call_lhs (stmt);
2075 if (lhs)
2076 expand_assignment (lhs, exp, false);
2077 else
2078 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2080 mark_transaction_restart_calls (stmt);
2083 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2084 STMT that doesn't require special handling for outgoing edges. That
2085 is, no tailcalls and no GIMPLE_COND. */
2087 static void
2088 expand_gimple_stmt_1 (gimple stmt)
2090 tree op0;
2092 set_curr_insn_source_location (gimple_location (stmt));
2093 set_curr_insn_block (gimple_block (stmt));
2095 switch (gimple_code (stmt))
2097 case GIMPLE_GOTO:
2098 op0 = gimple_goto_dest (stmt);
2099 if (TREE_CODE (op0) == LABEL_DECL)
2100 expand_goto (op0);
2101 else
2102 expand_computed_goto (op0);
2103 break;
2104 case GIMPLE_LABEL:
2105 expand_label (gimple_label_label (stmt));
2106 break;
2107 case GIMPLE_NOP:
2108 case GIMPLE_PREDICT:
2109 break;
2110 case GIMPLE_SWITCH:
2111 expand_case (stmt);
2112 break;
2113 case GIMPLE_ASM:
2114 expand_asm_stmt (stmt);
2115 break;
2116 case GIMPLE_CALL:
2117 expand_call_stmt (stmt);
2118 break;
2120 case GIMPLE_RETURN:
2121 op0 = gimple_return_retval (stmt);
2123 if (op0 && op0 != error_mark_node)
2125 tree result = DECL_RESULT (current_function_decl);
2127 /* If we are not returning the current function's RESULT_DECL,
2128 build an assignment to it. */
2129 if (op0 != result)
2131 /* I believe that a function's RESULT_DECL is unique. */
2132 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2134 /* ??? We'd like to use simply expand_assignment here,
2135 but this fails if the value is of BLKmode but the return
2136 decl is a register. expand_return has special handling
2137 for this combination, which eventually should move
2138 to common code. See comments there. Until then, let's
2139 build a modify expression :-/ */
2140 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2141 result, op0);
2144 if (!op0)
2145 expand_null_return ();
2146 else
2147 expand_return (op0);
2148 break;
2150 case GIMPLE_ASSIGN:
2152 tree lhs = gimple_assign_lhs (stmt);
2154 /* Tree expand used to fiddle with |= and &= of two bitfield
2155 COMPONENT_REFs here. This can't happen with gimple, where the LHS
2156 of a binary assign must be a gimple reg. */
2158 if (TREE_CODE (lhs) != SSA_NAME
2159 || get_gimple_rhs_class (gimple_expr_code (stmt))
2160 == GIMPLE_SINGLE_RHS)
2162 tree rhs = gimple_assign_rhs1 (stmt);
2163 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2164 == GIMPLE_SINGLE_RHS);
2165 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2166 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2167 if (TREE_CLOBBER_P (rhs))
2168 /* This is a clobber marking that this LHS is going out of
2169 scope; there is nothing to expand for it. */
2171 else
2172 expand_assignment (lhs, rhs,
2173 gimple_assign_nontemporal_move_p (stmt));
2175 else
2177 rtx target, temp;
2178 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2179 struct separate_ops ops;
2180 bool promoted = false;
2182 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2183 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2184 promoted = true;
2186 ops.code = gimple_assign_rhs_code (stmt);
2187 ops.type = TREE_TYPE (lhs);
2188 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2190 case GIMPLE_TERNARY_RHS:
2191 ops.op2 = gimple_assign_rhs3 (stmt);
2192 /* Fallthru */
2193 case GIMPLE_BINARY_RHS:
2194 ops.op1 = gimple_assign_rhs2 (stmt);
2195 /* Fallthru */
2196 case GIMPLE_UNARY_RHS:
2197 ops.op0 = gimple_assign_rhs1 (stmt);
2198 break;
2199 default:
2200 gcc_unreachable ();
2202 ops.location = gimple_location (stmt);
2204 /* If we want to use a nontemporal store, force the value into a
2205 register first. If we store into a promoted register,
2206 don't expand directly to the target. */
2207 temp = nontemporal || promoted ? NULL_RTX : target;
2208 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2209 EXPAND_NORMAL);
2211 if (temp == target)
2213 else if (promoted)
2215 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2216 /* If TEMP is a VOIDmode constant, use convert_modes to make
2217 sure that we properly convert it. */
2218 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2220 temp = convert_modes (GET_MODE (target),
2221 TYPE_MODE (ops.type),
2222 temp, unsignedp);
2223 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2224 GET_MODE (target), temp, unsignedp);
2227 convert_move (SUBREG_REG (target), temp, unsignedp);
2229 else if (nontemporal && emit_storent_insn (target, temp))
2231 else
2233 temp = force_operand (temp, target);
2234 if (temp != target)
2235 emit_move_insn (target, temp);
2239 break;
2241 default:
2242 gcc_unreachable ();
2246 /* Expand one gimple statement STMT and return the last RTL instruction
2247 before any of the newly generated ones.
2249 In addition to generating the necessary RTL instructions this also
2250 sets REG_EH_REGION notes if necessary and sets the current source
2251 location for diagnostics. */
2253 static rtx
2254 expand_gimple_stmt (gimple stmt)
2256 location_t saved_location = input_location;
2257 rtx last = get_last_insn ();
2258 int lp_nr;
2260 gcc_assert (cfun);
2262 /* We need to save and restore the current source location so that errors
2263 discovered during expansion are emitted with the right location. But
2264 it would be better if the diagnostic routines used the source location
2265 embedded in the tree nodes rather than globals. */
2266 if (gimple_has_location (stmt))
2267 input_location = gimple_location (stmt);
2269 expand_gimple_stmt_1 (stmt);
2271 /* Free any temporaries used to evaluate this statement. */
2272 free_temp_slots ();
2274 input_location = saved_location;
2276 /* Mark all insns that may trap. */
2277 lp_nr = lookup_stmt_eh_lp (stmt);
2278 if (lp_nr)
2280 rtx insn;
2281 for (insn = next_real_insn (last); insn;
2282 insn = next_real_insn (insn))
2284 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2285 /* If we want exceptions for non-call insns, any
2286 may_trap_p instruction may throw. */
2287 && GET_CODE (PATTERN (insn)) != CLOBBER
2288 && GET_CODE (PATTERN (insn)) != USE
2289 && insn_could_throw_p (insn))
2290 make_reg_eh_region_note (insn, 0, lp_nr);
2294 return last;
2297 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2298 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2299 generated a tail call (something that might be denied by the ABI
2300 rules governing the call; see calls.c).
2302 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2303 can still reach the rest of BB. The case here is __builtin_sqrt,
2304 where the NaN result goes through the external function (with a
2305 tailcall) and the normal result happens via a sqrt instruction. */
2307 static basic_block
2308 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2310 rtx last2, last;
2311 edge e;
2312 edge_iterator ei;
2313 int probability;
2314 gcov_type count;
2316 last2 = last = expand_gimple_stmt (stmt);
2318 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2319 if (CALL_P (last) && SIBLING_CALL_P (last))
2320 goto found;
2322 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2324 *can_fallthru = true;
2325 return NULL;
2327 found:
2328 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2329 Any instructions emitted here are about to be deleted. */
2330 do_pending_stack_adjust ();
2332 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2333 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2334 EH or abnormal edges, we shouldn't have created a tail call in
2335 the first place. So it seems to me we should just be removing
2336 all edges here, or redirecting the existing fallthru edge to
2337 the exit block. */
2339 probability = 0;
2340 count = 0;
2342 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2344 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2346 if (e->dest != EXIT_BLOCK_PTR)
2348 e->dest->count -= e->count;
2349 e->dest->frequency -= EDGE_FREQUENCY (e);
2350 if (e->dest->count < 0)
2351 e->dest->count = 0;
2352 if (e->dest->frequency < 0)
2353 e->dest->frequency = 0;
2355 count += e->count;
2356 probability += e->probability;
2357 remove_edge (e);
2359 else
2360 ei_next (&ei);
2363 /* This is somewhat ugly: the call_expr expander often emits instructions
2364 after the sibcall (to perform the function return). These confuse the
2365 find_many_sub_basic_blocks code, so we need to get rid of them. */
2366 last = NEXT_INSN (last);
2367 gcc_assert (BARRIER_P (last));
2369 *can_fallthru = false;
2370 while (NEXT_INSN (last))
2372 /* For instance an sqrt builtin expander may expand an `if' with
2373 the sibcall in the `then' arm and a label for the `else' arm. */
2374 if (LABEL_P (NEXT_INSN (last)))
2376 *can_fallthru = true;
2377 break;
2379 delete_insn (NEXT_INSN (last));
2382 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2383 e->probability += probability;
2384 e->count += count;
2385 BB_END (bb) = last;
2386 update_bb_for_insn (bb);
2388 if (NEXT_INSN (last))
2390 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2392 last = BB_END (bb);
2393 if (BARRIER_P (last))
2394 BB_END (bb) = PREV_INSN (last);
2397 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2399 return bb;
2402 /* Return the difference between the floor and the truncated result of
2403 a signed division by OP1 with remainder MOD. */
2404 static rtx
2405 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2407 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2408 return gen_rtx_IF_THEN_ELSE
2409 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2410 gen_rtx_IF_THEN_ELSE
2411 (mode, gen_rtx_LT (BImode,
2412 gen_rtx_DIV (mode, op1, mod),
2413 const0_rtx),
2414 constm1_rtx, const0_rtx),
2415 const0_rtx);
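/* A worked example, assuming C-style truncating division in SImode:
   for -10 / 7 the truncated quotient is -1 with remainder MOD = -3.
   MOD != 0 and OP1 / MOD = 7 / -3 = -2 < 0, so the adjustment is -1,
   yielding the floored quotient -1 + -1 = -2.  */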
2418 /* Return the difference between the ceil and the truncated result of
2419 a signed division by OP1 with remainder MOD. */
2420 static rtx
2421 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2423 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2424 return gen_rtx_IF_THEN_ELSE
2425 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2426 gen_rtx_IF_THEN_ELSE
2427 (mode, gen_rtx_GT (BImode,
2428 gen_rtx_DIV (mode, op1, mod),
2429 const0_rtx),
2430 const1_rtx, const0_rtx),
2431 const0_rtx);
2434 /* Return the difference between the ceil and the truncated result of
2435 an unsigned division by OP1 with remainder MOD. */
2436 static rtx
2437 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2439 /* (mod != 0 ? 1 : 0) */
2440 return gen_rtx_IF_THEN_ELSE
2441 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2442 const1_rtx, const0_rtx);
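/* A worked example: for unsigned 10 / 7 the truncated quotient is 1
   with remainder MOD = 3; MOD != 0, so the adjustment is +1, yielding
   the ceiling quotient 2.  */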
2445 /* Return the difference between the rounded and the truncated result
2446 of a signed division by OP1 with remainder MOD. Halfway cases are
2447 rounded away from zero, rather than to the nearest even number. */
2448 static rtx
2449 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2451 /* (abs (mod) >= abs (op1) - abs (mod)
2452 ? (op1 / mod > 0 ? 1 : -1)
2453 : 0) */
2454 return gen_rtx_IF_THEN_ELSE
2455 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2456 gen_rtx_MINUS (mode,
2457 gen_rtx_ABS (mode, op1),
2458 gen_rtx_ABS (mode, mod))),
2459 gen_rtx_IF_THEN_ELSE
2460 (mode, gen_rtx_GT (BImode,
2461 gen_rtx_DIV (mode, op1, mod),
2462 const0_rtx),
2463 const1_rtx, constm1_rtx),
2464 const0_rtx);
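/* A worked example: for 12 / 7 the truncated quotient is 1 with
   remainder MOD = 5.  abs(MOD) = 5 >= abs(OP1) - abs(MOD) = 2 and
   OP1 / MOD = 7 / 5 = 1 > 0, so the adjustment is +1, yielding the
   rounded quotient 2 (12/7 is about 1.71).  */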
2467 /* Return the difference between the rounded and the truncated result
2468 of an unsigned division by OP1 with remainder MOD. Halfway cases
2469 are rounded away from zero, rather than to the nearest even
2470 number. */
2471 static rtx
2472 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2474 /* (mod >= op1 - mod ? 1 : 0) */
2475 return gen_rtx_IF_THEN_ELSE
2476 (mode, gen_rtx_GE (BImode, mod,
2477 gen_rtx_MINUS (mode, op1, mod)),
2478 const1_rtx, const0_rtx);
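/* A worked example: for unsigned 12 / 7, MOD = 5 >= OP1 - MOD = 2,
   so the adjustment is +1, rounding the truncated quotient 1 up
   to 2.  */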
2481 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2482 any rtl. */
2484 static rtx
2485 convert_debug_memory_address (enum machine_mode mode, rtx x,
2486 addr_space_t as)
2488 enum machine_mode xmode = GET_MODE (x);
2490 #ifndef POINTERS_EXTEND_UNSIGNED
2491 gcc_assert (mode == Pmode
2492 || mode == targetm.addr_space.address_mode (as));
2493 gcc_assert (xmode == mode || xmode == VOIDmode);
2494 #else
2495 rtx temp;
2496 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2497 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2499 gcc_assert (mode == address_mode || mode == pointer_mode);
2501 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2502 return x;
2504 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2505 x = simplify_gen_subreg (mode, x, xmode,
2506 subreg_lowpart_offset
2507 (mode, xmode));
2508 else if (POINTERS_EXTEND_UNSIGNED > 0)
2509 x = gen_rtx_ZERO_EXTEND (mode, x);
2510 else if (!POINTERS_EXTEND_UNSIGNED)
2511 x = gen_rtx_SIGN_EXTEND (mode, x);
2512 else
2514 switch (GET_CODE (x))
2516 case SUBREG:
2517 if ((SUBREG_PROMOTED_VAR_P (x)
2518 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2519 || (GET_CODE (SUBREG_REG (x)) == PLUS
2520 && REG_P (XEXP (SUBREG_REG (x), 0))
2521 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2522 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2523 && GET_MODE (SUBREG_REG (x)) == mode)
2524 return SUBREG_REG (x);
2525 break;
2526 case LABEL_REF:
2527 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2528 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2529 return temp;
2530 case SYMBOL_REF:
2531 temp = shallow_copy_rtx (x);
2532 PUT_MODE (temp, mode);
2533 return temp;
2534 case CONST:
2535 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2536 if (temp)
2537 temp = gen_rtx_CONST (mode, temp);
2538 return temp;
2539 case PLUS:
2540 case MINUS:
2541 if (CONST_INT_P (XEXP (x, 1)))
2543 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2544 if (temp)
2545 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2547 break;
2548 default:
2549 break;
2551 /* Don't know how to express ptr_extend as operation in debug info. */
2552 return NULL;
2554 #endif /* POINTERS_EXTEND_UNSIGNED */
2556 return x;
2559 /* Return an RTX equivalent to the value of the parameter DECL. */
2561 static rtx
2562 expand_debug_parm_decl (tree decl)
2564 rtx incoming = DECL_INCOMING_RTL (decl);
2566 if (incoming
2567 && GET_MODE (incoming) != BLKmode
2568 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2569 || (MEM_P (incoming)
2570 && REG_P (XEXP (incoming, 0))
2571 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2573 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2575 #ifdef HAVE_window_save
2576 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2577 If the target machine has an explicit window save instruction, the
2578 actual entry value is the corresponding OUTGOING_REGNO instead. */
2579 if (REG_P (incoming)
2580 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2581 incoming
2582 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2583 OUTGOING_REGNO (REGNO (incoming)), 0);
2584 else if (MEM_P (incoming))
2586 rtx reg = XEXP (incoming, 0);
2587 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2589 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2590 incoming = replace_equiv_address_nv (incoming, reg);
2593 #endif
2595 ENTRY_VALUE_EXP (rtl) = incoming;
2596 return rtl;
2599 if (incoming
2600 && GET_MODE (incoming) != BLKmode
2601 && !TREE_ADDRESSABLE (decl)
2602 && MEM_P (incoming)
2603 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2604 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2605 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2606 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2607 return incoming;
2609 return NULL_RTX;
2612 /* Return an RTX equivalent to the value of the tree expression EXP. */
2614 static rtx
2615 expand_debug_expr (tree exp)
2617 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2618 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2619 enum machine_mode inner_mode = VOIDmode;
2620 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2621 addr_space_t as;
2623 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2625 case tcc_expression:
2626 switch (TREE_CODE (exp))
2628 case COND_EXPR:
2629 case DOT_PROD_EXPR:
2630 case WIDEN_MULT_PLUS_EXPR:
2631 case WIDEN_MULT_MINUS_EXPR:
2632 case FMA_EXPR:
2633 goto ternary;
2635 case TRUTH_ANDIF_EXPR:
2636 case TRUTH_ORIF_EXPR:
2637 case TRUTH_AND_EXPR:
2638 case TRUTH_OR_EXPR:
2639 case TRUTH_XOR_EXPR:
2640 goto binary;
2642 case TRUTH_NOT_EXPR:
2643 goto unary;
2645 default:
2646 break;
2648 break;
2650 ternary:
2651 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2652 if (!op2)
2653 return NULL_RTX;
2654 /* Fall through. */
2656 binary:
2657 case tcc_binary:
2658 case tcc_comparison:
2659 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2660 if (!op1)
2661 return NULL_RTX;
2662 /* Fall through. */
2664 unary:
2665 case tcc_unary:
2666 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2667 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2668 if (!op0)
2669 return NULL_RTX;
2670 break;
2672 case tcc_type:
2673 case tcc_statement:
2674 gcc_unreachable ();
2676 case tcc_constant:
2677 case tcc_exceptional:
2678 case tcc_declaration:
2679 case tcc_reference:
2680 case tcc_vl_exp:
2681 break;
2684 switch (TREE_CODE (exp))
2686 case STRING_CST:
2687 if (!lookup_constant_def (exp))
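/* A CONST_STRING can only represent a NUL-terminated string without
   embedded NULs, so punt below if the tree string is anything else.  */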
2689 if (strlen (TREE_STRING_POINTER (exp)) + 1
2690 != (size_t) TREE_STRING_LENGTH (exp))
2691 return NULL_RTX;
2692 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2693 op0 = gen_rtx_MEM (BLKmode, op0);
2694 set_mem_attributes (op0, exp, 0);
2695 return op0;
2697 /* Fall through... */
2699 case INTEGER_CST:
2700 case REAL_CST:
2701 case FIXED_CST:
2702 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2703 return op0;
2705 case COMPLEX_CST:
2706 gcc_assert (COMPLEX_MODE_P (mode));
2707 op0 = expand_debug_expr (TREE_REALPART (exp));
2708 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2709 return gen_rtx_CONCAT (mode, op0, op1);
2711 case DEBUG_EXPR_DECL:
2712 op0 = DECL_RTL_IF_SET (exp);
2714 if (op0)
2715 return op0;
2717 op0 = gen_rtx_DEBUG_EXPR (mode);
2718 DEBUG_EXPR_TREE_DECL (op0) = exp;
2719 SET_DECL_RTL (exp, op0);
2721 return op0;
2723 case VAR_DECL:
2724 case PARM_DECL:
2725 case FUNCTION_DECL:
2726 case LABEL_DECL:
2727 case CONST_DECL:
2728 case RESULT_DECL:
2729 op0 = DECL_RTL_IF_SET (exp);
2731 /* This decl was probably optimized away. */
2732 if (!op0)
2734 if (TREE_CODE (exp) != VAR_DECL
2735 || DECL_EXTERNAL (exp)
2736 || !TREE_STATIC (exp)
2737 || !DECL_NAME (exp)
2738 || DECL_HARD_REGISTER (exp)
2739 || DECL_IN_CONSTANT_POOL (exp)
2740 || mode == VOIDmode)
2741 return NULL;
2743 op0 = make_decl_rtl_for_debug (exp);
2744 if (!MEM_P (op0)
2745 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2746 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2747 return NULL;
2749 else
2750 op0 = copy_rtx (op0);
2752 if (GET_MODE (op0) == BLKmode
2753 /* If op0 is not BLKmode but MODE is, adjust_mode
2754 below would ICE. While it is likely a FE bug,
2755 try to be robust here. See PR43166. */
2756 || mode == BLKmode
2757 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2759 gcc_assert (MEM_P (op0));
2760 op0 = adjust_address_nv (op0, mode, 0);
2761 return op0;
2764 /* Fall through. */
2766 adjust_mode:
2767 case PAREN_EXPR:
2768 case NOP_EXPR:
2769 case CONVERT_EXPR:
2771 inner_mode = GET_MODE (op0);
2773 if (mode == inner_mode)
2774 return op0;
2776 if (inner_mode == VOIDmode)
2778 if (TREE_CODE (exp) == SSA_NAME)
2779 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2780 else
2781 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2782 if (mode == inner_mode)
2783 return op0;
2786 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2788 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2789 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2790 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2791 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2792 else
2793 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2795 else if (FLOAT_MODE_P (mode))
2797 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2798 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2799 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2800 else
2801 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2803 else if (FLOAT_MODE_P (inner_mode))
2805 if (unsignedp)
2806 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2807 else
2808 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2810 else if (CONSTANT_P (op0)
2811 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2812 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2813 subreg_lowpart_offset (mode,
2814 inner_mode));
2815 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2816 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2817 : unsignedp)
2818 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2819 else
2820 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2822 return op0;
2825 case MEM_REF:
2826 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2828 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2829 TREE_OPERAND (exp, 0),
2830 TREE_OPERAND (exp, 1));
2831 if (newexp)
2832 return expand_debug_expr (newexp);
2834 /* FALLTHROUGH */
2835 case INDIRECT_REF:
2836 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2837 if (!op0)
2838 return NULL;
2840 if (TREE_CODE (exp) == MEM_REF)
2842 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2843 || (GET_CODE (op0) == PLUS
2844 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2845 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2846 Instead just use get_inner_reference. */
2847 goto component_ref;
2849 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2850 if (!op1 || !CONST_INT_P (op1))
2851 return NULL;
2853 op0 = plus_constant (op0, INTVAL (op1));
2856 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2857 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2858 else
2859 as = ADDR_SPACE_GENERIC;
2861 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2862 op0, as);
2863 if (op0 == NULL_RTX)
2864 return NULL;
2866 op0 = gen_rtx_MEM (mode, op0);
2867 set_mem_attributes (op0, exp, 0);
2868 if (TREE_CODE (exp) == MEM_REF
2869 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2870 set_mem_expr (op0, NULL_TREE);
2871 set_mem_addr_space (op0, as);
2873 return op0;
2875 case TARGET_MEM_REF:
2876 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2877 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2878 return NULL;
2880 op0 = expand_debug_expr
2881 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2882 if (!op0)
2883 return NULL;
2885 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2886 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2887 else
2888 as = ADDR_SPACE_GENERIC;
2890 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2891 op0, as);
2892 if (op0 == NULL_RTX)
2893 return NULL;
2895 op0 = gen_rtx_MEM (mode, op0);
2897 set_mem_attributes (op0, exp, 0);
2898 set_mem_addr_space (op0, as);
2900 return op0;
2902 component_ref:
2903 case ARRAY_REF:
2904 case ARRAY_RANGE_REF:
2905 case COMPONENT_REF:
2906 case BIT_FIELD_REF:
2907 case REALPART_EXPR:
2908 case IMAGPART_EXPR:
2909 case VIEW_CONVERT_EXPR:
2911 enum machine_mode mode1;
2912 HOST_WIDE_INT bitsize, bitpos;
2913 tree offset;
2914 int volatilep = 0;
2915 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2916 &mode1, &unsignedp, &volatilep, false);
2917 rtx orig_op0;
2919 if (bitsize == 0)
2920 return NULL;
2922 orig_op0 = op0 = expand_debug_expr (tem);
2924 if (!op0)
2925 return NULL;
2927 if (offset)
2929 enum machine_mode addrmode, offmode;
2931 if (!MEM_P (op0))
2932 return NULL;
2934 op0 = XEXP (op0, 0);
2935 addrmode = GET_MODE (op0);
2936 if (addrmode == VOIDmode)
2937 addrmode = Pmode;
2939 op1 = expand_debug_expr (offset);
2940 if (!op1)
2941 return NULL;
2943 offmode = GET_MODE (op1);
2944 if (offmode == VOIDmode)
2945 offmode = TYPE_MODE (TREE_TYPE (offset));
2947 if (addrmode != offmode)
2948 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2949 subreg_lowpart_offset (addrmode,
2950 offmode));
2952 /* Don't use offset_address here, we don't need a
2953 recognizable address, and we don't want to generate
2954 code. */
2955 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2956 op0, op1));
2959 if (MEM_P (op0))
2961 if (mode1 == VOIDmode)
2962 /* Bitfield. */
2963 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2964 if (bitpos >= BITS_PER_UNIT)
2966 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2967 bitpos %= BITS_PER_UNIT;
2969 else if (bitpos < 0)
2971 HOST_WIDE_INT units
2972 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2973 op0 = adjust_address_nv (op0, mode1, units);
2974 bitpos += units * BITS_PER_UNIT;
2976 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2977 op0 = adjust_address_nv (op0, mode, 0);
2978 else if (GET_MODE (op0) != mode1)
2979 op0 = adjust_address_nv (op0, mode1, 0);
2980 else
2981 op0 = copy_rtx (op0);
2982 if (op0 == orig_op0)
2983 op0 = shallow_copy_rtx (op0);
2984 set_mem_attributes (op0, exp, 0);
2987 if (bitpos == 0 && mode == GET_MODE (op0))
2988 return op0;
2990 if (bitpos < 0)
2991 return NULL;
2993 if (GET_MODE (op0) == BLKmode)
2994 return NULL;
2996 if ((bitpos % BITS_PER_UNIT) == 0
2997 && bitsize == GET_MODE_BITSIZE (mode1))
2999 enum machine_mode opmode = GET_MODE (op0);
3001 if (opmode == VOIDmode)
3002 opmode = TYPE_MODE (TREE_TYPE (tem));
3004 /* This condition may hold if we're expanding the address
3005 right past the end of an array that turned out not to
3006 be addressable (i.e., the address was only computed in
3007 debug stmts). The gen_subreg below would rightfully
3008 crash, and the address doesn't really exist, so just
3009 drop it. */
3010 if (bitpos >= GET_MODE_BITSIZE (opmode))
3011 return NULL;
3013 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3014 return simplify_gen_subreg (mode, op0, opmode,
3015 bitpos / BITS_PER_UNIT);
3018 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3019 && TYPE_UNSIGNED (TREE_TYPE (exp))
3020 ? SIGN_EXTRACT
3021 : ZERO_EXTRACT, mode,
3022 GET_MODE (op0) != VOIDmode
3023 ? GET_MODE (op0)
3024 : TYPE_MODE (TREE_TYPE (tem)),
3025 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3028 case ABS_EXPR:
3029 return simplify_gen_unary (ABS, mode, op0, mode);
3031 case NEGATE_EXPR:
3032 return simplify_gen_unary (NEG, mode, op0, mode);
3034 case BIT_NOT_EXPR:
3035 return simplify_gen_unary (NOT, mode, op0, mode);
3037 case FLOAT_EXPR:
3038 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3039 0)))
3040 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3041 inner_mode);
3043 case FIX_TRUNC_EXPR:
3044 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3045 inner_mode);
3047 case POINTER_PLUS_EXPR:
3048 /* For the rare target where pointers are not the same size as
3049 size_t, we need to check for mismatched modes and correct
3050 the addend. */
3051 if (op0 && op1
3052 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3053 && GET_MODE (op0) != GET_MODE (op1))
3055 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3056 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3057 GET_MODE (op1));
3058 else
3059 /* We always sign-extend, regardless of the signedness of
3060 the operand, because the operand is always unsigned
3061 here even if the original C expression is signed. */
3062 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3063 GET_MODE (op1));
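/* For instance, with a 64-bit pointer in OP0 and a 32-bit addend in
   OP1, the addend is sign-extended to the pointer's mode before the
   PLUS below; an addend wider than the pointer would instead be
   truncated.  */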
3065 /* Fall through. */
3066 case PLUS_EXPR:
3067 return simplify_gen_binary (PLUS, mode, op0, op1);
3069 case MINUS_EXPR:
3070 return simplify_gen_binary (MINUS, mode, op0, op1);
3072 case MULT_EXPR:
3073 return simplify_gen_binary (MULT, mode, op0, op1);
3075 case RDIV_EXPR:
3076 case TRUNC_DIV_EXPR:
3077 case EXACT_DIV_EXPR:
3078 if (unsignedp)
3079 return simplify_gen_binary (UDIV, mode, op0, op1);
3080 else
3081 return simplify_gen_binary (DIV, mode, op0, op1);
3083 case TRUNC_MOD_EXPR:
3084 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3086 case FLOOR_DIV_EXPR:
3087 if (unsignedp)
3088 return simplify_gen_binary (UDIV, mode, op0, op1);
3089 else
3091 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3092 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3093 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3094 return simplify_gen_binary (PLUS, mode, div, adj);
3097 case FLOOR_MOD_EXPR:
3098 if (unsignedp)
3099 return simplify_gen_binary (UMOD, mode, op0, op1);
3100 else
3102 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3103 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3104 adj = simplify_gen_unary (NEG, mode,
3105 simplify_gen_binary (MULT, mode, adj, op1),
3106 mode);
3107 return simplify_gen_binary (PLUS, mode, mod, adj);
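/* A worked example: for -10 floor-mod 7 the truncated remainder is
   MOD = -3 and floor_sdiv_adjust gives -1, so ADJ = -(-1 * 7) = 7 and
   the result is -3 + 7 = 4, which matches -10 - 7 * floor(-10/7).  */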
3110 case CEIL_DIV_EXPR:
3111 if (unsignedp)
3113 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3114 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3115 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3116 return simplify_gen_binary (PLUS, mode, div, adj);
3118 else
3120 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3121 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3122 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3123 return simplify_gen_binary (PLUS, mode, div, adj);
3126 case CEIL_MOD_EXPR:
3127 if (unsignedp)
3129 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3130 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3131 adj = simplify_gen_unary (NEG, mode,
3132 simplify_gen_binary (MULT, mode, adj, op1),
3133 mode);
3134 return simplify_gen_binary (PLUS, mode, mod, adj);
3136 else
3138 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3139 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3140 adj = simplify_gen_unary (NEG, mode,
3141 simplify_gen_binary (MULT, mode, adj, op1),
3142 mode);
3143 return simplify_gen_binary (PLUS, mode, mod, adj);
3146 case ROUND_DIV_EXPR:
3147 if (unsignedp)
3149 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3150 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3151 rtx adj = round_udiv_adjust (mode, mod, op1);
3152 return simplify_gen_binary (PLUS, mode, div, adj);
3154 else
3156 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3157 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3158 rtx adj = round_sdiv_adjust (mode, mod, op1);
3159 return simplify_gen_binary (PLUS, mode, div, adj);
3162 case ROUND_MOD_EXPR:
3163 if (unsignedp)
3165 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3166 rtx adj = round_udiv_adjust (mode, mod, op1);
3167 adj = simplify_gen_unary (NEG, mode,
3168 simplify_gen_binary (MULT, mode, adj, op1),
3169 mode);
3170 return simplify_gen_binary (PLUS, mode, mod, adj);
3172 else
3174 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3175 rtx adj = round_sdiv_adjust (mode, mod, op1);
3176 adj = simplify_gen_unary (NEG, mode,
3177 simplify_gen_binary (MULT, mode, adj, op1),
3178 mode);
3179 return simplify_gen_binary (PLUS, mode, mod, adj);
3182 case LSHIFT_EXPR:
3183 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3185 case RSHIFT_EXPR:
3186 if (unsignedp)
3187 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3188 else
3189 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3191 case LROTATE_EXPR:
3192 return simplify_gen_binary (ROTATE, mode, op0, op1);
3194 case RROTATE_EXPR:
3195 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3197 case MIN_EXPR:
3198 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3200 case MAX_EXPR:
3201 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3203 case BIT_AND_EXPR:
3204 case TRUTH_AND_EXPR:
3205 return simplify_gen_binary (AND, mode, op0, op1);
3207 case BIT_IOR_EXPR:
3208 case TRUTH_OR_EXPR:
3209 return simplify_gen_binary (IOR, mode, op0, op1);
3211 case BIT_XOR_EXPR:
3212 case TRUTH_XOR_EXPR:
3213 return simplify_gen_binary (XOR, mode, op0, op1);
3215 case TRUTH_ANDIF_EXPR:
3216 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3218 case TRUTH_ORIF_EXPR:
3219 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3221 case TRUTH_NOT_EXPR:
3222 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3224 case LT_EXPR:
3225 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3226 op0, op1);
3228 case LE_EXPR:
3229 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3230 op0, op1);
3232 case GT_EXPR:
3233 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3234 op0, op1);
3236 case GE_EXPR:
3237 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3238 op0, op1);
3240 case EQ_EXPR:
3241 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3243 case NE_EXPR:
3244 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3246 case UNORDERED_EXPR:
3247 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3249 case ORDERED_EXPR:
3250 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3252 case UNLT_EXPR:
3253 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3255 case UNLE_EXPR:
3256 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3258 case UNGT_EXPR:
3259 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3261 case UNGE_EXPR:
3262 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3264 case UNEQ_EXPR:
3265 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3267 case LTGT_EXPR:
3268 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3270 case COND_EXPR:
3271 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3273 case COMPLEX_EXPR:
3274 gcc_assert (COMPLEX_MODE_P (mode));
3275 if (GET_MODE (op0) == VOIDmode)
3276 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3277 if (GET_MODE (op1) == VOIDmode)
3278 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3279 return gen_rtx_CONCAT (mode, op0, op1);
3281 case CONJ_EXPR:
3282 if (GET_CODE (op0) == CONCAT)
3283 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3284 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3285 XEXP (op0, 1),
3286 GET_MODE_INNER (mode)));
3287 else
3289 enum machine_mode imode = GET_MODE_INNER (mode);
3290 rtx re, im;
3292 if (MEM_P (op0))
3294 re = adjust_address_nv (op0, imode, 0);
3295 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3297 else
3299 enum machine_mode ifmode = int_mode_for_mode (mode);
3300 enum machine_mode ihmode = int_mode_for_mode (imode);
3301 rtx halfsize;
3302 if (ifmode == BLKmode || ihmode == BLKmode)
3303 return NULL;
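/* View OP0 as a single wide integer and pull out its two halves with
   ZERO_EXTRACTs as the real and imaginary parts.  */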
3304 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3305 re = op0;
3306 if (mode != ifmode)
3307 re = gen_rtx_SUBREG (ifmode, re, 0);
3308 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3309 if (imode != ihmode)
3310 re = gen_rtx_SUBREG (imode, re, 0);
3311 im = copy_rtx (op0);
3312 if (mode != ifmode)
3313 im = gen_rtx_SUBREG (ifmode, im, 0);
3314 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3315 if (imode != ihmode)
3316 im = gen_rtx_SUBREG (imode, im, 0);
3318 im = gen_rtx_NEG (imode, im);
3319 return gen_rtx_CONCAT (mode, re, im);
3322 case ADDR_EXPR:
3323 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3324 if (!op0 || !MEM_P (op0))
3326 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3327 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3328 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3329 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3330 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3331 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3333 if (handled_component_p (TREE_OPERAND (exp, 0)))
3335 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3336 tree decl
3337 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3338 &bitoffset, &bitsize, &maxsize);
3339 if ((TREE_CODE (decl) == VAR_DECL
3340 || TREE_CODE (decl) == PARM_DECL
3341 || TREE_CODE (decl) == RESULT_DECL)
3342 && (!TREE_ADDRESSABLE (decl)
3343 || target_for_debug_bind (decl))
3344 && (bitoffset % BITS_PER_UNIT) == 0
3345 && bitsize > 0
3346 && bitsize == maxsize)
3347 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3348 bitoffset / BITS_PER_UNIT);
3351 return NULL;
3354 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3355 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3357 return op0;
3359 case VECTOR_CST:
3360 exp = build_constructor_from_list (TREE_TYPE (exp),
3361 TREE_VECTOR_CST_ELTS (exp));
3362 /* Fall through. */
3364 case CONSTRUCTOR:
3365 if (TREE_CLOBBER_P (exp))
3366 return NULL;
3367 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3369 unsigned i;
3370 tree val;
3372 op0 = gen_rtx_CONCATN
3373 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3375 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3377 op1 = expand_debug_expr (val);
3378 if (!op1)
3379 return NULL;
3380 XVECEXP (op0, 0, i) = op1;
3383 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3385 op1 = expand_debug_expr
3386 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3388 if (!op1)
3389 return NULL;
3391 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3392 XVECEXP (op0, 0, i) = op1;
3395 return op0;
3397 else
3398 goto flag_unsupported;
3400 case CALL_EXPR:
3401 /* ??? Maybe handle some builtins? */
3402 return NULL;
3404 case SSA_NAME:
3406 gimple g = get_gimple_for_ssa_name (exp);
3407 if (g)
3409 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3410 if (!op0)
3411 return NULL;
3413 else
3415 int part = var_to_partition (SA.map, exp);
3417 if (part == NO_PARTITION)
3419 /* If this is a reference to the incoming value of a parameter
3420 that is never used in the code, or where the incoming
3421 value itself is never used in the code, use the PARM_DECL's
3422 DECL_RTL if set. */
3423 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3424 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3426 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3427 if (op0)
3428 goto adjust_mode;
3429 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3430 if (op0)
3431 goto adjust_mode;
3433 return NULL;
3436 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3438 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3440 goto adjust_mode;
3443 case ERROR_MARK:
3444 return NULL;
3446 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
3447 case REALIGN_LOAD_EXPR:
3448 case REDUC_MAX_EXPR:
3449 case REDUC_MIN_EXPR:
3450 case REDUC_PLUS_EXPR:
3451 case VEC_COND_EXPR:
3452 case VEC_LSHIFT_EXPR:
3453 case VEC_PACK_FIX_TRUNC_EXPR:
3454 case VEC_PACK_SAT_EXPR:
3455 case VEC_PACK_TRUNC_EXPR:
3456 case VEC_RSHIFT_EXPR:
3457 case VEC_UNPACK_FLOAT_HI_EXPR:
3458 case VEC_UNPACK_FLOAT_LO_EXPR:
3459 case VEC_UNPACK_HI_EXPR:
3460 case VEC_UNPACK_LO_EXPR:
3461 case VEC_WIDEN_MULT_HI_EXPR:
3462 case VEC_WIDEN_MULT_LO_EXPR:
3463 case VEC_WIDEN_LSHIFT_HI_EXPR:
3464 case VEC_WIDEN_LSHIFT_LO_EXPR:
3465 case VEC_PERM_EXPR:
3466 return NULL;
3468 /* Misc codes. */
3469 case ADDR_SPACE_CONVERT_EXPR:
3470 case FIXED_CONVERT_EXPR:
3471 case OBJ_TYPE_REF:
3472 case WITH_SIZE_EXPR:
3473 return NULL;
3475 case DOT_PROD_EXPR:
3476 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3477 && SCALAR_INT_MODE_P (mode))
3480 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3481 0)))
3482 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3483 inner_mode);
3485 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3486 1)))
3487 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3488 inner_mode);
3489 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3490 return simplify_gen_binary (PLUS, mode, op0, op2);
3492 return NULL;
3494 case WIDEN_MULT_EXPR:
3495 case WIDEN_MULT_PLUS_EXPR:
3496 case WIDEN_MULT_MINUS_EXPR:
3497 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3498 && SCALAR_INT_MODE_P (mode))
3500 inner_mode = GET_MODE (op0);
3501 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3502 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3503 else
3504 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3505 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3506 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3507 else
3508 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3509 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3510 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3511 return op0;
3512 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3513 return simplify_gen_binary (PLUS, mode, op0, op2);
3514 else
3515 return simplify_gen_binary (MINUS, mode, op2, op0);
3517 return NULL;
3519 case WIDEN_SUM_EXPR:
3520 case WIDEN_LSHIFT_EXPR:
3521 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3522 && SCALAR_INT_MODE_P (mode))
3525 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3526 0)))
3527 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3528 inner_mode);
3529 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3530 ? ASHIFT : PLUS, mode, op0, op1);
3532 return NULL;
3534 case FMA_EXPR:
3535 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3537 default:
3538 flag_unsupported:
3539 #ifdef ENABLE_CHECKING
3540 debug_tree (exp);
3541 gcc_unreachable ();
3542 #else
3543 return NULL;
3544 #endif
3548 /* Return an RTX equivalent to the source bind value of the tree expression
3549 EXP. */
3551 static rtx
3552 expand_debug_source_expr (tree exp)
3554 rtx op0 = NULL_RTX;
3555 enum machine_mode mode = VOIDmode, inner_mode;
3557 switch (TREE_CODE (exp))
3559 case PARM_DECL:
3561 mode = DECL_MODE (exp);
3562 op0 = expand_debug_parm_decl (exp);
3563 if (op0)
3564 break;
3565 /* See whether this is an argument that has been completely
3566 optimized out. */
3567 if (!DECL_RTL_SET_P (exp)
3568 && !DECL_INCOMING_RTL (exp)
3569 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3571 tree aexp = exp;
3572 if (DECL_ABSTRACT_ORIGIN (exp))
3573 aexp = DECL_ABSTRACT_ORIGIN (exp);
3574 if (DECL_CONTEXT (aexp)
3575 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3577 VEC(tree, gc) **debug_args;
3578 unsigned int ix;
3579 tree ddecl;
3580 #ifdef ENABLE_CHECKING
3581 tree parm;
3582 for (parm = DECL_ARGUMENTS (current_function_decl);
3583 parm; parm = DECL_CHAIN (parm))
3584 gcc_assert (parm != exp
3585 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3586 #endif
3587 debug_args = decl_debug_args_lookup (current_function_decl);
3588 if (debug_args != NULL)
3590 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3591 ix += 2)
3592 if (ddecl == aexp)
3593 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3597 break;
3599 default:
3600 break;
3603 if (op0 == NULL_RTX)
3604 return NULL_RTX;
3606 inner_mode = GET_MODE (op0);
3607 if (mode == inner_mode)
3608 return op0;
3610 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3612 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3613 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3614 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3615 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3616 else
3617 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3619 else if (FLOAT_MODE_P (mode))
3620 gcc_unreachable ();
3621 else if (FLOAT_MODE_P (inner_mode))
3623 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3624 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3625 else
3626 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3628 else if (CONSTANT_P (op0)
3629 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3630 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3631 subreg_lowpart_offset (mode, inner_mode));
3632 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3633 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3634 else
3635 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3637 return op0;
3640 /* Expand the _LOCs in debug insns. We run this after expanding all
3641 regular insns, so that any variables referenced in the function
3642 will have their DECL_RTLs set. */
3644 static void
3645 expand_debug_locations (void)
3647 rtx insn;
3648 rtx last = get_last_insn ();
3649 int save_strict_alias = flag_strict_aliasing;
3651 /* New alias sets created while setting up memory attributes cause
3652 -fcompare-debug failures, even though they don't bring about any
3653 codegen changes. */
3654 flag_strict_aliasing = 0;
3656 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3657 if (DEBUG_INSN_P (insn))
3659 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3660 rtx val;
3661 enum machine_mode mode;
3663 if (value == NULL_TREE)
3664 val = NULL_RTX;
3665 else
3667 if (INSN_VAR_LOCATION_STATUS (insn)
3668 == VAR_INIT_STATUS_UNINITIALIZED)
3669 val = expand_debug_source_expr (value);
3670 else
3671 val = expand_debug_expr (value);
3672 gcc_assert (last == get_last_insn ());
3675 if (!val)
3676 val = gen_rtx_UNKNOWN_VAR_LOC ();
3677 else
3679 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3681 gcc_assert (mode == GET_MODE (val)
3682 || (GET_MODE (val) == VOIDmode
3683 && (CONST_INT_P (val)
3684 || GET_CODE (val) == CONST_FIXED
3685 || GET_CODE (val) == CONST_DOUBLE
3686 || GET_CODE (val) == LABEL_REF)));
3689 INSN_VAR_LOCATION_LOC (insn) = val;
3692 flag_strict_aliasing = save_strict_alias;
3695 /* Expand basic block BB from GIMPLE trees to RTL. */
3697 static basic_block
3698 expand_gimple_basic_block (basic_block bb)
3700 gimple_stmt_iterator gsi;
3701 gimple_seq stmts;
3702 gimple stmt = NULL;
3703 rtx note, last;
3704 edge e;
3705 edge_iterator ei;
3706 void **elt;
3708 if (dump_file)
3709 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3710 bb->index);
3712 /* Note that since we are now transitioning from GIMPLE to RTL, we
3713 cannot use the gsi_*_bb() routines because they expect the basic
3714 block to be in GIMPLE, instead of RTL. Therefore, we need to
3715 access the BB sequence directly. */
3716 stmts = bb_seq (bb);
3717 bb->il.gimple = NULL;
3718 rtl_profile_for_bb (bb);
3719 init_rtl_bb_info (bb);
3720 bb->flags |= BB_RTL;
3722 /* Remove the RETURN_EXPR if we may fall through to the exit
3723 instead. */
3724 gsi = gsi_last (stmts);
3725 if (!gsi_end_p (gsi)
3726 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3728 gimple ret_stmt = gsi_stmt (gsi);
3730 gcc_assert (single_succ_p (bb));
3731 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3733 if (bb->next_bb == EXIT_BLOCK_PTR
3734 && !gimple_return_retval (ret_stmt))
3736 gsi_remove (&gsi, false);
3737 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3741 gsi = gsi_start (stmts);
3742 if (!gsi_end_p (gsi))
3744 stmt = gsi_stmt (gsi);
3745 if (gimple_code (stmt) != GIMPLE_LABEL)
3746 stmt = NULL;
3749 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3751 if (stmt || elt)
3753 last = get_last_insn ();
3755 if (stmt)
3757 expand_gimple_stmt (stmt);
3758 gsi_next (&gsi);
3761 if (elt)
3762 emit_label ((rtx) *elt);
3764 /* Java emits line number notes at the top of labels.
3765 ??? Make this go away once line number notes are obsoleted. */
3766 BB_HEAD (bb) = NEXT_INSN (last);
3767 if (NOTE_P (BB_HEAD (bb)))
3768 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3769 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3771 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3773 else
3774 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3776 NOTE_BASIC_BLOCK (note) = bb;
3778 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3780 basic_block new_bb;
3782 stmt = gsi_stmt (gsi);
3784 /* If this statement is a non-debug one, and we generate debug
3785 insns, then this one might be the last real use of a TERed
3786 SSA_NAME, but where there are still some debug uses further
3787 down. Expanding the current SSA name in such further debug
3788 uses by their RHS might lead to wrong debug info, as coalescing
3789 might make the operands of such RHS be placed into the same
3790 pseudo as something else. Like so:
3791 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3792 use(a_1);
3793 a_2 = ...
3794 #DEBUG ... => a_1
3795 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3796 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3797 the write to a_2 would actually have clobbered the place which
3798 formerly held a_0.
3800 So, instead of that, we recognize the situation, and generate
3801 debug temporaries at the last real use of TERed SSA names:
3802 a_1 = a_0 + 1;
3803 #DEBUG #D1 => a_1
3804 use(a_1);
3805 a_2 = ...
3806 #DEBUG ... => #D1
3808 if (MAY_HAVE_DEBUG_INSNS
3809 && SA.values
3810 && !is_gimple_debug (stmt))
3812 ssa_op_iter iter;
3813 tree op;
3814 gimple def;
3816 location_t sloc = get_curr_insn_source_location ();
3817 tree sblock = get_curr_insn_block ();
3819 /* Look for SSA names that have their last use here (TERed
3820 names always have only one real use). */
3821 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3822 if ((def = get_gimple_for_ssa_name (op)))
3824 imm_use_iterator imm_iter;
3825 use_operand_p use_p;
3826 bool have_debug_uses = false;
3828 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3830 if (gimple_debug_bind_p (USE_STMT (use_p)))
3832 have_debug_uses = true;
3833 break;
3837 if (have_debug_uses)
3839 /* OP is a TERed SSA name, with DEF its defining
3840 statement, and where OP is used in further debug
3841 instructions. Generate a debug temporary, and
3842 replace all uses of OP in debug insns with that
3843 temporary. */
3844 gimple debugstmt;
3845 tree value = gimple_assign_rhs_to_tree (def);
3846 tree vexpr = make_node (DEBUG_EXPR_DECL);
3847 rtx val;
3848 enum machine_mode mode;
3850 set_curr_insn_source_location (gimple_location (def));
3851 set_curr_insn_block (gimple_block (def));
3853 DECL_ARTIFICIAL (vexpr) = 1;
3854 TREE_TYPE (vexpr) = TREE_TYPE (value);
3855 if (DECL_P (value))
3856 mode = DECL_MODE (value);
3857 else
3858 mode = TYPE_MODE (TREE_TYPE (value));
3859 DECL_MODE (vexpr) = mode;
3861 val = gen_rtx_VAR_LOCATION
3862 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3864 emit_debug_insn (val);
3866 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3868 if (!gimple_debug_bind_p (debugstmt))
3869 continue;
3871 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3872 SET_USE (use_p, vexpr);
3874 update_stmt (debugstmt);
3878 set_curr_insn_source_location (sloc);
3879 set_curr_insn_block (sblock);
3882 currently_expanding_gimple_stmt = stmt;
3884 /* Expand this statement, then evaluate the resulting RTL and
3885 fixup the CFG accordingly. */
3886 if (gimple_code (stmt) == GIMPLE_COND)
3887 {
3888 new_bb = expand_gimple_cond (bb, stmt);
3889 if (new_bb)
3890 return new_bb;
3891 }
3892 else if (gimple_debug_bind_p (stmt))
3893 {
3894 location_t sloc = get_curr_insn_source_location ();
3895 tree sblock = get_curr_insn_block ();
3896 gimple_stmt_iterator nsi = gsi;
3898 for (;;)
3899 {
3900 tree var = gimple_debug_bind_get_var (stmt);
3901 tree value;
3902 rtx val;
3903 enum machine_mode mode;
3905 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3906 && TREE_CODE (var) != LABEL_DECL
3907 && !target_for_debug_bind (var))
3908 goto delink_debug_stmt;
3910 if (gimple_debug_bind_has_value_p (stmt))
3911 value = gimple_debug_bind_get_value (stmt);
3912 else
3913 value = NULL_TREE;
3915 last = get_last_insn ();
3917 set_curr_insn_source_location (gimple_location (stmt));
3918 set_curr_insn_block (gimple_block (stmt));
3920 if (DECL_P (var))
3921 mode = DECL_MODE (var);
3922 else
3923 mode = TYPE_MODE (TREE_TYPE (var));
3925 val = gen_rtx_VAR_LOCATION
3926 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3928 emit_debug_insn (val);
3930 if (dump_file && (dump_flags & TDF_DETAILS))
3931 {
3932 /* We can't dump the insn with a TREE where an RTX
3933 is expected. */
3934 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3935 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3936 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3937 }
3939 delink_debug_stmt:
3940 /* In order not to generate too many debug temporaries,
3941 we delink all uses of debug statements we already expanded.
3942 Therefore debug statements between definition and real
3943 use of TERed SSA names will continue to use the SSA name,
3944 and not be replaced with debug temps. */
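/* Editor's addition, an illustrative sketch of the delinking effect
   (names are hypothetical):

     x_1 = a_0 + 1;      <-- x_1 is TERed
     # DEBUG d => x_1    <-- already expanded, then delinked here
     use (x_1);

   When use (x_1) is later scanned above for remaining debug uses, the
   delinked bind is no longer found, so no debug temporary is created
   for it and the bind keeps referring to x_1 directly.  */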
3945 delink_stmt_imm_use (stmt);
3947 gsi = nsi;
3948 gsi_next (&nsi);
3949 if (gsi_end_p (nsi))
3950 break;
3951 stmt = gsi_stmt (nsi);
3952 if (!gimple_debug_bind_p (stmt))
3953 break;
3954 }
3956 set_curr_insn_source_location (sloc);
3957 set_curr_insn_block (sblock);
3958 }
3959 else if (gimple_debug_source_bind_p (stmt))
3960 {
3961 location_t sloc = get_curr_insn_source_location ();
3962 tree sblock = get_curr_insn_block ();
3963 tree var = gimple_debug_source_bind_get_var (stmt);
3964 tree value = gimple_debug_source_bind_get_value (stmt);
3965 rtx val;
3966 enum machine_mode mode;
3968 last = get_last_insn ();
3970 set_curr_insn_source_location (gimple_location (stmt));
3971 set_curr_insn_block (gimple_block (stmt));
3973 mode = DECL_MODE (var);
3975 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3976 VAR_INIT_STATUS_UNINITIALIZED);
3978 emit_debug_insn (val);
3980 if (dump_file && (dump_flags & TDF_DETAILS))
3981 {
3982 /* We can't dump the insn with a TREE where an RTX
3983 is expected. */
3984 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3985 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3986 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3987 }
3989 set_curr_insn_source_location (sloc);
3990 set_curr_insn_block (sblock);
3991 }
3992 else
3993 {
3994 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3995 {
3996 bool can_fallthru;
3997 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3998 if (new_bb)
3999 {
4000 if (can_fallthru)
4001 bb = new_bb;
4002 else
4003 return new_bb;
4004 }
4005 }
4006 else
4007 {
4008 def_operand_p def_p;
4009 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4011 if (def_p != NULL)
4012 {
4013 /* Ignore this stmt if it is in the list of
4014 replaceable expressions. */
4015 if (SA.values
4016 && bitmap_bit_p (SA.values,
4017 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4018 continue;
4019 }
4020 last = expand_gimple_stmt (stmt);
4021 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4022 }
4023 }
4024 }
4026 currently_expanding_gimple_stmt = NULL;
4028 /* Expand implicit goto and convert goto_locus. */
4029 FOR_EACH_EDGE (e, ei, bb->succs)
4030 {
4031 if (e->goto_locus && e->goto_block)
4032 {
4033 set_curr_insn_source_location (e->goto_locus);
4034 set_curr_insn_block (e->goto_block);
4035 e->goto_locus = curr_insn_locator ();
4036 }
4037 e->goto_block = NULL;
4038 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4039 {
4040 emit_jump (label_rtx_for_bb (e->dest));
4041 e->flags &= ~EDGE_FALLTHRU;
4042 }
4043 }
4045 /* Expanded RTL can create a jump in the last instruction of a block.
4046 Later passes might assume it is a jump to the successor, which breaks
4047 edge insertion. We need to insert a dummy move to prevent this. PR41440. */
4048 if (single_succ_p (bb)
4049 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4050 && (last = get_last_insn ())
4051 && JUMP_P (last))
4052 {
4053 rtx dummy = gen_reg_rtx (SImode);
4054 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4055 }
4057 do_pending_stack_adjust ();
4059 /* Find the block tail. The last insn in the block is the insn
4060 before a barrier and/or table jump insn. */
4061 last = get_last_insn ();
4062 if (BARRIER_P (last))
4063 last = PREV_INSN (last);
4064 if (JUMP_TABLE_DATA_P (last))
4065 last = PREV_INSN (PREV_INSN (last));
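/* Editor's note (illustrative, not from the original source): for a
   tablejump the insn stream typically ends

     jump_insn -> code_label -> jump_table_data -> barrier

   so after stepping over the barrier, the double PREV_INSN above skips
   the table and its label to land on the tablejump itself.  */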
4066 BB_END (bb) = last;
4068 update_bb_for_insn (bb);
4070 return bb;
4071 }
4074 /* Create a basic block for initialization code. */
4076 static basic_block
4077 construct_init_block (void)
4078 {
4079 basic_block init_block, first_block;
4080 edge e = NULL;
4081 int flags;
4083 /* Multiple entry points not supported yet. */
4084 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4085 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4086 init_rtl_bb_info (EXIT_BLOCK_PTR);
4087 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4088 EXIT_BLOCK_PTR->flags |= BB_RTL;
4090 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4092 /* When the entry edge points to the first basic block, we don't need
4093 a jump; otherwise we have to jump to the proper target. */
4094 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4095 {
4096 tree label = gimple_block_label (e->dest);
4098 emit_jump (label_rtx (label));
4099 flags = 0;
4100 }
4101 else
4102 flags = EDGE_FALLTHRU;
4104 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4105 get_last_insn (),
4106 ENTRY_BLOCK_PTR);
4107 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4108 init_block->count = ENTRY_BLOCK_PTR->count;
4109 if (e)
4110 {
4111 first_block = e->dest;
4112 redirect_edge_succ (e, init_block);
4113 e = make_edge (init_block, first_block, flags);
4114 }
4115 else
4116 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4117 e->probability = REG_BR_PROB_BASE;
4118 e->count = ENTRY_BLOCK_PTR->count;
4120 update_bb_for_insn (init_block);
4121 return init_block;
4122 }
4124 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4125 found in the block tree. */
4127 static void
4128 set_block_levels (tree block, int level)
4129 {
4130 while (block)
4131 {
4132 BLOCK_NUMBER (block) = level;
4133 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4134 block = BLOCK_CHAIN (block);
4135 }
4136 }
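/* Editor's addition, a small worked example of the numbering: for a
   block tree like

     DECL_INITIAL          depth 0
       BLOCK A             depth 1
         BLOCK A1          depth 2
       BLOCK B             depth 1

   BLOCK_SUBBLOCKS descends one level while BLOCK_CHAIN walks siblings
   at the same level, which is exactly the recursion above.  */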
4138 /* Create a block containing landing pads and similar stuff. */
4140 static void
4141 construct_exit_block (void)
4142 {
4143 rtx head = get_last_insn ();
4144 rtx end;
4145 basic_block exit_block;
4146 edge e, e2;
4147 unsigned ix;
4148 edge_iterator ei;
4149 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4151 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4153 /* Make sure the locus is set to the end of the function, so that
4154 epilogue line numbers and warnings are set properly. */
4155 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4156 input_location = cfun->function_end_locus;
4158 /* The following insns belong to the top scope. */
4159 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4161 /* Generate rtl for function exit. */
4162 expand_function_end ();
4164 end = get_last_insn ();
4165 if (head == end)
4166 return;
4167 /* While emitting the function end we could move the end of the last basic block. */
4169 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4170 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4171 head = NEXT_INSN (head);
4172 exit_block = create_basic_block (NEXT_INSN (head), end,
4173 EXIT_BLOCK_PTR->prev_bb);
4174 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4175 exit_block->count = EXIT_BLOCK_PTR->count;
4177 ix = 0;
4178 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4179 {
4180 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4181 if (!(e->flags & EDGE_ABNORMAL))
4182 redirect_edge_succ (e, exit_block);
4183 else
4184 ix++;
4185 }
4187 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4188 e->probability = REG_BR_PROB_BASE;
4189 e->count = EXIT_BLOCK_PTR->count;
4190 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4191 if (e2 != e)
4192 {
4193 e->count -= e2->count;
4194 exit_block->count -= e2->count;
4195 exit_block->frequency -= EDGE_FREQUENCY (e2);
4196 }
4197 if (e->count < 0)
4198 e->count = 0;
4199 if (exit_block->count < 0)
4200 exit_block->count = 0;
4201 if (exit_block->frequency < 0)
4202 exit_block->frequency = 0;
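/* Editor's addition, a worked example with illustrative numbers: if
   EXIT originally had count 100 and one abnormal predecessor of count
   30 stays attached to it, the loop above leaves the new fallthru edge
   and exit_block with count 100 - 30 = 70; the clamps just above only
   guard against inconsistent profiles driving the values negative.  */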
4203 update_bb_for_insn (exit_block);
4204 }
4206 /* Helper function for discover_nonconstant_array_refs.
4207 Look for ARRAY_REF nodes with non-constant indexes and mark them
4208 addressable. */
4210 static tree
4211 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4212 void *data ATTRIBUTE_UNUSED)
4213 {
4214 tree t = *tp;
4216 if (IS_TYPE_OR_DECL_P (t))
4217 *walk_subtrees = 0;
4218 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4219 {
4220 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4221 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4222 && (!TREE_OPERAND (t, 2)
4223 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4224 || (TREE_CODE (t) == COMPONENT_REF
4225 && (!TREE_OPERAND (t,2)
4226 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4227 || TREE_CODE (t) == BIT_FIELD_REF
4228 || TREE_CODE (t) == REALPART_EXPR
4229 || TREE_CODE (t) == IMAGPART_EXPR
4230 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4231 || CONVERT_EXPR_P (t))
4232 t = TREE_OPERAND (t, 0);
4234 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4235 {
4236 t = get_base_address (t);
4237 if (t && DECL_P (t)
4238 && DECL_MODE (t) != BLKmode)
4239 TREE_ADDRESSABLE (t) = 1;
4240 }
4242 *walk_subtrees = 0;
4243 }
4245 return NULL_TREE;
4246 }
4248 /* RTL expansion is not able to compile array references with variable
4249 offsets for arrays stored in a single register. Discover such
4250 expressions and mark variables as addressable to avoid this
4251 scenario. */
4253 static void
4254 discover_nonconstant_array_refs (void)
4255 {
4256 basic_block bb;
4257 gimple_stmt_iterator gsi;
4259 FOR_EACH_BB (bb)
4260 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4261 {
4262 gimple stmt = gsi_stmt (gsi);
4263 if (!is_gimple_debug (stmt))
4264 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4265 }
4266 }
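/* Editor's addition, an illustrative source fragment (hypothetical,
   assumes a target with vector registers) that triggers the marking:

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;
     int f (int i) { return v[i]; }

   Since I is not a compile-time constant, V cannot stay in a single
   register, so it is marked TREE_ADDRESSABLE and ends up in memory.  */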
4268 /* This function sets crtl->args.internal_arg_pointer to a virtual
4269 register if DRAP is needed. The local register allocator will replace
4270 virtual_incoming_args_rtx with the virtual register. */
4272 static void
4273 expand_stack_alignment (void)
4274 {
4275 rtx drap_rtx;
4276 unsigned int preferred_stack_boundary;
4278 if (! SUPPORTS_STACK_ALIGNMENT)
4279 return;
4281 if (cfun->calls_alloca
4282 || cfun->has_nonlocal_label
4283 || crtl->has_nonlocal_goto)
4284 crtl->need_drap = true;
4286 /* Call update_stack_boundary here again to update incoming stack
4287 boundary. It may set incoming stack alignment to a different
4288 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4289 use the minimum incoming stack alignment to check if it is OK
4290 to perform sibcall optimization since sibcall optimization will
4291 only align the outgoing stack to incoming stack boundary. */
4292 if (targetm.calls.update_stack_boundary)
4293 targetm.calls.update_stack_boundary ();
4295 /* The incoming stack frame has to be aligned at least at
4296 parm_stack_boundary. */
4297 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4299 /* Update crtl->stack_alignment_estimated and use it later to align
4300 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4301 exceptions since callgraph doesn't collect incoming stack alignment
4302 in this case. */
4303 if (cfun->can_throw_non_call_exceptions
4304 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4305 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4306 else
4307 preferred_stack_boundary = crtl->preferred_stack_boundary;
4308 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4309 crtl->stack_alignment_estimated = preferred_stack_boundary;
4310 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4311 crtl->stack_alignment_needed = preferred_stack_boundary;
4313 gcc_assert (crtl->stack_alignment_needed
4314 <= crtl->stack_alignment_estimated);
4316 crtl->stack_realign_needed
4317 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4318 crtl->stack_realign_tried = crtl->stack_realign_needed;
4320 crtl->stack_realign_processed = true;
4322 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4323 alignment. */
4324 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4325 drap_rtx = targetm.calls.get_drap_rtx ();
4327 /* stack_realign_drap and drap_rtx must match. */
4328 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4330 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4331 if (NULL != drap_rtx)
4332 {
4333 crtl->args.internal_arg_pointer = drap_rtx;
4335 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4336 needed. */
4337 fixup_tail_calls ();
4338 }
4339 }
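/* Editor's note (illustrative and target-specific, not from the
   original source): on an x86-64 target that realigns via DRAP, the
   back end typically emits a prologue along the lines of

	leaq	8(%rsp), %r10	# DRAP: remember incoming argument area
	andq	$-32, %rsp	# realign the stack
	pushq	-8(%r10)	# re-push the return address

   This pass only records whether realignment/DRAP is needed; the
   actual sequence is the target back end's business.  */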
4341 /* Translate the intermediate representation contained in the CFG
4342 from GIMPLE trees to RTL.
4344 We do conversion per basic block and preserve/update the tree CFG.
4345 This implies we have to do some magic as the CFG can simultaneously
4346 consist of basic blocks containing RTL and GIMPLE trees. This can
4347 confuse the CFG hooks, so be careful to not manipulate CFG during
4348 the expansion. */
4350 static unsigned int
4351 gimple_expand_cfg (void)
4352 {
4353 basic_block bb, init_block;
4354 sbitmap blocks;
4355 edge_iterator ei;
4356 edge e;
4357 rtx var_seq;
4358 unsigned i;
4360 timevar_push (TV_OUT_OF_SSA);
4361 rewrite_out_of_ssa (&SA);
4362 timevar_pop (TV_OUT_OF_SSA);
4363 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4364 sizeof (rtx));
4366 /* Some backends want to know that we are expanding to RTL. */
4367 currently_expanding_to_rtl = 1;
4369 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4371 insn_locators_alloc ();
4372 if (!DECL_IS_BUILTIN (current_function_decl))
4373 {
4374 /* Eventually, all FEs should explicitly set function_start_locus. */
4375 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4376 set_curr_insn_source_location
4377 (DECL_SOURCE_LOCATION (current_function_decl));
4378 else
4379 set_curr_insn_source_location (cfun->function_start_locus);
4380 }
4381 else
4382 set_curr_insn_source_location (UNKNOWN_LOCATION);
4383 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4384 prologue_locator = curr_insn_locator ();
4386 #ifdef INSN_SCHEDULING
4387 init_sched_attrs ();
4388 #endif
4390 /* Make sure first insn is a note even if we don't want linenums.
4391 This makes sure the first insn will never be deleted.
4392 Also, final expects a note to appear there. */
4393 emit_note (NOTE_INSN_DELETED);
4395 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4396 discover_nonconstant_array_refs ();
4398 targetm.expand_to_rtl_hook ();
4399 crtl->stack_alignment_needed = STACK_BOUNDARY;
4400 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4401 crtl->stack_alignment_estimated = 0;
4402 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4403 cfun->cfg->max_jumptable_ents = 0;
4405 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4406 of the function section at expansion time to predict the distance of calls. */
4407 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4409 /* Expand the variables recorded during gimple lowering. */
4410 timevar_push (TV_VAR_EXPAND);
4411 start_sequence ();
4413 expand_used_vars ();
4415 var_seq = get_insns ();
4416 end_sequence ();
4417 timevar_pop (TV_VAR_EXPAND);
4419 /* Honor stack protection warnings. */
4420 if (warn_stack_protect)
4421 {
4422 if (cfun->calls_alloca)
4423 warning (OPT_Wstack_protector,
4424 "stack protector not protecting local variables: "
4425 "variable length buffer");
4426 if (has_short_buffer && !crtl->stack_protect_guard)
4427 warning (OPT_Wstack_protector,
4428 "stack protector not protecting function: "
4429 "all local arrays are less than %d bytes long",
4430 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4431 }
4433 /* Set up parameters and prepare for return, for the function. */
4434 expand_function_start (current_function_decl);
4436 /* If we emitted any instructions for setting up the variables,
4437 emit them before the FUNCTION_START note. */
4438 if (var_seq)
4439 {
4440 emit_insn_before (var_seq, parm_birth_insn);
4442 /* In expand_function_end we'll insert the alloca save/restore
4443 before parm_birth_insn. We've just inserted an alloca call.
4444 Adjust the pointer to match. */
4445 parm_birth_insn = var_seq;
4446 }
4448 /* Now that we also have the parameter RTXs, copy them over to our
4449 partitions. */
4450 for (i = 0; i < SA.map->num_partitions; i++)
4451 {
4452 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4454 if (TREE_CODE (var) != VAR_DECL
4455 && !SA.partition_to_pseudo[i])
4456 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4457 gcc_assert (SA.partition_to_pseudo[i]);
4459 /* If this decl was marked as living in multiple places, reset
4460 this now to NULL. */
4461 if (DECL_RTL_IF_SET (var) == pc_rtx)
4462 SET_DECL_RTL (var, NULL);
4464 /* Some RTL parts really want to look at DECL_RTL(x) when x
4465 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4466 SET_DECL_RTL here making this available, but that would mean
4467 to select one of the potentially many RTLs for one DECL. Instead
4468 of doing that we simply reset the MEM_EXPR of the RTL in question,
4469 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4470 if (!DECL_RTL_SET_P (var))
4471 {
4472 if (MEM_P (SA.partition_to_pseudo[i]))
4473 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4474 }
4475 }
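/* Editor's addition, an illustrative case (hypothetical names): for

     int f (int i) { int j_2 = i_1(D) + 1; ... }

   the base variable of i_1(D)'s partition is a PARM_DECL, not a
   VAR_DECL, so the DECL_RTL_IF_SET fallback above hands the partition
   the pseudo that expand_function_start already assigned to I.  */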
4477 /* If we have a class containing differently aligned pointers
4478 we need to merge those into the corresponding RTL pointer
4479 alignment. */
4480 for (i = 1; i < num_ssa_names; i++)
4481 {
4482 tree name = ssa_name (i);
4483 int part;
4484 rtx r;
4486 if (!name
4487 || !POINTER_TYPE_P (TREE_TYPE (name))
4488 /* We might have generated new SSA names in
4489 update_alias_info_with_stack_vars. They will have a NULL
4490 defining statement, and won't be part of the partitioning,
4491 so ignore those. */
4492 || !SSA_NAME_DEF_STMT (name))
4493 continue;
4494 part = var_to_partition (SA.map, name);
4495 if (part == NO_PARTITION)
4496 continue;
4497 r = SA.partition_to_pseudo[part];
4498 if (REG_P (r))
4499 mark_reg_pointer (r, get_pointer_alignment (name));
4500 }
4502 /* If this function is `main', emit a call to `__main'
4503 to run global initializers, etc. */
4504 if (DECL_NAME (current_function_decl)
4505 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4506 && DECL_FILE_SCOPE_P (current_function_decl))
4507 expand_main_function ();
4509 /* Initialize the stack_protect_guard field. This must happen after the
4510 call to __main (if any) so that the external decl is initialized. */
4511 if (crtl->stack_protect_guard)
4512 stack_protect_prologue ();
4514 expand_phi_nodes (&SA);
4516 /* Register rtl specific functions for cfg. */
4517 rtl_register_cfg_hooks ();
4519 init_block = construct_init_block ();
4521 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4522 remaining edges later. */
4523 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4524 e->flags &= ~EDGE_EXECUTABLE;
4526 lab_rtx_for_bb = pointer_map_create ();
4527 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4528 bb = expand_gimple_basic_block (bb);
4530 if (MAY_HAVE_DEBUG_INSNS)
4531 expand_debug_locations ();
4533 execute_free_datastructures ();
4534 timevar_push (TV_OUT_OF_SSA);
4535 finish_out_of_ssa (&SA);
4536 timevar_pop (TV_OUT_OF_SSA);
4538 timevar_push (TV_POST_EXPAND);
4539 /* We are no longer in SSA form. */
4540 cfun->gimple_df->in_ssa_p = false;
4542 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
4543 conservatively to true until they are all profile aware. */
4544 pointer_map_destroy (lab_rtx_for_bb);
4545 free_histograms ();
4547 construct_exit_block ();
4548 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4549 insn_locators_finalize ();
4551 /* Zap the tree EH table. */
4552 set_eh_throw_stmt_table (cfun, NULL);
4554 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
4555 split edges which edge insertions might do. */
4556 rebuild_jump_labels (get_insns ());
4558 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4559 {
4560 edge e;
4561 edge_iterator ei;
4562 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4563 {
4564 if (e->insns.r)
4565 {
4566 rebuild_jump_labels_chain (e->insns.r);
4567 /* Avoid putting insns before parm_birth_insn. */
4568 if (e->src == ENTRY_BLOCK_PTR
4569 && single_succ_p (ENTRY_BLOCK_PTR)
4570 && parm_birth_insn)
4571 {
4572 rtx insns = e->insns.r;
4573 e->insns.r = NULL_RTX;
4574 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4575 }
4576 else
4577 commit_one_edge_insertion (e);
4578 }
4579 else
4580 ei_next (&ei);
4581 }
4582 }
4584 /* We're done expanding trees to RTL. */
4585 currently_expanding_to_rtl = 0;
4587 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4588 {
4589 edge e;
4590 edge_iterator ei;
4591 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4592 {
4593 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4594 e->flags &= ~EDGE_EXECUTABLE;
4596 /* At the moment not all abnormal edges match the RTL
4597 representation. It is safe to remove them here as
4598 find_many_sub_basic_blocks will rediscover them.
4599 In the future we should get this fixed properly. */
4600 if ((e->flags & EDGE_ABNORMAL)
4601 && !(e->flags & EDGE_SIBCALL))
4602 remove_edge (e);
4603 else
4604 ei_next (&ei);
4605 }
4606 }
4608 blocks = sbitmap_alloc (last_basic_block);
4609 sbitmap_ones (blocks);
4610 find_many_sub_basic_blocks (blocks);
4611 sbitmap_free (blocks);
4612 purge_all_dead_edges ();
4614 compact_blocks ();
4616 expand_stack_alignment ();
4618 #ifdef ENABLE_CHECKING
4619 verify_flow_info ();
4620 #endif
4622 /* There's no need to defer outputting this function any more; we
4623 know we want to output it. */
4624 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4626 /* Now that we're done expanding trees to RTL, we shouldn't have any
4627 more CONCATs anywhere. */
4628 generating_concat_p = 0;
4630 if (dump_file)
4631 {
4632 fprintf (dump_file,
4633 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4634 /* And the pass manager will dump RTL for us. */
4635 }
4637 /* If we're emitting a nested function, make sure its parent gets
4638 emitted as well. Doing otherwise confuses debug info. */
4639 {
4640 tree parent;
4641 for (parent = DECL_CONTEXT (current_function_decl);
4642 parent != NULL_TREE;
4643 parent = get_containing_scope (parent))
4644 if (TREE_CODE (parent) == FUNCTION_DECL)
4645 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4646 }
4648 /* We are now committed to emitting code for this function. Do any
4649 preparation, such as emitting abstract debug info for the inline
4650 before it gets mangled by optimization. */
4651 if (cgraph_function_possibly_inlined_p (current_function_decl))
4652 (*debug_hooks->outlining_inline_function) (current_function_decl);
4654 TREE_ASM_WRITTEN (current_function_decl) = 1;
4656 /* After expanding, the return labels are no longer needed. */
4657 return_label = NULL;
4658 naked_return_label = NULL;
4660 /* After expanding, the tm_restart map is no longer needed. */
4661 if (cfun->gimple_df->tm_restart)
4662 {
4663 htab_delete (cfun->gimple_df->tm_restart);
4664 cfun->gimple_df->tm_restart = NULL;
4665 }
4667 /* Tag the blocks with a depth number so that change_scope can find
4668 the common parent easily. */
4669 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4670 default_rtl_profile ();
4671 timevar_pop (TV_POST_EXPAND);
4672 return 0;
4673 }
4675 struct rtl_opt_pass pass_expand =
4676 {
4677 {
4678 RTL_PASS,
4679 "expand", /* name */
4680 NULL, /* gate */
4681 gimple_expand_cfg, /* execute */
4682 NULL, /* sub */
4683 NULL, /* next */
4684 0, /* static_pass_number */
4685 TV_EXPAND, /* tv_id */
4686 PROP_ssa | PROP_gimple_leh | PROP_cfg
4687 | PROP_gimple_lcx, /* properties_required */
4688 PROP_rtl, /* properties_provided */
4689 PROP_ssa | PROP_trees, /* properties_destroyed */
4690 TODO_verify_ssa | TODO_verify_flow
4691 | TODO_verify_stmts, /* todo_flags_start */
4692 TODO_ggc_collect /* todo_flags_finish */
4693 }
4694 };
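/* Editor's note, an illustrative reading of the descriptor above: the
   NULL gate means the pass always runs; it requires GIMPLE in SSA form
   with a CFG and lowered EH (properties_required), provides RTL
   (properties_provided), and destroys the SSA and tree properties
   (properties_destroyed), matching the transition this file performs.  */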