/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
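
/* Editorial summary of the encoding used above: an unset DECL_RTL means
   "no location recorded yet", pc_rtx is a sentinel for "lives in multiple
   partitions, location unknown", and any other value is the single known
   location.  For example, if one partition of variable A is assigned a
   pseudo R1 and a second partition of A is later assigned R2, the second
   set_rtl call downgrades DECL_RTL (A) from R1 to pc_rtx and leaves the
   rest to var-tracking.  */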
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
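
/* Editorial sketch of the rounding arithmetic above, with a worked example;
   illustrative only, not compiled into the pass.  Assuming frame_phase == 0
   on a downward-growing frame: starting from frame_offset == -12, allocating
   size == 8 at align == 8 computes -12 - 8 == -20, then -20 & -8 == -24, so
   the slot lands at offset -24 and 4 bytes of padding are introduced.  */
#if 0
static HOST_WIDE_INT
align_down_example (HOST_WIDE_INT offset, HOST_WIDE_INT size,
                    unsigned HOST_WIDE_INT align)
{
  /* -align is a mask with the low log2(align) bits clear, so the AND
     rounds towards negative infinity -- "down" for a downward frame.  */
  return (offset - size) & -(HOST_WIDE_INT) align;
}
#endif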
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
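
/* Editorial examples for the predicate above: given
     struct s { int i; union { short a; int b; } u; };
   it returns true for "struct s", for the union type itself, and for
   array types such as "struct s[4]"; it returns false for plain "int"
   or "int[4]".  */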
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we do need to add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for union-containing structures.  Otherwise RTL alias analysis
   comes along and, due to type based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias, and happily reorders stores
   across the life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
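
/* Editorial sketch of the PR25654 hazard motivating the conflicts above;
   hypothetical user-level code, illustrative only.  If the two union
   temporaries below were given one stack slot, type-based alias analysis
   could conclude that the short and int stores don't interfere and reorder
   them across the variables' lifetimes.  */
#if 0
union tmp { short s; int i; };
short
pr25654_example (void)
{
  short r;
  {
    union tmp a;        /* A's lifetime ends before B's starts, so the   */
    a.s = 1;            /* partitioner may want to reuse its stack slot. */
    r = a.s;
  }
  {
    union tmp b;
    b.i = 2;            /* Store through a different type: must not be
                           reordered before the read of a.s above.  */
    r += (short) b.i;
  }
  return r;
}
#endif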
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  if (for_conflict)
    {
      /* We need to add conflicts for everything live at the start of
         this block.  Unlike classical liveness for named objects we can't
         rely on seeing a def/use of the names we're interested in.
         There might merely be indirect loads/stores.  We'd not add any
         conflicts for such partitions.  */
      bitmap_iterator bi;
      unsigned i;
      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
        {
          unsigned j;
          bitmap_iterator bj;
          EXECUTE_IF_SET_IN_BITMAP (work, i, j, bj)
            add_stack_var_conflict (i, j);
        }
      visit = visit_conflict;
    }
  else
    visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
    }
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the life range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference to
     it upwards.  But it's conservatively correct as a variable never can
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
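
/* Editorial note on the fixed point above: it terminates because each
   block's live set only ever grows (bitmap_ior_into never clears bits of
   bb->aux) and the universe of partition indices is finite; once an
   iteration changes no block, the sets are stable and a final pass can
   safely record conflicts from them.  */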
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
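
/* Editorial example of the resulting order: on a target where 64-byte
   alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT, a 16-byte variable
   requiring 64-byte alignment sorts before everything else; among the
   remaining "small"-alignment variables a 32-byte object precedes an
   8-byte one, and ties are broken by alignment and finally by SSA
   version or DECL_UID for stability.  */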
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
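
/* Editorial example of the list splice above: if partition A is the list
   a -> x -> EOC and B is the singleton b (the assert guarantees B has no
   members), then after union_stack_vars (a, b) the combined partition
   reads a -> b -> x -> EOC, with b's representative field pointing at
   a.  */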
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }  */

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
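
/* Editorial sketch (not compiled) of the same greedy UNION loop on plain
   indices, under the assumption that sizes are already sorted in
   descending order.  REP[i] == i marks a representative; CONFLICT is a
   hypothetical symmetric conflict test standing in for
   stack_var_conflict_p.  */
#if 0
static void
binpack_example (size_t n, size_t *rep, bool (*conflict) (size_t, size_t))
{
  size_t i, j;
  for (i = 0; i < n; ++i)
    {
      if (rep[i] != i)
        continue;               /* Already merged into an earlier object.  */
      for (j = i + 1; j < n; ++j)
        if (rep[j] == j && !conflict (i, j))
          rep[j] = i;           /* UNION (i, j).  */
    }
}
#endif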
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
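
/* Editorial note on the ALIGN computation above: OFFSET & -OFFSET isolates
   the lowest set bit of OFFSET, i.e. the largest power of two dividing it,
   which is exactly the alignment the chosen offset guarantees relative to
   an aligned base.  For example, offset 24 gives 24 & -24 == 8, so a
   variable at byte offset 24 from an aligned base is 8-byte aligned.  */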
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on its location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
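
/* Editorial examples of the policy above (no -fstack-protector, ordinary
   alignment): at -O0 and -O1 outermost-scope variables are allocated
   immediately; at -O0 a nested "int i" (4 bytes, below the 32-byte cutoff)
   is also allocated immediately, while a nested "char buf[64]" is
   deferred; from -O2 on everything is deferred so that sorting and
   partitioning can pack the frame.  */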
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
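
/* Editorial examples of the phases above, assuming the default
   --param ssp-buffer-size=8: "char buf[64]" is a large char array and
   gets phase 1 under both -fstack-protector and -fstack-protector-all;
   "char buf[4]" is below the cutoff and is protected (phase 1) only
   under -fstack-protector-all; "int a[10]" is a non-char array and gets
   phase 2, again only under -fstack-protector-all.  */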
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
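
/* Editorial example of the final rounding above: with FRAME_GROWS_DOWNWARD
   and a PREFERRED_STACK_BOUNDARY of 128 bits (align == 16), a frame_offset
   of -52 becomes -52 & -16 == -64, padding the frame out to the next
   16-byte boundary.  */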
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
1808 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1809 Returns a new basic block if we've terminated the current basic
1810 block and created a new one. */
1812 static basic_block
1813 expand_gimple_cond (basic_block bb, gimple stmt)
1815 basic_block new_bb, dest;
1816 edge new_edge;
1817 edge true_edge;
1818 edge false_edge;
1819 rtx last2, last;
1820 enum tree_code code;
1821 tree op0, op1;
1823 code = gimple_cond_code (stmt);
1824 op0 = gimple_cond_lhs (stmt);
1825 op1 = gimple_cond_rhs (stmt);
1826 /* We're sometimes presented with such code:
1827 D.123_1 = x < y;
1828 if (D.123_1 != 0)
1830 This would expand to two comparisons which then later might
1831 be cleaned up by combine. But some pattern matchers like if-conversion
1832 work better when there's only one compare, so make up for this
1833 here as special exception if TER would have made the same change. */
1834 if (gimple_cond_single_var_p (stmt)
1835 && SA.values
1836 && TREE_CODE (op0) == SSA_NAME
1837 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1839 gimple second = SSA_NAME_DEF_STMT (op0);
1840 if (gimple_code (second) == GIMPLE_ASSIGN)
1842 enum tree_code code2 = gimple_assign_rhs_code (second);
1843 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1845 code = code2;
1846 op0 = gimple_assign_rhs1 (second);
1847 op1 = gimple_assign_rhs2 (second);
1849 /* If jumps are cheap turn some more codes into
1850 jumpy sequences. */
1851 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1853 if ((code2 == BIT_AND_EXPR
1854 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1855 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1856 || code2 == TRUTH_AND_EXPR)
1858 code = TRUTH_ANDIF_EXPR;
1859 op0 = gimple_assign_rhs1 (second);
1860 op1 = gimple_assign_rhs2 (second);
1862 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1864 code = TRUTH_ORIF_EXPR;
1865 op0 = gimple_assign_rhs1 (second);
1866 op1 = gimple_assign_rhs2 (second);
1872 last2 = last = get_last_insn ();
1874 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1875 set_curr_insn_source_location (gimple_location (stmt));
1876 set_curr_insn_block (gimple_block (stmt));
1878 /* These flags have no purpose in RTL land. */
1879 true_edge->flags &= ~EDGE_TRUE_VALUE;
1880 false_edge->flags &= ~EDGE_FALSE_VALUE;
1882 /* We can either have a pure conditional jump with one fallthru edge or
1883 two-way jump that needs to be decomposed into two basic blocks. */
1884 if (false_edge->dest == bb->next_bb)
1886 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1887 true_edge->probability);
1888 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1889 if (true_edge->goto_locus)
1891 set_curr_insn_source_location (true_edge->goto_locus);
1892 set_curr_insn_block (true_edge->goto_block);
1893 true_edge->goto_locus = curr_insn_locator ();
1895 true_edge->goto_block = NULL;
1896 false_edge->flags |= EDGE_FALLTHRU;
1897 maybe_cleanup_end_of_block (false_edge, last);
1898 return NULL;
1900 if (true_edge->dest == bb->next_bb)
1902 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1903 false_edge->probability);
1904 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1905 if (false_edge->goto_locus)
1907 set_curr_insn_source_location (false_edge->goto_locus);
1908 set_curr_insn_block (false_edge->goto_block);
1909 false_edge->goto_locus = curr_insn_locator ();
1911 false_edge->goto_block = NULL;
1912 true_edge->flags |= EDGE_FALLTHRU;
1913 maybe_cleanup_end_of_block (true_edge, last);
1914 return NULL;
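/* Neither successor is the fallthrough block: emit the conditional
   jump to the true destination here and an unconditional jump to the
   false destination in a fresh basic block created below. */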
1917 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1918 true_edge->probability);
1919 last = get_last_insn ();
1920 if (false_edge->goto_locus)
1922 set_curr_insn_source_location (false_edge->goto_locus);
1923 set_curr_insn_block (false_edge->goto_block);
1924 false_edge->goto_locus = curr_insn_locator ();
1926 false_edge->goto_block = NULL;
1927 emit_jump (label_rtx_for_bb (false_edge->dest));
1929 BB_END (bb) = last;
1930 if (BARRIER_P (BB_END (bb)))
1931 BB_END (bb) = PREV_INSN (BB_END (bb));
1932 update_bb_for_insn (bb);
1934 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1935 dest = false_edge->dest;
1936 redirect_edge_succ (false_edge, new_bb);
1937 false_edge->flags |= EDGE_FALLTHRU;
1938 new_bb->count = false_edge->count;
1939 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1940 new_edge = make_edge (new_bb, dest, 0);
1941 new_edge->probability = REG_BR_PROB_BASE;
1942 new_edge->count = new_bb->count;
1943 if (BARRIER_P (BB_END (new_bb)))
1944 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1945 update_bb_for_insn (new_bb);
1947 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1949 if (true_edge->goto_locus)
1951 set_curr_insn_source_location (true_edge->goto_locus);
1952 set_curr_insn_block (true_edge->goto_block);
1953 true_edge->goto_locus = curr_insn_locator ();
1955 true_edge->goto_block = NULL;
1957 return new_bb;
1960 /* Mark all calls that can have a transaction restart. */
1962 static void
1963 mark_transaction_restart_calls (gimple stmt)
1965 struct tm_restart_node dummy;
1966 void **slot;
1968 if (!cfun->gimple_df->tm_restart)
1969 return;
1971 dummy.stmt = stmt;
1972 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1973 if (slot)
1975 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1976 tree list = n->label_or_list;
1977 rtx insn;
1979 for (insn = next_real_insn (get_last_insn ());
1980 !CALL_P (insn);
1981 insn = next_real_insn (insn))
1982 continue;
1984 if (TREE_CODE (list) == LABEL_DECL)
1985 add_reg_note (insn, REG_TM, label_rtx (list));
1986 else
1987 for (; list ; list = TREE_CHAIN (list))
1988 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1992 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1993 statement STMT. */
1995 static void
1996 expand_call_stmt (gimple stmt)
1998 tree exp, decl, lhs;
1999 bool builtin_p;
2000 size_t i;
2002 if (gimple_call_internal_p (stmt))
2004 expand_internal_call (stmt);
2005 return;
2008 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2010 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2011 decl = gimple_call_fndecl (stmt);
2012 builtin_p = decl && DECL_BUILT_IN (decl);
2014 /* If this is not a builtin function, the function type through which the
2015 call is made may be different from the type of the function. */
2016 if (!builtin_p)
2017 CALL_EXPR_FN (exp)
2018 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2019 CALL_EXPR_FN (exp));
2021 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2022 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2024 for (i = 0; i < gimple_call_num_args (stmt); i++)
2026 tree arg = gimple_call_arg (stmt, i);
2027 gimple def;
2028 /* TER forwards ADDR_EXPRs into the arguments of builtin functions so
2029 we have a chance to infer more correct alignment information. See PR39954. */
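/* A hypothetical illustration: if tmp_1 = &a[i_2] was replaced by
   TER and the call is __builtin_memcpy (tmp_1, src_3, n_4), the
   first argument is expanded as &a[i_2] directly, exposing the
   alignment of A to the memcpy expander. */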
2030 if (builtin_p
2031 && TREE_CODE (arg) == SSA_NAME
2032 && (def = get_gimple_for_ssa_name (arg))
2033 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2034 arg = gimple_assign_rhs1 (def);
2035 CALL_EXPR_ARG (exp, i) = arg;
2038 if (gimple_has_side_effects (stmt))
2039 TREE_SIDE_EFFECTS (exp) = 1;
2041 if (gimple_call_nothrow_p (stmt))
2042 TREE_NOTHROW (exp) = 1;
2044 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2045 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2046 if (decl
2047 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2048 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2049 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2050 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2051 else
2052 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2053 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
2054 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2055 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2056 TREE_BLOCK (exp) = gimple_block (stmt);
2058 /* Ensure RTL is created for debug args. */
2059 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2061 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2062 unsigned int ix;
2063 tree dtemp;
2065 if (debug_args)
2066 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2068 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2069 expand_debug_expr (dtemp);
2073 lhs = gimple_call_lhs (stmt);
2074 if (lhs)
2075 expand_assignment (lhs, exp, false);
2076 else
2077 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2079 mark_transaction_restart_calls (stmt);
2082 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2083 STMT that doesn't require special handling for outgoing edges. That
2084 is, no tail calls and no GIMPLE_COND. */
2086 static void
2087 expand_gimple_stmt_1 (gimple stmt)
2089 tree op0;
2091 set_curr_insn_source_location (gimple_location (stmt));
2092 set_curr_insn_block (gimple_block (stmt));
2094 switch (gimple_code (stmt))
2096 case GIMPLE_GOTO:
2097 op0 = gimple_goto_dest (stmt);
2098 if (TREE_CODE (op0) == LABEL_DECL)
2099 expand_goto (op0);
2100 else
2101 expand_computed_goto (op0);
2102 break;
2103 case GIMPLE_LABEL:
2104 expand_label (gimple_label_label (stmt));
2105 break;
2106 case GIMPLE_NOP:
2107 case GIMPLE_PREDICT:
2108 break;
2109 case GIMPLE_SWITCH:
2110 expand_case (stmt);
2111 break;
2112 case GIMPLE_ASM:
2113 expand_asm_stmt (stmt);
2114 break;
2115 case GIMPLE_CALL:
2116 expand_call_stmt (stmt);
2117 break;
2119 case GIMPLE_RETURN:
2120 op0 = gimple_return_retval (stmt);
2122 if (op0 && op0 != error_mark_node)
2124 tree result = DECL_RESULT (current_function_decl);
2126 /* If we are not returning the current function's RESULT_DECL,
2127 build an assignment to it. */
2128 if (op0 != result)
2130 /* I believe that a function's RESULT_DECL is unique. */
2131 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2133 /* ??? We'd like to use simply expand_assignment here,
2134 but this fails if the value is of BLKmode but the return
2135 decl is a register. expand_return has special handling
2136 for this combination, which eventually should move
2137 to common code. See comments there. Until then, let's
2138 build a modify expression :-/ */
2139 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2140 result, op0);
2143 if (!op0)
2144 expand_null_return ();
2145 else
2146 expand_return (op0);
2147 break;
2149 case GIMPLE_ASSIGN:
2151 tree lhs = gimple_assign_lhs (stmt);
2153 /* Tree expand used to fiddle with |= and &= of two bitfield
2154 COMPONENT_REFs here. This can't happen with gimple; the LHS
2155 of binary assigns must be a gimple reg. */
2157 if (TREE_CODE (lhs) != SSA_NAME
2158 || get_gimple_rhs_class (gimple_expr_code (stmt))
2159 == GIMPLE_SINGLE_RHS)
2161 tree rhs = gimple_assign_rhs1 (stmt);
2162 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2163 == GIMPLE_SINGLE_RHS);
2164 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2165 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2166 if (TREE_CLOBBER_P (rhs))
2167 /* This is a clobber marking that this LHS is going
2168 out of scope. */
2170 else
2171 expand_assignment (lhs, rhs,
2172 gimple_assign_nontemporal_move_p (stmt));
2174 else
2176 rtx target, temp;
2177 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2178 struct separate_ops ops;
2179 bool promoted = false;
2181 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2182 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2183 promoted = true;
2185 ops.code = gimple_assign_rhs_code (stmt);
2186 ops.type = TREE_TYPE (lhs);
2187 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2189 case GIMPLE_TERNARY_RHS:
2190 ops.op2 = gimple_assign_rhs3 (stmt);
2191 /* Fallthru */
2192 case GIMPLE_BINARY_RHS:
2193 ops.op1 = gimple_assign_rhs2 (stmt);
2194 /* Fallthru */
2195 case GIMPLE_UNARY_RHS:
2196 ops.op0 = gimple_assign_rhs1 (stmt);
2197 break;
2198 default:
2199 gcc_unreachable ();
2201 ops.location = gimple_location (stmt);
2203 /* If we want to use a nontemporal store, force the value to
2204 register first. If we store into a promoted register,
2205 don't directly expand to target. */
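/* An illustration (not target-specific source): on machines that
   keep an SImode value sign-extended in a wider hard register,
   TARGET is a promoted SUBREG; expanding into it directly could
   clobber the extension, so the result is converted via
   convert_move below instead. */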
2206 temp = nontemporal || promoted ? NULL_RTX : target;
2207 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2208 EXPAND_NORMAL);
2210 if (temp == target)
2212 else if (promoted)
2214 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2215 /* If TEMP is a VOIDmode constant, use convert_modes to make
2216 sure that we properly convert it. */
2217 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2219 temp = convert_modes (GET_MODE (target),
2220 TYPE_MODE (ops.type),
2221 temp, unsignedp);
2222 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2223 GET_MODE (target), temp, unsignedp);
2226 convert_move (SUBREG_REG (target), temp, unsignedp);
2228 else if (nontemporal && emit_storent_insn (target, temp))
2230 else
2232 temp = force_operand (temp, target);
2233 if (temp != target)
2234 emit_move_insn (target, temp);
2238 break;
2240 default:
2241 gcc_unreachable ();
2245 /* Expand one gimple statement STMT and return the last RTL instruction
2246 before any of the newly generated ones.
2248 In addition to generating the necessary RTL instructions this also
2249 sets REG_EH_REGION notes if necessary and sets the current source
2250 location for diagnostics. */
2252 static rtx
2253 expand_gimple_stmt (gimple stmt)
2255 location_t saved_location = input_location;
2256 rtx last = get_last_insn ();
2257 int lp_nr;
2259 gcc_assert (cfun);
2261 /* We need to save and restore the current source location so that errors
2262 discovered during expansion are emitted with the right location. But
2263 it would be better if the diagnostic routines used the source location
2264 embedded in the tree nodes rather than globals. */
2265 if (gimple_has_location (stmt))
2266 input_location = gimple_location (stmt);
2268 expand_gimple_stmt_1 (stmt);
2270 /* Free any temporaries used to evaluate this statement. */
2271 free_temp_slots ();
2273 input_location = saved_location;
2275 /* Mark all insns that may trap. */
2276 lp_nr = lookup_stmt_eh_lp (stmt);
2277 if (lp_nr)
2279 rtx insn;
2280 for (insn = next_real_insn (last); insn;
2281 insn = next_real_insn (insn))
2283 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2284 /* If we want exceptions for non-call insns, any
2285 may_trap_p instruction may throw. */
2286 && GET_CODE (PATTERN (insn)) != CLOBBER
2287 && GET_CODE (PATTERN (insn)) != USE
2288 && insn_could_throw_p (insn))
2289 make_reg_eh_region_note (insn, 0, lp_nr);
2293 return last;
2296 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2297 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2298 generated a tail call (something that might be denied by the ABI
2299 rules governing the call; see calls.c).
2301 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2302 can still reach the rest of BB. The case here is __builtin_sqrt,
2303 where the NaN result goes through the external function (with a
2304 tailcall) and the normal result happens via a sqrt instruction. */
2306 static basic_block
2307 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2309 rtx last2, last;
2310 edge e;
2311 edge_iterator ei;
2312 int probability;
2313 gcov_type count;
2315 last2 = last = expand_gimple_stmt (stmt);
2317 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2318 if (CALL_P (last) && SIBLING_CALL_P (last))
2319 goto found;
2321 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2323 *can_fallthru = true;
2324 return NULL;
2326 found:
2327 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2328 Any instructions emitted here are about to be deleted. */
2329 do_pending_stack_adjust ();
2331 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2332 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2333 EH or abnormal edges, we shouldn't have created a tail call in
2334 the first place. So it seems to me we should just be removing
2335 all edges here, or redirecting the existing fallthru edge to
2336 the exit block. */
2338 probability = 0;
2339 count = 0;
2341 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2343 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2345 if (e->dest != EXIT_BLOCK_PTR)
2347 e->dest->count -= e->count;
2348 e->dest->frequency -= EDGE_FREQUENCY (e);
2349 if (e->dest->count < 0)
2350 e->dest->count = 0;
2351 if (e->dest->frequency < 0)
2352 e->dest->frequency = 0;
2354 count += e->count;
2355 probability += e->probability;
2356 remove_edge (e);
2358 else
2359 ei_next (&ei);
2362 /* This is somewhat ugly: the call_expr expander often emits instructions
2363 after the sibcall (to perform the function return). These confuse the
2364 find_many_sub_basic_blocks code, so we need to get rid of them. */
2365 last = NEXT_INSN (last);
2366 gcc_assert (BARRIER_P (last));
2368 *can_fallthru = false;
2369 while (NEXT_INSN (last))
2371 /* For instance, an sqrt builtin expander expands an if with a
2372 sibcall in the then-arm and a label for the else-arm. */
2373 if (LABEL_P (NEXT_INSN (last)))
2375 *can_fallthru = true;
2376 break;
2378 delete_insn (NEXT_INSN (last));
2381 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2382 e->probability += probability;
2383 e->count += count;
2384 BB_END (bb) = last;
2385 update_bb_for_insn (bb);
2387 if (NEXT_INSN (last))
2389 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2391 last = BB_END (bb);
2392 if (BARRIER_P (last))
2393 BB_END (bb) = PREV_INSN (last);
2396 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2398 return bb;
2401 /* Return the difference between the floor and the truncated result of
2402 a signed division by OP1 with remainder MOD. */
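/* Worked example (illustrative): -7 / 2 truncates to -3 with
   MOD = -1; MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment
   is -1 and the floor result is -3 + -1 = -4. */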
2403 static rtx
2404 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2406 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2407 return gen_rtx_IF_THEN_ELSE
2408 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2409 gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_LT (BImode,
2411 gen_rtx_DIV (mode, op1, mod),
2412 const0_rtx),
2413 constm1_rtx, const0_rtx),
2414 const0_rtx);
2417 /* Return the difference between the ceil and the truncated result of
2418 a signed division by OP1 with remainder MOD. */
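/* Worked example (illustrative): 7 / 2 truncates to 3 with MOD = 1;
   MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the adjustment is +1 and
   the ceiling result is 3 + 1 = 4. */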
2419 static rtx
2420 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2422 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2423 return gen_rtx_IF_THEN_ELSE
2424 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2425 gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_GT (BImode,
2427 gen_rtx_DIV (mode, op1, mod),
2428 const0_rtx),
2429 const1_rtx, const0_rtx),
2430 const0_rtx);
2433 /* Return the difference between the ceil and the truncated result of
2434 an unsigned division by OP1 with remainder MOD. */
2435 static rtx
2436 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2438 /* (mod != 0 ? 1 : 0) */
2439 return gen_rtx_IF_THEN_ELSE
2440 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2441 const1_rtx, const0_rtx);
2444 /* Return the difference between the rounded and the truncated result
2445 of a signed division by OP1 with remainder MOD. Halfway cases are
2446 rounded away from zero, rather than to the nearest even number. */
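/* Worked example (illustrative): -7 / 2 truncates to -3 with
   MOD = -1; abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1 and
   OP1 / MOD < 0, so the adjustment is -1, rounding the halfway
   case -3.5 away from zero to -4. */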
2447 static rtx
2448 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2450 /* (abs (mod) >= abs (op1) - abs (mod)
2451 ? (op1 / mod > 0 ? 1 : -1)
2452 : 0) */
2453 return gen_rtx_IF_THEN_ELSE
2454 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2455 gen_rtx_MINUS (mode,
2456 gen_rtx_ABS (mode, op1),
2457 gen_rtx_ABS (mode, mod))),
2458 gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GT (BImode,
2460 gen_rtx_DIV (mode, op1, mod),
2461 const0_rtx),
2462 const1_rtx, constm1_rtx),
2463 const0_rtx);
2466 /* Return the difference between the rounded and the truncated result
2467 of an unsigned division by OP1 with remainder MOD. Halfway cases
2468 are rounded away from zero, rather than to the nearest even
2469 number. */
2470 static rtx
2471 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2473 /* (mod >= op1 - mod ? 1 : 0) */
2474 return gen_rtx_IF_THEN_ELSE
2475 (mode, gen_rtx_GE (BImode, mod,
2476 gen_rtx_MINUS (mode, op1, mod)),
2477 const1_rtx, const0_rtx);
2480 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2481 any rtl. */
2483 static rtx
2484 convert_debug_memory_address (enum machine_mode mode, rtx x,
2485 addr_space_t as)
2487 enum machine_mode xmode = GET_MODE (x);
2489 #ifndef POINTERS_EXTEND_UNSIGNED
2490 gcc_assert (mode == Pmode
2491 || mode == targetm.addr_space.address_mode (as));
2492 gcc_assert (xmode == mode || xmode == VOIDmode);
2493 #else
2494 rtx temp;
2495 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2496 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2498 gcc_assert (mode == address_mode || mode == pointer_mode);
2500 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2501 return x;
2503 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2504 x = simplify_gen_subreg (mode, x, xmode,
2505 subreg_lowpart_offset
2506 (mode, xmode));
2507 else if (POINTERS_EXTEND_UNSIGNED > 0)
2508 x = gen_rtx_ZERO_EXTEND (mode, x);
2509 else if (!POINTERS_EXTEND_UNSIGNED)
2510 x = gen_rtx_SIGN_EXTEND (mode, x);
2511 else
2513 switch (GET_CODE (x))
2515 case SUBREG:
2516 if ((SUBREG_PROMOTED_VAR_P (x)
2517 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2518 || (GET_CODE (SUBREG_REG (x)) == PLUS
2519 && REG_P (XEXP (SUBREG_REG (x), 0))
2520 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2521 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2522 && GET_MODE (SUBREG_REG (x)) == mode)
2523 return SUBREG_REG (x);
2524 break;
2525 case LABEL_REF:
2526 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2527 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2528 return temp;
2529 case SYMBOL_REF:
2530 temp = shallow_copy_rtx (x);
2531 PUT_MODE (temp, mode);
2532 return temp;
2533 case CONST:
2534 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2535 if (temp)
2536 temp = gen_rtx_CONST (mode, temp);
2537 return temp;
2538 case PLUS:
2539 case MINUS:
2540 if (CONST_INT_P (XEXP (x, 1)))
2542 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2543 if (temp)
2544 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2546 break;
2547 default:
2548 break;
2550 /* Don't know how to express ptr_extend as an operation in debug info. */
2551 return NULL;
2553 #endif /* POINTERS_EXTEND_UNSIGNED */
2555 return x;
2558 /* Return an RTX equivalent to the value of the parameter DECL. */
2560 static rtx
2561 expand_debug_parm_decl (tree decl)
2563 rtx incoming = DECL_INCOMING_RTL (decl);
2565 if (incoming
2566 && GET_MODE (incoming) != BLKmode
2567 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2568 || (MEM_P (incoming)
2569 && REG_P (XEXP (incoming, 0))
2570 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2572 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2574 #ifdef HAVE_window_save
2575 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2576 If the target machine has an explicit window save instruction, the
2577 actual entry value is the corresponding OUTGOING_REGNO instead. */
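/* For example (SPARC being the typical register-window target):
   a parameter living in %i0 inside the function was passed in the
   caller's %o0, so %o0 is what the entry value must name. */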
2578 if (REG_P (incoming)
2579 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2580 incoming
2581 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2582 OUTGOING_REGNO (REGNO (incoming)), 0);
2583 else if (MEM_P (incoming))
2585 rtx reg = XEXP (incoming, 0);
2586 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2588 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2589 incoming = replace_equiv_address_nv (incoming, reg);
2592 #endif
2594 ENTRY_VALUE_EXP (rtl) = incoming;
2595 return rtl;
2598 if (incoming
2599 && GET_MODE (incoming) != BLKmode
2600 && !TREE_ADDRESSABLE (decl)
2601 && MEM_P (incoming)
2602 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2603 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2604 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2605 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2606 return incoming;
2608 return NULL_RTX;
2611 /* Return an RTX equivalent to the value of the tree expression EXP. */
2613 static rtx
2614 expand_debug_expr (tree exp)
2616 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2617 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2618 enum machine_mode inner_mode = VOIDmode;
2619 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2620 addr_space_t as;
2622 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2624 case tcc_expression:
2625 switch (TREE_CODE (exp))
2627 case COND_EXPR:
2628 case DOT_PROD_EXPR:
2629 case WIDEN_MULT_PLUS_EXPR:
2630 case WIDEN_MULT_MINUS_EXPR:
2631 case FMA_EXPR:
2632 goto ternary;
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 case TRUTH_AND_EXPR:
2637 case TRUTH_OR_EXPR:
2638 case TRUTH_XOR_EXPR:
2639 goto binary;
2641 case TRUTH_NOT_EXPR:
2642 goto unary;
2644 default:
2645 break;
2647 break;
2649 ternary:
2650 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2651 if (!op2)
2652 return NULL_RTX;
2653 /* Fall through. */
2655 binary:
2656 case tcc_binary:
2657 case tcc_comparison:
2658 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2659 if (!op1)
2660 return NULL_RTX;
2661 /* Fall through. */
2663 unary:
2664 case tcc_unary:
2665 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2666 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2667 if (!op0)
2668 return NULL_RTX;
2669 break;
2671 case tcc_type:
2672 case tcc_statement:
2673 gcc_unreachable ();
2675 case tcc_constant:
2676 case tcc_exceptional:
2677 case tcc_declaration:
2678 case tcc_reference:
2679 case tcc_vl_exp:
2680 break;
2683 switch (TREE_CODE (exp))
2685 case STRING_CST:
2686 if (!lookup_constant_def (exp))
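/* A CONST_STRING can only describe a NUL-terminated string, so
   give up when the STRING_CST has embedded NULs within its
   recorded length (a note added for clarity). */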
2688 if (strlen (TREE_STRING_POINTER (exp)) + 1
2689 != (size_t) TREE_STRING_LENGTH (exp))
2690 return NULL_RTX;
2691 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2692 op0 = gen_rtx_MEM (BLKmode, op0);
2693 set_mem_attributes (op0, exp, 0);
2694 return op0;
2696 /* Fall through... */
2698 case INTEGER_CST:
2699 case REAL_CST:
2700 case FIXED_CST:
2701 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2702 return op0;
2704 case COMPLEX_CST:
2705 gcc_assert (COMPLEX_MODE_P (mode));
2706 op0 = expand_debug_expr (TREE_REALPART (exp));
2707 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2708 return gen_rtx_CONCAT (mode, op0, op1);
2710 case DEBUG_EXPR_DECL:
2711 op0 = DECL_RTL_IF_SET (exp);
2713 if (op0)
2714 return op0;
2716 op0 = gen_rtx_DEBUG_EXPR (mode);
2717 DEBUG_EXPR_TREE_DECL (op0) = exp;
2718 SET_DECL_RTL (exp, op0);
2720 return op0;
2722 case VAR_DECL:
2723 case PARM_DECL:
2724 case FUNCTION_DECL:
2725 case LABEL_DECL:
2726 case CONST_DECL:
2727 case RESULT_DECL:
2728 op0 = DECL_RTL_IF_SET (exp);
2730 /* This decl was probably optimized away. */
2731 if (!op0)
2733 if (TREE_CODE (exp) != VAR_DECL
2734 || DECL_EXTERNAL (exp)
2735 || !TREE_STATIC (exp)
2736 || !DECL_NAME (exp)
2737 || DECL_HARD_REGISTER (exp)
2738 || DECL_IN_CONSTANT_POOL (exp)
2739 || mode == VOIDmode)
2740 return NULL;
2742 op0 = make_decl_rtl_for_debug (exp);
2743 if (!MEM_P (op0)
2744 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2745 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2746 return NULL;
2748 else
2749 op0 = copy_rtx (op0);
2751 if (GET_MODE (op0) == BLKmode
2752 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2753 below would ICE. While it is likely a FE bug,
2754 try to be robust here. See PR43166. */
2755 || mode == BLKmode
2756 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2758 gcc_assert (MEM_P (op0));
2759 op0 = adjust_address_nv (op0, mode, 0);
2760 return op0;
2763 /* Fall through. */
2765 adjust_mode:
2766 case PAREN_EXPR:
2767 case NOP_EXPR:
2768 case CONVERT_EXPR:
2770 inner_mode = GET_MODE (op0);
2772 if (mode == inner_mode)
2773 return op0;
2775 if (inner_mode == VOIDmode)
2777 if (TREE_CODE (exp) == SSA_NAME)
2778 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2779 else
2780 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2781 if (mode == inner_mode)
2782 return op0;
2785 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2787 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2788 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2789 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2790 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2791 else
2792 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2794 else if (FLOAT_MODE_P (mode))
2796 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2797 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2798 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2799 else
2800 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2802 else if (FLOAT_MODE_P (inner_mode))
2804 if (unsignedp)
2805 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2806 else
2807 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2809 else if (CONSTANT_P (op0)
2810 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2811 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2812 subreg_lowpart_offset (mode,
2813 inner_mode));
2814 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2815 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2816 : unsignedp)
2817 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2818 else
2819 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2821 return op0;
2824 case MEM_REF:
2825 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2827 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2828 TREE_OPERAND (exp, 0),
2829 TREE_OPERAND (exp, 1));
2830 if (newexp)
2831 return expand_debug_expr (newexp);
2833 /* FALLTHROUGH */
2834 case INDIRECT_REF:
2835 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2836 if (!op0)
2837 return NULL;
2839 if (TREE_CODE (exp) == MEM_REF)
2841 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2842 || (GET_CODE (op0) == PLUS
2843 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2844 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2845 Instead just use get_inner_reference. */
2846 goto component_ref;
2848 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2849 if (!op1 || !CONST_INT_P (op1))
2850 return NULL;
2852 op0 = plus_constant (op0, INTVAL (op1));
2855 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2856 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2857 else
2858 as = ADDR_SPACE_GENERIC;
2860 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2861 op0, as);
2862 if (op0 == NULL_RTX)
2863 return NULL;
2865 op0 = gen_rtx_MEM (mode, op0);
2866 set_mem_attributes (op0, exp, 0);
2867 if (TREE_CODE (exp) == MEM_REF
2868 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2869 set_mem_expr (op0, NULL_TREE);
2870 set_mem_addr_space (op0, as);
2872 return op0;
2874 case TARGET_MEM_REF:
2875 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2876 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2877 return NULL;
2879 op0 = expand_debug_expr
2880 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2881 if (!op0)
2882 return NULL;
2884 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2885 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2886 else
2887 as = ADDR_SPACE_GENERIC;
2889 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2890 op0, as);
2891 if (op0 == NULL_RTX)
2892 return NULL;
2894 op0 = gen_rtx_MEM (mode, op0);
2896 set_mem_attributes (op0, exp, 0);
2897 set_mem_addr_space (op0, as);
2899 return op0;
2901 component_ref:
2902 case ARRAY_REF:
2903 case ARRAY_RANGE_REF:
2904 case COMPONENT_REF:
2905 case BIT_FIELD_REF:
2906 case REALPART_EXPR:
2907 case IMAGPART_EXPR:
2908 case VIEW_CONVERT_EXPR:
2910 enum machine_mode mode1;
2911 HOST_WIDE_INT bitsize, bitpos;
2912 tree offset;
2913 int volatilep = 0;
2914 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2915 &mode1, &unsignedp, &volatilep, false);
2916 rtx orig_op0;
2918 if (bitsize == 0)
2919 return NULL;
2921 orig_op0 = op0 = expand_debug_expr (tem);
2923 if (!op0)
2924 return NULL;
2926 if (offset)
2928 enum machine_mode addrmode, offmode;
2930 if (!MEM_P (op0))
2931 return NULL;
2933 op0 = XEXP (op0, 0);
2934 addrmode = GET_MODE (op0);
2935 if (addrmode == VOIDmode)
2936 addrmode = Pmode;
2938 op1 = expand_debug_expr (offset);
2939 if (!op1)
2940 return NULL;
2942 offmode = GET_MODE (op1);
2943 if (offmode == VOIDmode)
2944 offmode = TYPE_MODE (TREE_TYPE (offset));
2946 if (addrmode != offmode)
2947 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2948 subreg_lowpart_offset (addrmode,
2949 offmode));
2951 /* Don't use offset_address here; we don't need a
2952 recognizable address, and we don't want to generate
2953 code. */
2954 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2955 op0, op1));
2958 if (MEM_P (op0))
2960 if (mode1 == VOIDmode)
2961 /* Bitfield. */
2962 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2963 if (bitpos >= BITS_PER_UNIT)
2965 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2966 bitpos %= BITS_PER_UNIT;
2968 else if (bitpos < 0)
2970 HOST_WIDE_INT units
2971 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2972 op0 = adjust_address_nv (op0, mode1, units);
2973 bitpos += units * BITS_PER_UNIT;
2975 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2976 op0 = adjust_address_nv (op0, mode, 0);
2977 else if (GET_MODE (op0) != mode1)
2978 op0 = adjust_address_nv (op0, mode1, 0);
2979 else
2980 op0 = copy_rtx (op0);
2981 if (op0 == orig_op0)
2982 op0 = shallow_copy_rtx (op0);
2983 set_mem_attributes (op0, exp, 0);
2986 if (bitpos == 0 && mode == GET_MODE (op0))
2987 return op0;
2989 if (bitpos < 0)
2990 return NULL;
2992 if (GET_MODE (op0) == BLKmode)
2993 return NULL;
2995 if ((bitpos % BITS_PER_UNIT) == 0
2996 && bitsize == GET_MODE_BITSIZE (mode1))
2998 enum machine_mode opmode = GET_MODE (op0);
3000 if (opmode == VOIDmode)
3001 opmode = TYPE_MODE (TREE_TYPE (tem));
3003 /* This condition may hold if we're expanding the address
3004 right past the end of an array that turned out not to
3005 be addressable (i.e., the address was only computed in
3006 debug stmts). The gen_subreg below would rightfully
3007 crash, and the address doesn't really exist, so just
3008 drop it. */
3009 if (bitpos >= GET_MODE_BITSIZE (opmode))
3010 return NULL;
3012 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3013 return simplify_gen_subreg (mode, op0, opmode,
3014 bitpos / BITS_PER_UNIT);
3017 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3018 && TYPE_UNSIGNED (TREE_TYPE (exp))
3019 ? SIGN_EXTRACT
3020 : ZERO_EXTRACT, mode,
3021 GET_MODE (op0) != VOIDmode
3022 ? GET_MODE (op0)
3023 : TYPE_MODE (TREE_TYPE (tem)),
3024 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3027 case ABS_EXPR:
3028 return simplify_gen_unary (ABS, mode, op0, mode);
3030 case NEGATE_EXPR:
3031 return simplify_gen_unary (NEG, mode, op0, mode);
3033 case BIT_NOT_EXPR:
3034 return simplify_gen_unary (NOT, mode, op0, mode);
3036 case FLOAT_EXPR:
3037 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3038 0)))
3039 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3040 inner_mode);
3042 case FIX_TRUNC_EXPR:
3043 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3044 inner_mode);
3046 case POINTER_PLUS_EXPR:
3047 /* For the rare target where pointers are not the same size as
3048 size_t, we need to check for mismatched modes and correct
3049 the addend. */
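/* Illustration (assumed target): with 64-bit pointers but a
   32-bit sizetype, OP1 arrives in SImode and is sign-extended to
   OP0's DImode before the PLUS below is formed. */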
3050 if (op0 && op1
3051 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3052 && GET_MODE (op0) != GET_MODE (op1))
3054 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3055 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3056 GET_MODE (op1));
3057 else
3058 /* We always sign-extend, regardless of the signedness of
3059 the operand, because the operand is always unsigned
3060 here even if the original C expression is signed. */
3061 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3062 GET_MODE (op1));
3064 /* Fall through. */
3065 case PLUS_EXPR:
3066 return simplify_gen_binary (PLUS, mode, op0, op1);
3068 case MINUS_EXPR:
3069 return simplify_gen_binary (MINUS, mode, op0, op1);
3071 case MULT_EXPR:
3072 return simplify_gen_binary (MULT, mode, op0, op1);
3074 case RDIV_EXPR:
3075 case TRUNC_DIV_EXPR:
3076 case EXACT_DIV_EXPR:
3077 if (unsignedp)
3078 return simplify_gen_binary (UDIV, mode, op0, op1);
3079 else
3080 return simplify_gen_binary (DIV, mode, op0, op1);
3082 case TRUNC_MOD_EXPR:
3083 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3085 case FLOOR_DIV_EXPR:
3086 if (unsignedp)
3087 return simplify_gen_binary (UDIV, mode, op0, op1);
3088 else
3090 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3091 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3092 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3093 return simplify_gen_binary (PLUS, mode, div, adj);
3096 case FLOOR_MOD_EXPR:
3097 if (unsignedp)
3098 return simplify_gen_binary (UMOD, mode, op0, op1);
3099 else
3101 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3102 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3103 adj = simplify_gen_unary (NEG, mode,
3104 simplify_gen_binary (MULT, mode, adj, op1),
3105 mode);
3106 return simplify_gen_binary (PLUS, mode, mod, adj);
3109 case CEIL_DIV_EXPR:
3110 if (unsignedp)
3112 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3113 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3114 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3115 return simplify_gen_binary (PLUS, mode, div, adj);
3117 else
3119 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3120 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3121 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3122 return simplify_gen_binary (PLUS, mode, div, adj);
3125 case CEIL_MOD_EXPR:
3126 if (unsignedp)
3128 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3129 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3130 adj = simplify_gen_unary (NEG, mode,
3131 simplify_gen_binary (MULT, mode, adj, op1),
3132 mode);
3133 return simplify_gen_binary (PLUS, mode, mod, adj);
3135 else
3137 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3138 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3139 adj = simplify_gen_unary (NEG, mode,
3140 simplify_gen_binary (MULT, mode, adj, op1),
3141 mode);
3142 return simplify_gen_binary (PLUS, mode, mod, adj);
3145 case ROUND_DIV_EXPR:
3146 if (unsignedp)
3148 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3149 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3150 rtx adj = round_udiv_adjust (mode, mod, op1);
3151 return simplify_gen_binary (PLUS, mode, div, adj);
3153 else
3155 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3156 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3157 rtx adj = round_sdiv_adjust (mode, mod, op1);
3158 return simplify_gen_binary (PLUS, mode, div, adj);
3161 case ROUND_MOD_EXPR:
3162 if (unsignedp)
3164 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3165 rtx adj = round_udiv_adjust (mode, mod, op1);
3166 adj = simplify_gen_unary (NEG, mode,
3167 simplify_gen_binary (MULT, mode, adj, op1),
3168 mode);
3169 return simplify_gen_binary (PLUS, mode, mod, adj);
3171 else
3173 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3174 rtx adj = round_sdiv_adjust (mode, mod, op1);
3175 adj = simplify_gen_unary (NEG, mode,
3176 simplify_gen_binary (MULT, mode, adj, op1),
3177 mode);
3178 return simplify_gen_binary (PLUS, mode, mod, adj);
3181 case LSHIFT_EXPR:
3182 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3184 case RSHIFT_EXPR:
3185 if (unsignedp)
3186 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3187 else
3188 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3190 case LROTATE_EXPR:
3191 return simplify_gen_binary (ROTATE, mode, op0, op1);
3193 case RROTATE_EXPR:
3194 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3196 case MIN_EXPR:
3197 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3199 case MAX_EXPR:
3200 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3202 case BIT_AND_EXPR:
3203 case TRUTH_AND_EXPR:
3204 return simplify_gen_binary (AND, mode, op0, op1);
3206 case BIT_IOR_EXPR:
3207 case TRUTH_OR_EXPR:
3208 return simplify_gen_binary (IOR, mode, op0, op1);
3210 case BIT_XOR_EXPR:
3211 case TRUTH_XOR_EXPR:
3212 return simplify_gen_binary (XOR, mode, op0, op1);
3214 case TRUTH_ANDIF_EXPR:
3215 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3217 case TRUTH_ORIF_EXPR:
3218 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3220 case TRUTH_NOT_EXPR:
3221 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3223 case LT_EXPR:
3224 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3225 op0, op1);
3227 case LE_EXPR:
3228 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3229 op0, op1);
3231 case GT_EXPR:
3232 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3233 op0, op1);
3235 case GE_EXPR:
3236 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3237 op0, op1);
3239 case EQ_EXPR:
3240 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3242 case NE_EXPR:
3243 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3245 case UNORDERED_EXPR:
3246 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3248 case ORDERED_EXPR:
3249 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3251 case UNLT_EXPR:
3252 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3254 case UNLE_EXPR:
3255 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3257 case UNGT_EXPR:
3258 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3260 case UNGE_EXPR:
3261 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3263 case UNEQ_EXPR:
3264 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3266 case LTGT_EXPR:
3267 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3269 case COND_EXPR:
3270 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3272 case COMPLEX_EXPR:
3273 gcc_assert (COMPLEX_MODE_P (mode));
3274 if (GET_MODE (op0) == VOIDmode)
3275 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3276 if (GET_MODE (op1) == VOIDmode)
3277 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3278 return gen_rtx_CONCAT (mode, op0, op1);
3280 case CONJ_EXPR:
3281 if (GET_CODE (op0) == CONCAT)
3282 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3283 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3284 XEXP (op0, 1),
3285 GET_MODE_INNER (mode)));
3286 else
3288 enum machine_mode imode = GET_MODE_INNER (mode);
3289 rtx re, im;
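/* OP0 is not a CONCAT: fetch the real and imaginary parts either
   by adjusting the MEM address or, for a register value, by
   extracting bit ranges from its integer view; the imaginary part
   is then negated below (a note added for clarity). */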
3291 if (MEM_P (op0))
3293 re = adjust_address_nv (op0, imode, 0);
3294 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3296 else
3298 enum machine_mode ifmode = int_mode_for_mode (mode);
3299 enum machine_mode ihmode = int_mode_for_mode (imode);
3300 rtx halfsize;
3301 if (ifmode == BLKmode || ihmode == BLKmode)
3302 return NULL;
3303 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3304 re = op0;
3305 if (mode != ifmode)
3306 re = gen_rtx_SUBREG (ifmode, re, 0);
3307 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3308 if (imode != ihmode)
3309 re = gen_rtx_SUBREG (imode, re, 0);
3310 im = copy_rtx (op0);
3311 if (mode != ifmode)
3312 im = gen_rtx_SUBREG (ifmode, im, 0);
3313 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3314 if (imode != ihmode)
3315 im = gen_rtx_SUBREG (imode, im, 0);
3317 im = gen_rtx_NEG (imode, im);
3318 return gen_rtx_CONCAT (mode, re, im);
3321 case ADDR_EXPR:
3322 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3323 if (!op0 || !MEM_P (op0))
3325 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3326 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3327 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3328 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3329 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3331 if (handled_component_p (TREE_OPERAND (exp, 0)))
3333 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3334 tree decl
3335 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3336 &bitoffset, &bitsize, &maxsize);
3337 if ((TREE_CODE (decl) == VAR_DECL
3338 || TREE_CODE (decl) == PARM_DECL
3339 || TREE_CODE (decl) == RESULT_DECL)
3340 && !TREE_ADDRESSABLE (decl)
3341 && (bitoffset % BITS_PER_UNIT) == 0
3342 && bitsize > 0
3343 && bitsize == maxsize)
3344 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3345 bitoffset / BITS_PER_UNIT);
3348 return NULL;
3351 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3352 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3354 return op0;
3356 case VECTOR_CST:
3357 exp = build_constructor_from_list (TREE_TYPE (exp),
3358 TREE_VECTOR_CST_ELTS (exp));
3359 /* Fall through. */
3361 case CONSTRUCTOR:
3362 if (TREE_CLOBBER_P (exp))
3363 return NULL;
3364 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3366 unsigned i;
3367 tree val;
3369 op0 = gen_rtx_CONCATN
3370 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3372 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3374 op1 = expand_debug_expr (val);
3375 if (!op1)
3376 return NULL;
3377 XVECEXP (op0, 0, i) = op1;
3380 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3382 op1 = expand_debug_expr
3383 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3385 if (!op1)
3386 return NULL;
3388 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3389 XVECEXP (op0, 0, i) = op1;
3392 return op0;
3394 else
3395 goto flag_unsupported;
3397 case CALL_EXPR:
3398 /* ??? Maybe handle some builtins? */
3399 return NULL;
3401 case SSA_NAME:
3403 gimple g = get_gimple_for_ssa_name (exp);
3404 if (g)
3406 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3407 if (!op0)
3408 return NULL;
3410 else
3412 int part = var_to_partition (SA.map, exp);
3414 if (part == NO_PARTITION)
3416 /* If this is a reference to the incoming value of a parameter
3417 that is never used in the code, or where the incoming
3418 value itself is never used, use the PARM_DECL's
3419 DECL_RTL if set. */
3420 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3421 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3423 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3424 if (op0)
3425 goto adjust_mode;
3426 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3427 if (op0)
3428 goto adjust_mode;
3430 return NULL;
3433 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3435 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3437 goto adjust_mode;
3440 case ERROR_MARK:
3441 return NULL;
3443 /* Vector stuff. For most of the codes we don't have rtl codes. */
3444 case REALIGN_LOAD_EXPR:
3445 case REDUC_MAX_EXPR:
3446 case REDUC_MIN_EXPR:
3447 case REDUC_PLUS_EXPR:
3448 case VEC_COND_EXPR:
3449 case VEC_EXTRACT_EVEN_EXPR:
3450 case VEC_EXTRACT_ODD_EXPR:
3451 case VEC_INTERLEAVE_HIGH_EXPR:
3452 case VEC_INTERLEAVE_LOW_EXPR:
3453 case VEC_LSHIFT_EXPR:
3454 case VEC_PACK_FIX_TRUNC_EXPR:
3455 case VEC_PACK_SAT_EXPR:
3456 case VEC_PACK_TRUNC_EXPR:
3457 case VEC_RSHIFT_EXPR:
3458 case VEC_UNPACK_FLOAT_HI_EXPR:
3459 case VEC_UNPACK_FLOAT_LO_EXPR:
3460 case VEC_UNPACK_HI_EXPR:
3461 case VEC_UNPACK_LO_EXPR:
3462 case VEC_WIDEN_MULT_HI_EXPR:
3463 case VEC_WIDEN_MULT_LO_EXPR:
3464 case VEC_WIDEN_LSHIFT_HI_EXPR:
3465 case VEC_WIDEN_LSHIFT_LO_EXPR:
3466 case VEC_PERM_EXPR:
3467 return NULL;
3469 /* Misc codes. */
3470 case ADDR_SPACE_CONVERT_EXPR:
3471 case FIXED_CONVERT_EXPR:
3472 case OBJ_TYPE_REF:
3473 case WITH_SIZE_EXPR:
3474 return NULL;
3476 case DOT_PROD_EXPR:
3477 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3478 && SCALAR_INT_MODE_P (mode))
3480 op0
3481 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3482 0)))
3483 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3484 inner_mode);
3485 op1
3486 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3487 1)))
3488 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3489 inner_mode);
3490 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3491 return simplify_gen_binary (PLUS, mode, op0, op2);
3493 return NULL;
3495 case WIDEN_MULT_EXPR:
3496 case WIDEN_MULT_PLUS_EXPR:
3497 case WIDEN_MULT_MINUS_EXPR:
3498 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3499 && SCALAR_INT_MODE_P (mode))
3501 inner_mode = GET_MODE (op0);
3502 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3503 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3504 else
3505 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3506 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3507 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3508 else
3509 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3510 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3511 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3512 return op0;
3513 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3514 return simplify_gen_binary (PLUS, mode, op0, op2);
3515 else
3516 return simplify_gen_binary (MINUS, mode, op2, op0);
3518 return NULL;
3520 case WIDEN_SUM_EXPR:
3521 case WIDEN_LSHIFT_EXPR:
3522 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3523 && SCALAR_INT_MODE_P (mode))
3525 op0
3526 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3527 0)))
3528 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3529 inner_mode);
3530 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3531 ? ASHIFT : PLUS, mode, op0, op1);
3533 return NULL;
3535 case FMA_EXPR:
3536 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3538 default:
3539 flag_unsupported:
3540 #ifdef ENABLE_CHECKING
3541 debug_tree (exp);
3542 gcc_unreachable ();
3543 #else
3544 return NULL;
3545 #endif
3549 /* Return an RTX equivalent to the source bind value of the tree expression
3550 EXP. */
3552 static rtx
3553 expand_debug_source_expr (tree exp)
3555 rtx op0 = NULL_RTX;
3556 enum machine_mode mode = VOIDmode, inner_mode;
3558 switch (TREE_CODE (exp))
3560 case PARM_DECL:
3562 mode = DECL_MODE (exp);
3563 op0 = expand_debug_parm_decl (exp);
3564 if (op0)
3565 break;
3566 /* See whether this is an argument that has been completely
3567 optimized out. */
3568 if (!DECL_RTL_SET_P (exp)
3569 && !DECL_INCOMING_RTL (exp)
3570 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3572 tree aexp = exp;
3573 if (DECL_ABSTRACT_ORIGIN (exp))
3574 aexp = DECL_ABSTRACT_ORIGIN (exp);
3575 if (DECL_CONTEXT (aexp)
3576 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3578 VEC(tree, gc) **debug_args;
3579 unsigned int ix;
3580 tree ddecl;
3581 #ifdef ENABLE_CHECKING
3582 tree parm;
3583 for (parm = DECL_ARGUMENTS (current_function_decl);
3584 parm; parm = DECL_CHAIN (parm))
3585 gcc_assert (parm != exp
3586 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3587 #endif
3588 debug_args = decl_debug_args_lookup (current_function_decl);
3589 if (debug_args != NULL)
3591 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3592 ix += 2)
3593 if (ddecl == aexp)
3594 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3598 break;
3600 default:
3601 break;
3604 if (op0 == NULL_RTX)
3605 return NULL_RTX;
3607 inner_mode = GET_MODE (op0);
3608 if (mode == inner_mode)
3609 return op0;
3611 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3613 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3614 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3615 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3616 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3617 else
3618 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3620 else if (FLOAT_MODE_P (mode))
3621 gcc_unreachable ();
3622 else if (FLOAT_MODE_P (inner_mode))
3624 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3625 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3626 else
3627 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3629 else if (CONSTANT_P (op0)
3630 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3631 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3632 subreg_lowpart_offset (mode, inner_mode));
3633 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3634 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3635 else
3636 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3638 return op0;
3641 /* Expand the _LOCs in debug insns. We run this after expanding all
3642 regular insns, so that any variables referenced in the function
3643 will have their DECL_RTLs set. */
3645 static void
3646 expand_debug_locations (void)
3648 rtx insn;
3649 rtx last = get_last_insn ();
3650 int save_strict_alias = flag_strict_aliasing;
3652 /* New alias sets while setting up memory attributes cause
3653 -fcompare-debug failures, even though they don't bring about any
3654 codegen changes. */
3655 flag_strict_aliasing = 0;
3657 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3658 if (DEBUG_INSN_P (insn))
3660 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3661 rtx val;
3662 enum machine_mode mode;
3664 if (value == NULL_TREE)
3665 val = NULL_RTX;
3666 else
3668 if (INSN_VAR_LOCATION_STATUS (insn)
3669 == VAR_INIT_STATUS_UNINITIALIZED)
3670 val = expand_debug_source_expr (value);
3671 else
3672 val = expand_debug_expr (value);
3673 gcc_assert (last == get_last_insn ());
3676 if (!val)
3677 val = gen_rtx_UNKNOWN_VAR_LOC ();
3678 else
3680 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3682 gcc_assert (mode == GET_MODE (val)
3683 || (GET_MODE (val) == VOIDmode
3684 && (CONST_INT_P (val)
3685 || GET_CODE (val) == CONST_FIXED
3686 || GET_CODE (val) == CONST_DOUBLE
3687 || GET_CODE (val) == LABEL_REF)));
3690 INSN_VAR_LOCATION_LOC (insn) = val;
3693 flag_strict_aliasing = save_strict_alias;
3696 /* Expand basic block BB from GIMPLE trees to RTL. */
3698 static basic_block
3699 expand_gimple_basic_block (basic_block bb)
3701 gimple_stmt_iterator gsi;
3702 gimple_seq stmts;
3703 gimple stmt = NULL;
3704 rtx note, last;
3705 edge e;
3706 edge_iterator ei;
3707 void **elt;
3709 if (dump_file)
3710 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3711 bb->index);
3713 /* Note that since we are now transitioning from GIMPLE to RTL, we
3714 cannot use the gsi_*_bb() routines because they expect the basic
3715 block to be in GIMPLE, instead of RTL. Therefore, we need to
3716 access the BB sequence directly. */
3717 stmts = bb_seq (bb);
3718 bb->il.gimple = NULL;
3719 rtl_profile_for_bb (bb);
3720 init_rtl_bb_info (bb);
3721 bb->flags |= BB_RTL;
3723 /* Remove the RETURN_EXPR if we may fall through to the exit
3724 instead. */
3725 gsi = gsi_last (stmts);
3726 if (!gsi_end_p (gsi)
3727 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3729 gimple ret_stmt = gsi_stmt (gsi);
3731 gcc_assert (single_succ_p (bb));
3732 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3734 if (bb->next_bb == EXIT_BLOCK_PTR
3735 && !gimple_return_retval (ret_stmt))
3737 gsi_remove (&gsi, false);
3738 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3742 gsi = gsi_start (stmts);
3743 if (!gsi_end_p (gsi))
3745 stmt = gsi_stmt (gsi);
3746 if (gimple_code (stmt) != GIMPLE_LABEL)
3747 stmt = NULL;
3750 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3752 if (stmt || elt)
3754 last = get_last_insn ();
3756 if (stmt)
3758 expand_gimple_stmt (stmt);
3759 gsi_next (&gsi);
3762 if (elt)
3763 emit_label ((rtx) *elt);
3765 /* Java emits line number notes at the top of labels.
3766 ??? Make this go away once line number notes are obsoleted. */
3767 BB_HEAD (bb) = NEXT_INSN (last);
3768 if (NOTE_P (BB_HEAD (bb)))
3769 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3770 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3772 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3774 else
3775 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3777 NOTE_BASIC_BLOCK (note) = bb;
3779 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3781 basic_block new_bb;
3783 stmt = gsi_stmt (gsi);
3785 /* If this statement is a non-debug one, and we generate debug
3786 insns, then this one might be the last real use of a TERed
3787 SSA_NAME while there are still some debug uses further
3788 down. Expanding the current SSA name in such further debug
3789 uses by their RHS might lead to wrong debug info, as coalescing
3790 might make the operands of such RHS be placed into the same
3791 pseudo as something else. Like so:
3792 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3793 use(a_1);
3794 a_2 = ...
3795 #DEBUG ... => a_1
3796 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3797 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3798 the write to a_2 would actually have clobbered the place which
3799 formerly held a_0.
3801 So, instead of that, we recognize the situation, and generate
3802 debug temporaries at the last real use of TERed SSA names:
3803 a_1 = a_0 + 1;
3804 #DEBUG #D1 => a_1
3805 use(a_1);
3806 a_2 = ...
3807 #DEBUG ... => #D1
3809 if (MAY_HAVE_DEBUG_INSNS
3810 && SA.values
3811 && !is_gimple_debug (stmt))
3813 ssa_op_iter iter;
3814 tree op;
3815 gimple def;
3817 location_t sloc = get_curr_insn_source_location ();
3818 tree sblock = get_curr_insn_block ();
3820 /* Look for SSA names that have their last use here (TERed
3821 names always have only one real use). */
3822 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3823 if ((def = get_gimple_for_ssa_name (op)))
3825 imm_use_iterator imm_iter;
3826 use_operand_p use_p;
3827 bool have_debug_uses = false;
3829 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3831 if (gimple_debug_bind_p (USE_STMT (use_p)))
3833 have_debug_uses = true;
3834 break;
3838 if (have_debug_uses)
3840 /* OP is a TERed SSA name, with DEF its defining
3841 statement, and where OP is used in further debug
3842 instructions. Generate a debug temporary, and
3843 replace all uses of OP in debug insns with that
3844 temporary. */
3845 gimple debugstmt;
3846 tree value = gimple_assign_rhs_to_tree (def);
3847 tree vexpr = make_node (DEBUG_EXPR_DECL);
3848 rtx val;
3849 enum machine_mode mode;
3851 set_curr_insn_source_location (gimple_location (def));
3852 set_curr_insn_block (gimple_block (def));
3854 DECL_ARTIFICIAL (vexpr) = 1;
3855 TREE_TYPE (vexpr) = TREE_TYPE (value);
3856 if (DECL_P (value))
3857 mode = DECL_MODE (value);
3858 else
3859 mode = TYPE_MODE (TREE_TYPE (value));
3860 DECL_MODE (vexpr) = mode;
3862 val = gen_rtx_VAR_LOCATION
3863 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3865 emit_debug_insn (val);
3867 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3869 if (!gimple_debug_bind_p (debugstmt))
3870 continue;
3872 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3873 SET_USE (use_p, vexpr);
3875 update_stmt (debugstmt);
3879 set_curr_insn_source_location (sloc);
3880 set_curr_insn_block (sblock);
3883 currently_expanding_gimple_stmt = stmt;
3885 /* Expand this statement, then evaluate the resulting RTL and
3886 fix up the CFG accordingly. */
3887 if (gimple_code (stmt) == GIMPLE_COND)
3889 new_bb = expand_gimple_cond (bb, stmt);
3890 if (new_bb)
3891 return new_bb;
3893 else if (gimple_debug_bind_p (stmt))
3895 location_t sloc = get_curr_insn_source_location ();
3896 tree sblock = get_curr_insn_block ();
3897 gimple_stmt_iterator nsi = gsi;
3899 for (;;)
3901 tree var = gimple_debug_bind_get_var (stmt);
3902 tree value;
3903 rtx val;
3904 enum machine_mode mode;
3906 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3907 && TREE_CODE (var) != LABEL_DECL
3908 && !target_for_debug_bind (var))
3909 goto delink_debug_stmt;
3911 if (gimple_debug_bind_has_value_p (stmt))
3912 value = gimple_debug_bind_get_value (stmt);
3913 else
3914 value = NULL_TREE;
3916 last = get_last_insn ();
3918 set_curr_insn_source_location (gimple_location (stmt));
3919 set_curr_insn_block (gimple_block (stmt));
3921 if (DECL_P (var))
3922 mode = DECL_MODE (var);
3923 else
3924 mode = TYPE_MODE (TREE_TYPE (var));
3926 val = gen_rtx_VAR_LOCATION
3927 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3929 emit_debug_insn (val);
3931 if (dump_file && (dump_flags & TDF_DETAILS))
3933 /* We can't dump the insn with a TREE where an RTX
3934 is expected. */
3935 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3936 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3937 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3940 delink_debug_stmt:
3941 /* In order not to generate too many debug temporaries,
3942 we delink all uses of debug statements we already expanded.
3943 Therefore debug statements between definition and real
3944 use of TERed SSA names will continue to use the SSA name,
3945 and not be replaced with debug temps. */
3946 delink_stmt_imm_use (stmt);
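/* For instance, in
     a_1 = a_0 + 1;
     #DEBUG x => a_1
     use(a_1);
   the #DEBUG bind is expanded first and its use of a_1 is delinked
   here, so the last-real-use scan performed at use(a_1) sees no
   remaining debug uses and generates no debug temporary; the bind
   simply keeps referring to a_1. */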
3948 gsi = nsi;
3949 gsi_next (&nsi);
3950 if (gsi_end_p (nsi))
3951 break;
3952 stmt = gsi_stmt (nsi);
3953 if (!gimple_debug_bind_p (stmt))
3954 break;
3957 set_curr_insn_source_location (sloc);
3958 set_curr_insn_block (sblock);
3960 else if (gimple_debug_source_bind_p (stmt))
3962 location_t sloc = get_curr_insn_source_location ();
3963 tree sblock = get_curr_insn_block ();
3964 tree var = gimple_debug_source_bind_get_var (stmt);
3965 tree value = gimple_debug_source_bind_get_value (stmt);
3966 rtx val;
3967 enum machine_mode mode;
3969 last = get_last_insn ();
3971 set_curr_insn_source_location (gimple_location (stmt));
3972 set_curr_insn_block (gimple_block (stmt));
3974 mode = DECL_MODE (var);
3976 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3977 VAR_INIT_STATUS_UNINITIALIZED);
3979 emit_debug_insn (val);
3981 if (dump_file && (dump_flags & TDF_DETAILS))
3983 /* We can't dump the insn with a TREE where an RTX
3984 is expected. */
3985 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3986 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3987 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3990 set_curr_insn_source_location (sloc);
3991 set_curr_insn_block (sblock);
3993 else
3995 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3997 bool can_fallthru;
3998 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3999 if (new_bb)
4001 if (can_fallthru)
4002 bb = new_bb;
4003 else
4004 return new_bb;
4007 else
4009 def_operand_p def_p;
4010 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4012 if (def_p != NULL)
4014 /* Ignore this stmt if it is in the list of
4015 replaceable expressions. */
4016 if (SA.values
4017 && bitmap_bit_p (SA.values,
4018 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4019 continue;
4021 last = expand_gimple_stmt (stmt);
4022 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4027 currently_expanding_gimple_stmt = NULL;
4029 /* Expand implicit goto and convert goto_locus. */
4030 FOR_EACH_EDGE (e, ei, bb->succs)
4032 if (e->goto_locus && e->goto_block)
4034 set_curr_insn_source_location (e->goto_locus);
4035 set_curr_insn_block (e->goto_block);
4036 e->goto_locus = curr_insn_locator ();
4038 e->goto_block = NULL;
4039 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4041 emit_jump (label_rtx_for_bb (e->dest));
4042 e->flags &= ~EDGE_FALLTHRU;
4046 /* Expanded RTL can create a jump in the last instruction of the block.
4047 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
4048 We need to insert a dummy move to prevent this. PR41440. */
4049 if (single_succ_p (bb)
4050 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4051 && (last = get_last_insn ())
4052 && JUMP_P (last))
4054 rtx dummy = gen_reg_rtx (SImode);
4055 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
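/* The move above copies a fresh pseudo to itself; it has no effect
   on the generated code, but it guarantees the block no longer ends
   in a JUMP_P insn, so later edge-insertion code can safely append
   instructions at the block end. */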
4058 do_pending_stack_adjust ();
4060 /* Find the block tail. The last insn in the block is the insn
4061 before a barrier and/or table jump insn. */
4062 last = get_last_insn ();
4063 if (BARRIER_P (last))
4064 last = PREV_INSN (last);
4065 if (JUMP_TABLE_DATA_P (last))
4066 last = PREV_INSN (PREV_INSN (last));
4067 BB_END (bb) = last;
4069 update_bb_for_insn (bb);
4071 return bb;
4075 /* Create a basic block for initialization code. */
4077 static basic_block
4078 construct_init_block (void)
4080 basic_block init_block, first_block;
4081 edge e = NULL;
4082 int flags;
4084 /* Multiple entry points not supported yet. */
4085 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4086 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4087 init_rtl_bb_info (EXIT_BLOCK_PTR);
4088 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4089 EXIT_BLOCK_PTR->flags |= BB_RTL;
4091 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4093 /* When the entry edge points to the first basic block, we don't need a jump;
4094 otherwise we have to jump to the proper target. */
4095 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4097 tree label = gimple_block_label (e->dest);
4099 emit_jump (label_rtx (label));
4100 flags = 0;
4102 else
4103 flags = EDGE_FALLTHRU;
4105 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4106 get_last_insn (),
4107 ENTRY_BLOCK_PTR);
4108 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4109 init_block->count = ENTRY_BLOCK_PTR->count;
4110 if (e)
4112 first_block = e->dest;
4113 redirect_edge_succ (e, init_block);
4114 e = make_edge (init_block, first_block, flags);
4116 else
4117 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4118 e->probability = REG_BR_PROB_BASE;
4119 e->count = ENTRY_BLOCK_PTR->count;
4121 update_bb_for_insn (init_block);
4122 return init_block;
4125 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4126 found in the block tree. */
4128 static void
4129 set_block_levels (tree block, int level)
4131 while (block)
4133 BLOCK_NUMBER (block) = level;
4134 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4135 block = BLOCK_CHAIN (block);
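/* For example, given a block tree
     DECL_INITIAL (fndecl)        <- level 0
       +-- inner scope            <- level 1
       |     +-- nested scope     <- level 2
       +-- sibling scope          <- level 1
   siblings reached via BLOCK_CHAIN get the same level, while each
   step into BLOCK_SUBBLOCKS increases the level by one. */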
4139 /* Create a block containing landing pads and similar stuff. */
4141 static void
4142 construct_exit_block (void)
4144 rtx head = get_last_insn ();
4145 rtx end;
4146 basic_block exit_block;
4147 edge e, e2;
4148 unsigned ix;
4149 edge_iterator ei;
4150 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4152 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4154 /* Make sure the locus is set to the end of the function, so that
4155 epilogue line numbers and warnings are set properly. */
4156 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4157 input_location = cfun->function_end_locus;
4159 /* The following insns belong to the top scope. */
4160 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4162 /* Generate rtl for function exit. */
4163 expand_function_end ();
4165 end = get_last_insn ();
4166 if (head == end)
4167 return;
4168 /* While emitting the function end, we could have moved the end of the last basic block. */
4170 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4171 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4172 head = NEXT_INSN (head);
4173 exit_block = create_basic_block (NEXT_INSN (head), end,
4174 EXIT_BLOCK_PTR->prev_bb);
4175 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4176 exit_block->count = EXIT_BLOCK_PTR->count;
4178 ix = 0;
4179 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4181 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4182 if (!(e->flags & EDGE_ABNORMAL))
4183 redirect_edge_succ (e, exit_block);
4184 else
4185 ix++;
4188 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4189 e->probability = REG_BR_PROB_BASE;
4190 e->count = EXIT_BLOCK_PTR->count;
4191 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4192 if (e2 != e)
4194 e->count -= e2->count;
4195 exit_block->count -= e2->count;
4196 exit_block->frequency -= EDGE_FREQUENCY (e2);
4198 if (e->count < 0)
4199 e->count = 0;
4200 if (exit_block->count < 0)
4201 exit_block->count = 0;
4202 if (exit_block->frequency < 0)
4203 exit_block->frequency = 0;
4204 update_bb_for_insn (exit_block);
4207 /* Helper function for discover_nonconstant_array_refs.
4208 Look for ARRAY_REF nodes with non-constant indexes and mark them
4209 addressable. */
4211 static tree
4212 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4213 void *data ATTRIBUTE_UNUSED)
4215 tree t = *tp;
4217 if (IS_TYPE_OR_DECL_P (t))
4218 *walk_subtrees = 0;
4219 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4221 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4222 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4223 && (!TREE_OPERAND (t, 2)
4224 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4225 || (TREE_CODE (t) == COMPONENT_REF
4226 && (!TREE_OPERAND (t,2)
4227 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4228 || TREE_CODE (t) == BIT_FIELD_REF
4229 || TREE_CODE (t) == REALPART_EXPR
4230 || TREE_CODE (t) == IMAGPART_EXPR
4231 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4232 || CONVERT_EXPR_P (t))
4233 t = TREE_OPERAND (t, 0);
4235 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4237 t = get_base_address (t);
4238 if (t && DECL_P (t)
4239 && DECL_MODE (t) != BLKmode)
4240 TREE_ADDRESSABLE (t) = 1;
4243 *walk_subtrees = 0;
4246 return NULL_TREE;
4249 /* RTL expansion is not able to compile array references with variable
4250 offsets for arrays stored in a single register. Discover such
4251 expressions and mark variables as addressable to avoid this
4252 scenario. */
4254 static void
4255 discover_nonconstant_array_refs (void)
4257 basic_block bb;
4258 gimple_stmt_iterator gsi;
4260 FOR_EACH_BB (bb)
4261 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4263 gimple stmt = gsi_stmt (gsi);
4264 if (!is_gimple_debug (stmt))
4265 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
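/* As an illustration (a sketch, not taken from a testcase): given
     int v[2];
     ... = v[i];
   with a non-constant i, v might otherwise be promoted to a single
   register, which RTL expansion cannot index at a variable offset;
   walking the statement above marks v TREE_ADDRESSABLE so that it
   stays in memory. */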
4269 /* This function sets crtl->args.internal_arg_pointer to a virtual
4270 register if DRAP is needed. The local register allocator will replace
4271 virtual_incoming_args_rtx with the virtual register. */
4273 static void
4274 expand_stack_alignment (void)
4276 rtx drap_rtx;
4277 unsigned int preferred_stack_boundary;
4279 if (! SUPPORTS_STACK_ALIGNMENT)
4280 return;
4282 if (cfun->calls_alloca
4283 || cfun->has_nonlocal_label
4284 || crtl->has_nonlocal_goto)
4285 crtl->need_drap = true;
4287 /* Call update_stack_boundary here again to update incoming stack
4288 boundary. It may set incoming stack alignment to a different
4289 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4290 use the minimum incoming stack alignment to check if it is OK
4291 to perform sibcall optimization since sibcall optimization will
4292 only align the outgoing stack to incoming stack boundary. */
4293 if (targetm.calls.update_stack_boundary)
4294 targetm.calls.update_stack_boundary ();
4296 /* The incoming stack frame has to be aligned at least at
4297 parm_stack_boundary. */
4298 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4300 /* Update crtl->stack_alignment_estimated and use it later to align
4301 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4302 exceptions since callgraph doesn't collect incoming stack alignment
4303 in this case. */
4304 if (cfun->can_throw_non_call_exceptions
4305 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4306 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4307 else
4308 preferred_stack_boundary = crtl->preferred_stack_boundary;
4309 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4310 crtl->stack_alignment_estimated = preferred_stack_boundary;
4311 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4312 crtl->stack_alignment_needed = preferred_stack_boundary;
4314 gcc_assert (crtl->stack_alignment_needed
4315 <= crtl->stack_alignment_estimated);
4317 crtl->stack_realign_needed
4318 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4319 crtl->stack_realign_tried = crtl->stack_realign_needed;
4321 crtl->stack_realign_processed = true;
4323 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4324 alignment. */
4325 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4326 drap_rtx = targetm.calls.get_drap_rtx ();
4328 /* stack_realign_drap and drap_rtx must match. */
4329 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4331 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4332 if (NULL != drap_rtx)
4334 crtl->args.internal_arg_pointer = drap_rtx;
4336 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4337 needed. */
4338 fixup_tail_calls ();
4342 /* Translate the intermediate representation contained in the CFG
4343 from GIMPLE trees to RTL.
4345 We do conversion per basic block and preserve/update the tree CFG.
4346 This implies we have to do some magic as the CFG can simultaneously
4347 consist of basic blocks containing RTL and GIMPLE trees. This can
4348 confuse the CFG hooks, so be careful to not manipulate CFG during
4349 the expansion. */
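/* As a rough sketch of the translation, a GIMPLE assignment such as
     x_3 = y_1 + z_2;
   becomes an insn along the lines of
     (insn (set (reg:SI 62) (plus:SI (reg:SI 60) (reg:SI 61))))
   where the pseudos are those assigned to the SSA name partitions.
   The RTL shown is illustrative only; the modes and operands depend
   on the types involved. */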
4351 static unsigned int
4352 gimple_expand_cfg (void)
4354 basic_block bb, init_block;
4355 sbitmap blocks;
4356 edge_iterator ei;
4357 edge e;
4358 rtx var_seq;
4359 unsigned i;
4361 timevar_push (TV_OUT_OF_SSA);
4362 rewrite_out_of_ssa (&SA);
4363 timevar_pop (TV_OUT_OF_SSA);
4364 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4365 sizeof (rtx));
4367 /* Some backends want to know that we are expanding to RTL. */
4368 currently_expanding_to_rtl = 1;
4370 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4372 insn_locators_alloc ();
4373 if (!DECL_IS_BUILTIN (current_function_decl))
4375 /* Eventually, all FEs should explicitly set function_start_locus. */
4376 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4377 set_curr_insn_source_location
4378 (DECL_SOURCE_LOCATION (current_function_decl));
4379 else
4380 set_curr_insn_source_location (cfun->function_start_locus);
4382 else
4383 set_curr_insn_source_location (UNKNOWN_LOCATION);
4384 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4385 prologue_locator = curr_insn_locator ();
4387 #ifdef INSN_SCHEDULING
4388 init_sched_attrs ();
4389 #endif
4391 /* Make sure the first insn is a note, even if we don't want line numbers.
4392 This makes sure the first insn will never be deleted.
4393 Also, final expects a note to appear there. */
4394 emit_note (NOTE_INSN_DELETED);
4396 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4397 discover_nonconstant_array_refs ();
4399 targetm.expand_to_rtl_hook ();
4400 crtl->stack_alignment_needed = STACK_BOUNDARY;
4401 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4402 crtl->stack_alignment_estimated = 0;
4403 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4404 cfun->cfg->max_jumptable_ents = 0;
4406 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4407 of the function section at expansion time to predict the distance of calls. */
4408 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4410 /* Expand the variables recorded during gimple lowering. */
4411 timevar_push (TV_VAR_EXPAND);
4412 start_sequence ();
4414 expand_used_vars ();
4416 var_seq = get_insns ();
4417 end_sequence ();
4418 timevar_pop (TV_VAR_EXPAND);
4420 /* Honor stack protection warnings. */
4421 if (warn_stack_protect)
4423 if (cfun->calls_alloca)
4424 warning (OPT_Wstack_protector,
4425 "stack protector not protecting local variables: "
4426 "variable length buffer");
4427 if (has_short_buffer && !crtl->stack_protect_guard)
4428 warning (OPT_Wstack_protector,
4429 "stack protector not protecting function: "
4430 "all local arrays are less than %d bytes long",
4431 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4434 /* Set up parameters for the function and prepare for its return. */
4435 expand_function_start (current_function_decl);
4437 /* If we emitted any instructions for setting up the variables,
4438 emit them before the FUNCTION_START note. */
4439 if (var_seq)
4441 emit_insn_before (var_seq, parm_birth_insn);
4443 /* In expand_function_end we'll insert the alloca save/restore
4444 before parm_birth_insn. We've just inserted an alloca call.
4445 Adjust the pointer to match. */
4446 parm_birth_insn = var_seq;
4449 /* Now that we also have the parameter RTXs, copy them over to our
4450 partitions. */
4451 for (i = 0; i < SA.map->num_partitions; i++)
4453 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4455 if (TREE_CODE (var) != VAR_DECL
4456 && !SA.partition_to_pseudo[i])
4457 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4458 gcc_assert (SA.partition_to_pseudo[i]);
4460 /* If this decl was marked as living in multiple places, reset
4461 this now to NULL. */
4462 if (DECL_RTL_IF_SET (var) == pc_rtx)
4463 SET_DECL_RTL (var, NULL);
4465 /* Some RTL parts really want to look at DECL_RTL(x) when x
4466 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4467 SET_DECL_RTL here to make this available, but that would mean
4468 selecting one of the potentially many RTLs for one DECL. Instead
4469 of doing that we simply reset the MEM_EXPR of the RTL in question,
4470 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4471 if (!DECL_RTL_SET_P (var))
4473 if (MEM_P (SA.partition_to_pseudo[i]))
4474 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4478 /* If we have a class containing differently aligned pointers
4479 we need to merge those into the corresponding RTL pointer
4480 alignment. */
4481 for (i = 1; i < num_ssa_names; i++)
4483 tree name = ssa_name (i);
4484 int part;
4485 rtx r;
4487 if (!name
4488 || !POINTER_TYPE_P (TREE_TYPE (name))
4489 /* We might have generated new SSA names in
4490 update_alias_info_with_stack_vars. They will have a NULL
4491 defining statement, and won't be part of the partitioning,
4492 so ignore those. */
4493 || !SSA_NAME_DEF_STMT (name))
4494 continue;
4495 part = var_to_partition (SA.map, name);
4496 if (part == NO_PARTITION)
4497 continue;
4498 r = SA.partition_to_pseudo[part];
4499 if (REG_P (r))
4500 mark_reg_pointer (r, get_pointer_alignment (name));
4503 /* If this function is `main', emit a call to `__main'
4504 to run global initializers, etc. */
4505 if (DECL_NAME (current_function_decl)
4506 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4507 && DECL_FILE_SCOPE_P (current_function_decl))
4508 expand_main_function ();
4510 /* Initialize the stack_protect_guard field. This must happen after the
4511 call to __main (if any) so that the external decl is initialized. */
4512 if (crtl->stack_protect_guard)
4513 stack_protect_prologue ();
4515 expand_phi_nodes (&SA);
4517 /* Register rtl specific functions for cfg. */
4518 rtl_register_cfg_hooks ();
4520 init_block = construct_init_block ();
4522 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4523 remaining edges later. */
4524 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4525 e->flags &= ~EDGE_EXECUTABLE;
4527 lab_rtx_for_bb = pointer_map_create ();
4528 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4529 bb = expand_gimple_basic_block (bb);
4531 if (MAY_HAVE_DEBUG_INSNS)
4532 expand_debug_locations ();
4534 execute_free_datastructures ();
4535 timevar_push (TV_OUT_OF_SSA);
4536 finish_out_of_ssa (&SA);
4537 timevar_pop (TV_OUT_OF_SSA);
4539 timevar_push (TV_POST_EXPAND);
4540 /* We are no longer in SSA form. */
4541 cfun->gimple_df->in_ssa_p = false;
4543 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4544 conservatively to true until they are all profile-aware. */
4545 pointer_map_destroy (lab_rtx_for_bb);
4546 free_histograms ();
4548 construct_exit_block ();
4549 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4550 insn_locators_finalize ();
4552 /* Zap the tree EH table. */
4553 set_eh_throw_stmt_table (cfun, NULL);
4555 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4556 to split edges, which edge insertions might need to do. */
4557 rebuild_jump_labels (get_insns ());
4559 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4561 edge e;
4562 edge_iterator ei;
4563 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4565 if (e->insns.r)
4567 rebuild_jump_labels_chain (e->insns.r);
4568 /* Avoid putting insns before parm_birth_insn. */
4569 if (e->src == ENTRY_BLOCK_PTR
4570 && single_succ_p (ENTRY_BLOCK_PTR)
4571 && parm_birth_insn)
4573 rtx insns = e->insns.r;
4574 e->insns.r = NULL_RTX;
4575 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4577 else
4578 commit_one_edge_insertion (e);
4580 else
4581 ei_next (&ei);
4585 /* We're done expanding trees to RTL. */
4586 currently_expanding_to_rtl = 0;
4588 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4590 edge e;
4591 edge_iterator ei;
4592 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4594 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4595 e->flags &= ~EDGE_EXECUTABLE;
4597 /* At the moment not all abnormal edges match the RTL
4598 representation. It is safe to remove them here as
4599 find_many_sub_basic_blocks will rediscover them.
4600 In the future we should get this fixed properly. */
4601 if ((e->flags & EDGE_ABNORMAL)
4602 && !(e->flags & EDGE_SIBCALL))
4603 remove_edge (e);
4604 else
4605 ei_next (&ei);
4609 blocks = sbitmap_alloc (last_basic_block);
4610 sbitmap_ones (blocks);
4611 find_many_sub_basic_blocks (blocks);
4612 sbitmap_free (blocks);
4613 purge_all_dead_edges ();
4615 compact_blocks ();
4617 expand_stack_alignment ();
4619 #ifdef ENABLE_CHECKING
4620 verify_flow_info ();
4621 #endif
4623 /* There's no need to defer outputting this function any more; we
4624 know we want to output it. */
4625 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4627 /* Now that we're done expanding trees to RTL, we shouldn't have any
4628 more CONCATs anywhere. */
4629 generating_concat_p = 0;
4631 if (dump_file)
4633 fprintf (dump_file,
4634 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4635 /* And the pass manager will dump RTL for us. */
4638 /* If we're emitting a nested function, make sure its parent gets
4639 emitted as well. Doing otherwise confuses debug info. */
4641 tree parent;
4642 for (parent = DECL_CONTEXT (current_function_decl);
4643 parent != NULL_TREE;
4644 parent = get_containing_scope (parent))
4645 if (TREE_CODE (parent) == FUNCTION_DECL)
4646 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4649 /* We are now committed to emitting code for this function. Do any
4650 preparation, such as emitting abstract debug info for the inline
4651 before it gets mangled by optimization. */
4652 if (cgraph_function_possibly_inlined_p (current_function_decl))
4653 (*debug_hooks->outlining_inline_function) (current_function_decl);
4655 TREE_ASM_WRITTEN (current_function_decl) = 1;
4657 /* After expanding, the return labels are no longer needed. */
4658 return_label = NULL;
4659 naked_return_label = NULL;
4661 /* After expanding, the tm_restart map is no longer needed. */
4662 if (cfun->gimple_df->tm_restart)
4664 htab_delete (cfun->gimple_df->tm_restart);
4665 cfun->gimple_df->tm_restart = NULL;
4668 /* Tag the blocks with a depth number so that change_scope can find
4669 the common parent easily. */
4670 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4671 default_rtl_profile ();
4672 timevar_pop (TV_POST_EXPAND);
4673 return 0;
4676 struct rtl_opt_pass pass_expand =
4679 RTL_PASS,
4680 "expand", /* name */
4681 NULL, /* gate */
4682 gimple_expand_cfg, /* execute */
4683 NULL, /* sub */
4684 NULL, /* next */
4685 0, /* static_pass_number */
4686 TV_EXPAND, /* tv_id */
4687 PROP_ssa | PROP_gimple_leh | PROP_cfg
4688 | PROP_gimple_lcx, /* properties_required */
4689 PROP_rtl, /* properties_provided */
4690 PROP_ssa | PROP_trees, /* properties_destroyed */
4691 TODO_verify_ssa | TODO_verify_flow
4692 | TODO_verify_stmts, /* todo_flags_start */
4693 TODO_ggc_collect /* todo_flags_finish */