gcc/cfgexpand.c  (official-gcc.git, blob as of 2012-03-17)
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
41 #include "toplev.h"
42 #include "debug.h"
43 #include "params.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
46 #include "target.h"
47 #include "ssaexpand.h"
48 #include "bitmap.h"
49 #include "sbitmap.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
53 into RTL. */
54 struct ssaexpand SA;
56 /* This variable holds the currently expanded gimple statement for purposes
 57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 static rtx expand_debug_expr (tree);
62 /* Return an expression tree corresponding to the RHS of GIMPLE
63 statement STMT. */
65 tree
66 gimple_assign_rhs_to_tree (gimple stmt)
68 tree t;
69 enum gimple_rhs_class grhs_class;
71 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
73 if (grhs_class == GIMPLE_TERNARY_RHS)
74 t = build3 (gimple_assign_rhs_code (stmt),
75 TREE_TYPE (gimple_assign_lhs (stmt)),
76 gimple_assign_rhs1 (stmt),
77 gimple_assign_rhs2 (stmt),
78 gimple_assign_rhs3 (stmt));
79 else if (grhs_class == GIMPLE_BINARY_RHS)
80 t = build2 (gimple_assign_rhs_code (stmt),
81 TREE_TYPE (gimple_assign_lhs (stmt)),
82 gimple_assign_rhs1 (stmt),
83 gimple_assign_rhs2 (stmt));
84 else if (grhs_class == GIMPLE_UNARY_RHS)
85 t = build1 (gimple_assign_rhs_code (stmt),
86 TREE_TYPE (gimple_assign_lhs (stmt)),
87 gimple_assign_rhs1 (stmt));
88 else if (grhs_class == GIMPLE_SINGLE_RHS)
90 t = gimple_assign_rhs1 (stmt);
91 /* Avoid modifying this tree in place below. */
92 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93 && gimple_location (stmt) != EXPR_LOCATION (t))
94 || (gimple_block (stmt)
95 && currently_expanding_to_rtl
96 && EXPR_P (t)
97 && gimple_block (stmt) != TREE_BLOCK (t)))
98 t = copy_node (t);
100 else
101 gcc_unreachable ();
103 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104 SET_EXPR_LOCATION (t, gimple_location (stmt));
105 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106 TREE_BLOCK (t) = gimple_block (stmt);
108 return t;
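/* Illustrative sketch (not taken from the sources): for a GIMPLE
   assignment "x_1 = y_2 + z_3" the expression code is PLUS_EXPR,
   which classifies as GIMPLE_BINARY_RHS, so we rebuild the GENERIC
   tree build2 (PLUS_EXPR, TREE_TYPE (x_1), y_2, z_3).  A plain copy
   "x_1 = y_2" is GIMPLE_SINGLE_RHS and returns (a possibly copied)
   rhs1 directly.  */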
112 #ifndef STACK_ALIGNMENT_NEEDED
113 #define STACK_ALIGNMENT_NEEDED 1
114 #endif
116 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
 118 /* Associate declaration T with storage space X. If T is not an
 119 SSA name this is exactly SET_DECL_RTL; otherwise associate the
 120 partition of T with X. */
121 static inline void
122 set_rtl (tree t, rtx x)
124 if (TREE_CODE (t) == SSA_NAME)
126 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
127 if (x && !MEM_P (x))
128 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129 /* For the benefit of debug information at -O0 (where vartracking
130 doesn't run) record the place also in the base DECL if it's
131 a normal variable (not a parameter). */
132 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
134 tree var = SSA_NAME_VAR (t);
135 /* If we don't yet have something recorded, just record it now. */
136 if (!DECL_RTL_SET_P (var))
137 SET_DECL_RTL (var, x);
138 /* If we have it set already to "multiple places" don't
139 change this. */
140 else if (DECL_RTL (var) == pc_rtx)
142 /* If we have something recorded and it's not the same place
143 as we want to record now, we have multiple partitions for the
144 same base variable, with different places. We can't just
 145 randomly choose one, hence we have to say that we don't know.
146 This only happens with optimization, and there var-tracking
147 will figure out the right thing. */
148 else if (DECL_RTL (var) != x)
149 SET_DECL_RTL (var, pc_rtx);
152 else
153 SET_DECL_RTL (t, x);
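/* Example (a sketch, not from any testcase): when a user variable is
   split into two partitions that receive different pseudos (which only
   happens when optimizing), the first set_rtl call records that pseudo
   in DECL_RTL of the base variable; the second call sees a different
   rtx and downgrades DECL_RTL to pc_rtx, "multiple places", leaving
   var-tracking to work out the per-range locations.  At -O0 there is a
   single location and DECL_RTL keeps it for the debug info.  */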
156 /* This structure holds data relevant to one variable that will be
157 placed in a stack slot. */
158 struct stack_var
160 /* The Variable. */
161 tree decl;
163 /* Initially, the size of the variable. Later, the size of the partition,
 164 if this variable becomes its partition's representative. */
165 HOST_WIDE_INT size;
 167 /* The *byte* alignment required for this variable. Or, as with the
 168 size, the alignment for this partition. */
169 unsigned int alignb;
171 /* The partition representative. */
172 size_t representative;
174 /* The next stack variable in the partition, or EOC. */
175 size_t next;
177 /* The numbers of conflicting stack variables. */
178 bitmap conflicts;
181 #define EOC ((size_t)-1)
183 /* We have an array of such objects while deciding allocation. */
184 static struct stack_var *stack_vars;
185 static size_t stack_vars_alloc;
186 static size_t stack_vars_num;
187 static struct pointer_map_t *decl_to_stack_part;
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
 190 is non-increasing (largest first; see stack_var_cmp). */
191 static size_t *stack_vars_sorted;
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase;
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls;
 202 /* Used during expand_used_vars. Remember if we saw a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer;
206 /* Compute the byte alignment to use for DECL. Ignore alignment
 207 we cannot honor given the expected alignment of the stack boundary. */
209 static unsigned int
210 align_local_variable (tree decl)
212 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213 DECL_ALIGN (decl) = align;
214 return align / BITS_PER_UNIT;
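/* For example (illustrative), on a target with BITS_PER_UNIT == 8 a
   decl for which LOCAL_DECL_ALIGNMENT returns 32 bits has DECL_ALIGN
   set to 32 and this function returns 4, the alignment in bytes.  */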
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
220 static HOST_WIDE_INT
221 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
223 HOST_WIDE_INT offset, new_frame_offset;
225 new_frame_offset = frame_offset;
226 if (FRAME_GROWS_DOWNWARD)
228 new_frame_offset -= size + frame_phase;
229 new_frame_offset &= -align;
230 new_frame_offset += frame_phase;
231 offset = new_frame_offset;
233 else
235 new_frame_offset -= frame_phase;
236 new_frame_offset += align - 1;
237 new_frame_offset &= -align;
238 new_frame_offset += frame_phase;
239 offset = new_frame_offset;
240 new_frame_offset += size;
242 frame_offset = new_frame_offset;
244 if (frame_offset_overflow (frame_offset, cfun->decl))
245 frame_offset = offset = 0;
247 return offset;
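/* A worked example (illustrative only), assuming FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -12, size == 8 and align == 16:
   new_frame_offset becomes -12 - 8 = -20, which "&= -16" rounds down
   to -32, so the new slot occupies [-32, -24) and frame_offset ends up
   at -32.  In the upward-growing case the offset is rounded up instead
   and the size is added after the offset has been recorded.  */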
250 /* Accumulate DECL into STACK_VARS. */
252 static void
253 add_stack_var (tree decl)
255 struct stack_var *v;
257 if (stack_vars_num >= stack_vars_alloc)
259 if (stack_vars_alloc)
260 stack_vars_alloc = stack_vars_alloc * 3 / 2;
261 else
262 stack_vars_alloc = 32;
263 stack_vars
264 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
266 if (!decl_to_stack_part)
267 decl_to_stack_part = pointer_map_create ();
269 v = &stack_vars[stack_vars_num];
270 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
272 v->decl = decl;
273 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
276 if (v->size == 0)
277 v->size = 1;
278 v->alignb = align_local_variable (SSAVAR (decl));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v->alignb != 0);
282 /* All variables are initially in their own partition. */
283 v->representative = stack_vars_num;
284 v->next = EOC;
286 /* All variables initially conflict with no other. */
287 v->conflicts = NULL;
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl, pc_rtx);
292 stack_vars_num++;
 295 /* Make the decls associated with luids X and Y conflict. */
297 static void
298 add_stack_var_conflict (size_t x, size_t y)
300 struct stack_var *a = &stack_vars[x];
301 struct stack_var *b = &stack_vars[y];
302 if (!a->conflicts)
303 a->conflicts = BITMAP_ALLOC (NULL);
304 if (!b->conflicts)
305 b->conflicts = BITMAP_ALLOC (NULL);
306 bitmap_set_bit (a->conflicts, y);
307 bitmap_set_bit (b->conflicts, x);
 310 /* Check whether the decls associated with luids X and Y conflict. */
312 static bool
313 stack_var_conflict_p (size_t x, size_t y)
315 struct stack_var *a = &stack_vars[x];
316 struct stack_var *b = &stack_vars[y];
317 if (x == y)
318 return false;
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323 return true;
325 if (!a->conflicts || !b->conflicts)
326 return false;
327 return bitmap_bit_p (a->conflicts, y);
330 /* Returns true if TYPE is or contains a union type. */
332 static bool
333 aggregate_contains_union_type (tree type)
335 tree field;
337 if (TREE_CODE (type) == UNION_TYPE
338 || TREE_CODE (type) == QUAL_UNION_TYPE)
339 return true;
340 if (TREE_CODE (type) == ARRAY_TYPE)
341 return aggregate_contains_union_type (TREE_TYPE (type));
342 if (TREE_CODE (type) != RECORD_TYPE)
343 return false;
345 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346 if (TREE_CODE (field) == FIELD_DECL)
347 if (aggregate_contains_union_type (TREE_TYPE (field)))
348 return true;
350 return false;
 353 /* A subroutine of expand_used_vars. If two variables X and Y have alias
 354 sets that do not conflict, then add a conflict for these variables
 355 in the interference graph. We also need to add conflicts for
 356 union-containing structures. Otherwise RTL alias analysis, applying
 357 type-based aliasing rules, may decide that for two overlapping
 358 union temporaries { short s; int i; } accesses to the same memory
 359 through different types do not alias, and happily reorder stores
 360 across the lifetime boundaries of the temporaries (see PR25654). */
362 static void
363 add_alias_set_conflicts (void)
365 size_t i, j, n = stack_vars_num;
367 for (i = 0; i < n; ++i)
369 tree type_i = TREE_TYPE (stack_vars[i].decl);
370 bool aggr_i = AGGREGATE_TYPE_P (type_i);
371 bool contains_union;
373 contains_union = aggregate_contains_union_type (type_i);
374 for (j = 0; j < i; ++j)
376 tree type_j = TREE_TYPE (stack_vars[j].decl);
377 bool aggr_j = AGGREGATE_TYPE_P (type_j);
378 if (aggr_i != aggr_j
379 /* Either the objects conflict by means of type based
380 aliasing rules, or we need to add a conflict. */
381 || !objects_must_conflict_p (type_i, type_j)
382 /* In case the types do not conflict ensure that access
383 to elements will conflict. In case of unions we have
384 to be careful as type based aliasing rules may say
385 access to the same memory does not conflict. So play
386 safe and add a conflict in this case when
387 -fstrict-aliasing is used. */
388 || (contains_union && flag_strict_aliasing))
389 add_stack_var_conflict (i, j);
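/* Illustrative example (a sketch): given two block-local temporaries

     union u { short s; int i; } tmp1, tmp2;

   that the partitioner could otherwise place in the same stack slot,
   type-based alias analysis would treat a store through tmp1.s and a
   load through tmp2.i as non-aliasing, and the RTL passes could then
   reorder them across the temporaries' lifetimes.  The conflict added
   above keeps such variables in distinct slots under -fstrict-aliasing.  */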
394 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
395 enter its partition number into bitmap DATA. */
397 static bool
398 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
400 bitmap active = (bitmap)data;
401 op = get_base_address (op);
402 if (op
403 && DECL_P (op)
404 && DECL_RTL_IF_SET (op) == pc_rtx)
406 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
407 if (v)
408 bitmap_set_bit (active, *v);
410 return false;
413 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
414 record conflicts between it and all currently active other partitions
415 from bitmap DATA. */
417 static bool
418 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
420 bitmap active = (bitmap)data;
421 op = get_base_address (op);
422 if (op
423 && DECL_P (op)
424 && DECL_RTL_IF_SET (op) == pc_rtx)
426 size_t *v =
427 (size_t *) pointer_map_contains (decl_to_stack_part, op);
428 if (v && bitmap_set_bit (active, *v))
430 size_t num = *v;
431 bitmap_iterator bi;
432 unsigned i;
433 gcc_assert (num < stack_vars_num);
434 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
435 add_stack_var_conflict (num, i);
438 return false;
441 /* Helper routine for add_scope_conflicts, calculating the active partitions
442 at the end of BB, leaving the result in WORK. We're called to generate
443 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
444 liveness. */
446 static void
447 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
449 edge e;
450 edge_iterator ei;
451 gimple_stmt_iterator gsi;
452 bool (*visit)(gimple, tree, void *);
454 bitmap_clear (work);
455 FOR_EACH_EDGE (e, ei, bb->preds)
456 bitmap_ior_into (work, (bitmap)e->src->aux);
458 visit = visit_op;
460 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
462 gimple stmt = gsi_stmt (gsi);
463 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
465 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
467 gimple stmt = gsi_stmt (gsi);
469 if (gimple_clobber_p (stmt))
471 tree lhs = gimple_assign_lhs (stmt);
472 size_t *v;
473 /* Nested function lowering might introduce LHSs
474 that are COMPONENT_REFs. */
475 if (TREE_CODE (lhs) != VAR_DECL)
476 continue;
477 if (DECL_RTL_IF_SET (lhs) == pc_rtx
478 && (v = (size_t *)
479 pointer_map_contains (decl_to_stack_part, lhs)))
480 bitmap_clear_bit (work, *v);
482 else if (!is_gimple_debug (stmt))
484 if (for_conflict
485 && visit == visit_op)
487 /* If this is the first real instruction in this BB we need
488 to add conflicts for everything live at this point now.
489 Unlike classical liveness for named objects we can't
490 rely on seeing a def/use of the names we're interested in.
491 There might merely be indirect loads/stores. We'd not add any
492 conflicts for such partitions. */
493 bitmap_iterator bi;
494 unsigned i;
495 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
497 unsigned j;
498 bitmap_iterator bj;
499 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
500 add_stack_var_conflict (i, j);
502 visit = visit_conflict;
504 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
509 /* Generate stack partition conflicts between all partitions that are
510 simultaneously live. */
512 static void
513 add_scope_conflicts (void)
515 basic_block bb;
516 bool changed;
517 bitmap work = BITMAP_ALLOC (NULL);
519 /* We approximate the live range of a stack variable by taking the first
520 mention of its name as starting point(s), and by the end-of-scope
521 death clobber added by gimplify as ending point(s) of the range.
 522 This over-approximates in the case where we, for instance, moved an
 523 address-taken operation upward without also moving a dereference of it.
 524 But it is conservatively correct, as a variable can never hold values
525 before its name is mentioned at least once.
527 We then do a mostly classical bitmap liveness algorithm. */
529 FOR_ALL_BB (bb)
530 bb->aux = BITMAP_ALLOC (NULL);
532 changed = true;
533 while (changed)
535 changed = false;
536 FOR_EACH_BB (bb)
538 bitmap active = (bitmap)bb->aux;
539 add_scope_conflicts_1 (bb, work, false);
540 if (bitmap_ior_into (active, work))
541 changed = true;
545 FOR_EACH_BB (bb)
546 add_scope_conflicts_1 (bb, work, true);
548 BITMAP_FREE (work);
549 FOR_ALL_BB (bb)
550 BITMAP_FREE (bb->aux);
553 /* A subroutine of partition_stack_vars. A comparison function for qsort,
554 sorting an array of indices by the properties of the object. */
556 static int
557 stack_var_cmp (const void *a, const void *b)
559 size_t ia = *(const size_t *)a;
560 size_t ib = *(const size_t *)b;
561 unsigned int aligna = stack_vars[ia].alignb;
562 unsigned int alignb = stack_vars[ib].alignb;
563 HOST_WIDE_INT sizea = stack_vars[ia].size;
564 HOST_WIDE_INT sizeb = stack_vars[ib].size;
565 tree decla = stack_vars[ia].decl;
566 tree declb = stack_vars[ib].decl;
567 bool largea, largeb;
568 unsigned int uida, uidb;
570 /* Primary compare on "large" alignment. Large comes first. */
571 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
572 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
573 if (largea != largeb)
574 return (int)largeb - (int)largea;
576 /* Secondary compare on size, decreasing */
577 if (sizea > sizeb)
578 return -1;
579 if (sizea < sizeb)
580 return 1;
582 /* Tertiary compare on true alignment, decreasing. */
583 if (aligna < alignb)
584 return -1;
585 if (aligna > alignb)
586 return 1;
588 /* Final compare on ID for sort stability, increasing.
589 Two SSA names are compared by their version, SSA names come before
590 non-SSA names, and two normal decls are compared by their DECL_UID. */
591 if (TREE_CODE (decla) == SSA_NAME)
593 if (TREE_CODE (declb) == SSA_NAME)
594 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
595 else
596 return -1;
598 else if (TREE_CODE (declb) == SSA_NAME)
599 return 1;
600 else
601 uida = DECL_UID (decla), uidb = DECL_UID (declb);
602 if (uida < uidb)
603 return 1;
604 if (uida > uidb)
605 return -1;
606 return 0;
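/* Rough example of the resulting order (a sketch): with no "large"
   alignments involved, stack variables of 64, 16, 16 and 4 bytes are
   visited as 64, 16, 16, 4, i.e. biggest first; the two 16-byte
   entries are then ordered by their alignment and finally by SSA
   version or DECL_UID, which keeps the sort stable across runs.  */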
 610 /* If the points-to solution *PT points to variables that are in a partition
611 together with other variables add all partition members to the pointed-to
612 variables bitmap. */
614 static void
615 add_partitioned_vars_to_ptset (struct pt_solution *pt,
616 struct pointer_map_t *decls_to_partitions,
617 struct pointer_set_t *visited, bitmap temp)
619 bitmap_iterator bi;
620 unsigned i;
621 bitmap *part;
623 if (pt->anything
624 || pt->vars == NULL
625 /* The pointed-to vars bitmap is shared, it is enough to
626 visit it once. */
627 || pointer_set_insert(visited, pt->vars))
628 return;
630 bitmap_clear (temp);
632 /* By using a temporary bitmap to store all members of the partitions
633 we have to add we make sure to visit each of the partitions only
634 once. */
635 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
636 if ((!temp
637 || !bitmap_bit_p (temp, i))
638 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
639 (void *)(size_t) i)))
640 bitmap_ior_into (temp, *part);
641 if (!bitmap_empty_p (temp))
642 bitmap_ior_into (pt->vars, temp);
645 /* Update points-to sets based on partition info, so we can use them on RTL.
646 The bitmaps representing stack partitions will be saved until expand,
647 where partitioned decls used as bases in memory expressions will be
648 rewritten. */
650 static void
651 update_alias_info_with_stack_vars (void)
653 struct pointer_map_t *decls_to_partitions = NULL;
654 size_t i, j;
655 tree var = NULL_TREE;
657 for (i = 0; i < stack_vars_num; i++)
659 bitmap part = NULL;
660 tree name;
661 struct ptr_info_def *pi;
 663 /* Not interested in partitions with a single variable. */
664 if (stack_vars[i].representative != i
665 || stack_vars[i].next == EOC)
666 continue;
668 if (!decls_to_partitions)
670 decls_to_partitions = pointer_map_create ();
671 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
674 /* Create an SSA_NAME that points to the partition for use
675 as base during alias-oracle queries on RTL for bases that
676 have been partitioned. */
677 if (var == NULL_TREE)
678 var = create_tmp_var (ptr_type_node, NULL);
679 name = make_ssa_name (var, NULL);
681 /* Create bitmaps representing partitions. They will be used for
682 points-to sets later, so use GGC alloc. */
683 part = BITMAP_GGC_ALLOC ();
684 for (j = i; j != EOC; j = stack_vars[j].next)
686 tree decl = stack_vars[j].decl;
687 unsigned int uid = DECL_PT_UID (decl);
688 /* We should never end up partitioning SSA names (though they
689 may end up on the stack). Neither should we allocate stack
690 space to something that is unused and thus unreferenced, except
691 for -O0 where we are preserving even unreferenced variables. */
692 gcc_assert (DECL_P (decl)
693 && (!optimize
694 || referenced_var_lookup (cfun, DECL_UID (decl))));
695 bitmap_set_bit (part, uid);
696 *((bitmap *) pointer_map_insert (decls_to_partitions,
697 (void *)(size_t) uid)) = part;
698 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
699 decl)) = name;
702 /* Make the SSA name point to all partition members. */
703 pi = get_ptr_info (name);
704 pt_solution_set (&pi->pt, part, false);
707 /* Make all points-to sets that contain one member of a partition
708 contain all members of the partition. */
709 if (decls_to_partitions)
711 unsigned i;
712 struct pointer_set_t *visited = pointer_set_create ();
713 bitmap temp = BITMAP_ALLOC (NULL);
715 for (i = 1; i < num_ssa_names; i++)
717 tree name = ssa_name (i);
718 struct ptr_info_def *pi;
720 if (name
721 && POINTER_TYPE_P (TREE_TYPE (name))
722 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
723 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
724 visited, temp);
727 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
728 decls_to_partitions, visited, temp);
730 pointer_set_destroy (visited);
731 pointer_map_destroy (decls_to_partitions);
732 BITMAP_FREE (temp);
736 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
737 partitioning algorithm. Partitions A and B are known to be non-conflicting.
738 Merge them into a single partition A. */
740 static void
741 union_stack_vars (size_t a, size_t b)
743 struct stack_var *vb = &stack_vars[b];
744 bitmap_iterator bi;
745 unsigned u;
747 gcc_assert (stack_vars[b].next == EOC);
748 /* Add B to A's partition. */
749 stack_vars[b].next = stack_vars[a].next;
750 stack_vars[b].representative = a;
751 stack_vars[a].next = b;
753 /* Update the required alignment of partition A to account for B. */
754 if (stack_vars[a].alignb < stack_vars[b].alignb)
755 stack_vars[a].alignb = stack_vars[b].alignb;
757 /* Update the interference graph and merge the conflicts. */
758 if (vb->conflicts)
760 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
761 add_stack_var_conflict (a, stack_vars[u].representative);
762 BITMAP_FREE (vb->conflicts);
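/* Example (illustrative): if partition A currently holds the chain
   a -> x -> EOC and B is the singleton b, union_stack_vars (a, b)
   produces a -> b -> x -> EOC with stack_vars[b].representative == a,
   raises A's alignment to the maximum of the two, and records each of
   B's conflicts against A instead.  */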
766 /* A subroutine of expand_used_vars. Binpack the variables into
767 partitions constrained by the interference graph. The overall
768 algorithm used is as follows:
770 Sort the objects by size in descending order.
771 For each object A {
772 S = size(A)
773 O = 0
774 loop {
775 Look for the largest non-conflicting object B with size <= S.
776 UNION (A, B)
781 static void
782 partition_stack_vars (void)
784 size_t si, sj, n = stack_vars_num;
786 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
787 for (si = 0; si < n; ++si)
788 stack_vars_sorted[si] = si;
790 if (n == 1)
791 return;
793 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
795 for (si = 0; si < n; ++si)
797 size_t i = stack_vars_sorted[si];
798 unsigned int ialign = stack_vars[i].alignb;
800 /* Ignore objects that aren't partition representatives. If we
801 see a var that is not a partition representative, it must
802 have been merged earlier. */
803 if (stack_vars[i].representative != i)
804 continue;
806 for (sj = si + 1; sj < n; ++sj)
808 size_t j = stack_vars_sorted[sj];
809 unsigned int jalign = stack_vars[j].alignb;
811 /* Ignore objects that aren't partition representatives. */
812 if (stack_vars[j].representative != j)
813 continue;
815 /* Ignore conflicting objects. */
816 if (stack_var_conflict_p (i, j))
817 continue;
819 /* Do not mix objects of "small" (supported) alignment
820 and "large" (unsupported) alignment. */
821 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
822 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
823 continue;
 825 /* UNION the objects; J joins I's partition. */
826 union_stack_vars (i, j);
830 update_alias_info_with_stack_vars ();
833 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
835 static void
836 dump_stack_var_partition (void)
838 size_t si, i, j, n = stack_vars_num;
840 for (si = 0; si < n; ++si)
842 i = stack_vars_sorted[si];
844 /* Skip variables that aren't partition representatives, for now. */
845 if (stack_vars[i].representative != i)
846 continue;
848 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
849 " align %u\n", (unsigned long) i, stack_vars[i].size,
850 stack_vars[i].alignb);
852 for (j = i; j != EOC; j = stack_vars[j].next)
854 fputc ('\t', dump_file);
855 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
857 fputc ('\n', dump_file);
861 /* Assign rtl to DECL at BASE + OFFSET. */
863 static void
864 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
865 HOST_WIDE_INT offset)
867 unsigned align;
868 rtx x;
870 /* If this fails, we've overflowed the stack frame. Error nicely? */
871 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
873 x = plus_constant (base, offset);
874 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
876 if (TREE_CODE (decl) != SSA_NAME)
 878 /* Record the alignment we actually gave this decl if it isn't an SSA name.
 879 If it is, we generate stack slots only accidentally, so it isn't as
 880 important; we'll simply use the alignment that is already set. */
881 if (base == virtual_stack_vars_rtx)
882 offset -= frame_phase;
883 align = offset & -offset;
884 align *= BITS_PER_UNIT;
885 if (align == 0 || align > base_align)
886 align = base_align;
888 /* One would think that we could assert that we're not decreasing
889 alignment here, but (at least) the i386 port does exactly this
890 via the MINIMUM_ALIGNMENT hook. */
892 DECL_ALIGN (decl) = align;
893 DECL_USER_ALIGN (decl) = 0;
896 set_mem_attributes (x, SSAVAR (decl), true);
897 set_rtl (decl, x);
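/* The "offset & -offset" computation above extracts the largest power
   of two dividing OFFSET.  For example (illustrative), an offset of
   -24 from virtual_stack_vars_rtx gives -24 & 24 == 8, i.e. the slot
   is known to be at least 8-byte (64-bit) aligned, capped by
   BASE_ALIGN.  */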
900 /* A subroutine of expand_used_vars. Give each partition representative
901 a unique location within the stack frame. Update each partition member
902 with that location. */
904 static void
905 expand_stack_vars (bool (*pred) (tree))
907 size_t si, i, j, n = stack_vars_num;
908 HOST_WIDE_INT large_size = 0, large_alloc = 0;
909 rtx large_base = NULL;
910 unsigned large_align = 0;
911 tree decl;
913 /* Determine if there are any variables requiring "large" alignment.
 914 Since these are dynamically allocated, we only process them when
 915 no predicate is involved. */
916 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
917 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
919 /* Find the total size of these variables. */
920 for (si = 0; si < n; ++si)
922 unsigned alignb;
924 i = stack_vars_sorted[si];
925 alignb = stack_vars[i].alignb;
927 /* Stop when we get to the first decl with "small" alignment. */
928 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
929 break;
931 /* Skip variables that aren't partition representatives. */
932 if (stack_vars[i].representative != i)
933 continue;
935 /* Skip variables that have already had rtl assigned. See also
936 add_stack_var where we perpetrate this pc_rtx hack. */
937 decl = stack_vars[i].decl;
938 if ((TREE_CODE (decl) == SSA_NAME
939 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
940 : DECL_RTL (decl)) != pc_rtx)
941 continue;
943 large_size += alignb - 1;
944 large_size &= -(HOST_WIDE_INT)alignb;
945 large_size += stack_vars[i].size;
948 /* If there were any, allocate space. */
949 if (large_size > 0)
950 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
951 large_align, true);
954 for (si = 0; si < n; ++si)
956 rtx base;
957 unsigned base_align, alignb;
958 HOST_WIDE_INT offset;
960 i = stack_vars_sorted[si];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars[i].representative != i)
964 continue;
966 /* Skip variables that have already had rtl assigned. See also
967 add_stack_var where we perpetrate this pc_rtx hack. */
968 decl = stack_vars[i].decl;
969 if ((TREE_CODE (decl) == SSA_NAME
970 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
971 : DECL_RTL (decl)) != pc_rtx)
972 continue;
974 /* Check the predicate to see whether this variable should be
975 allocated in this pass. */
976 if (pred && !pred (decl))
977 continue;
979 alignb = stack_vars[i].alignb;
980 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
982 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
983 base = virtual_stack_vars_rtx;
984 base_align = crtl->max_used_stack_slot_alignment;
986 else
988 /* Large alignment is only processed in the last pass. */
989 if (pred)
990 continue;
991 gcc_assert (large_base != NULL);
993 large_alloc += alignb - 1;
994 large_alloc &= -(HOST_WIDE_INT)alignb;
995 offset = large_alloc;
996 large_alloc += stack_vars[i].size;
998 base = large_base;
999 base_align = large_align;
1002 /* Create rtl for each variable based on their location within the
1003 partition. */
1004 for (j = i; j != EOC; j = stack_vars[j].next)
1006 expand_one_stack_var_at (stack_vars[j].decl,
1007 base, base_align,
1008 offset);
1012 gcc_assert (large_alloc == large_size);
1015 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1016 static HOST_WIDE_INT
1017 account_stack_vars (void)
1019 size_t si, j, i, n = stack_vars_num;
1020 HOST_WIDE_INT size = 0;
1022 for (si = 0; si < n; ++si)
1024 i = stack_vars_sorted[si];
1026 /* Skip variables that aren't partition representatives, for now. */
1027 if (stack_vars[i].representative != i)
1028 continue;
1030 size += stack_vars[i].size;
1031 for (j = i; j != EOC; j = stack_vars[j].next)
1032 set_rtl (stack_vars[j].decl, NULL);
1034 return size;
1037 /* A subroutine of expand_one_var. Called to immediately assign rtl
1038 to a variable to be allocated in the stack frame. */
1040 static void
1041 expand_one_stack_var (tree var)
1043 HOST_WIDE_INT size, offset;
1044 unsigned byte_align;
1046 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1047 byte_align = align_local_variable (SSAVAR (var));
1049 /* We handle highly aligned variables in expand_stack_vars. */
1050 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1052 offset = alloc_stack_frame_space (size, byte_align);
1054 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1055 crtl->max_used_stack_slot_alignment, offset);
1058 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1059 that will reside in a hard register. */
1061 static void
1062 expand_one_hard_reg_var (tree var)
1064 rest_of_decl_compilation (var, 0, 0);
1067 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1068 that will reside in a pseudo register. */
1070 static void
1071 expand_one_register_var (tree var)
1073 tree decl = SSAVAR (var);
1074 tree type = TREE_TYPE (decl);
1075 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1076 rtx x = gen_reg_rtx (reg_mode);
1078 set_rtl (var, x);
1080 /* Note if the object is a user variable. */
1081 if (!DECL_ARTIFICIAL (decl))
1082 mark_user_reg (x);
1084 if (POINTER_TYPE_P (type))
1085 mark_reg_pointer (x, get_pointer_alignment (var));
1088 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1089 has some associated error, e.g. its type is error-mark. We just need
1090 to pick something that won't crash the rest of the compiler. */
1092 static void
1093 expand_one_error_var (tree var)
1095 enum machine_mode mode = DECL_MODE (var);
1096 rtx x;
1098 if (mode == BLKmode)
1099 x = gen_rtx_MEM (BLKmode, const0_rtx);
1100 else if (mode == VOIDmode)
1101 x = const0_rtx;
1102 else
1103 x = gen_reg_rtx (mode);
1105 SET_DECL_RTL (var, x);
1108 /* A subroutine of expand_one_var. VAR is a variable that will be
1109 allocated to the local stack frame. Return true if we wish to
1110 add VAR to STACK_VARS so that it will be coalesced with other
1111 variables. Return false to allocate VAR immediately.
1113 This function is used to reduce the number of variables considered
1114 for coalescing, which reduces the size of the quadratic problem. */
1116 static bool
1117 defer_stack_allocation (tree var, bool toplevel)
1119 /* If stack protection is enabled, *all* stack variables must be deferred,
1120 so that we can re-order the strings to the top of the frame. */
1121 if (flag_stack_protect)
1122 return true;
1124 /* We handle "large" alignment via dynamic allocation. We want to handle
1125 this extra complication in only one place, so defer them. */
1126 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1127 return true;
1129 /* Variables in the outermost scope automatically conflict with
1130 every other variable. The only reason to want to defer them
1131 at all is that, after sorting, we can more efficiently pack
1132 small variables in the stack frame. Continue to defer at -O2. */
1133 if (toplevel && optimize < 2)
1134 return false;
1136 /* Without optimization, *most* variables are allocated from the
1137 stack, which makes the quadratic problem large exactly when we
1138 want compilation to proceed as quickly as possible. On the
1139 other hand, we don't want the function's stack frame size to
1140 get completely out of hand. So we avoid adding scalars and
1141 "small" aggregates to the list at all. */
1142 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1143 return false;
1145 return true;
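/* For example (a sketch): without -fstack-protector, at -O0 a 4-byte
   block-local "int" is allocated immediately (its size is below the
   32-byte cutoff), while a 64-byte block-local array is deferred so it
   can share frame space with variables from disjoint scopes; with
   -fstack-protector every stack variable is deferred so that character
   buffers can be reordered towards the top of the frame.  */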
1148 /* A subroutine of expand_used_vars. Expand one variable according to
1149 its flavor. Variables to be placed on the stack are not actually
1150 expanded yet, merely recorded.
1151 When REALLY_EXPAND is false, only add stack values to be allocated.
 1152 Return the stack usage this variable is supposed to take.
1155 static HOST_WIDE_INT
1156 expand_one_var (tree var, bool toplevel, bool really_expand)
1158 unsigned int align = BITS_PER_UNIT;
1159 tree origvar = var;
1161 var = SSAVAR (var);
1163 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1165 /* Because we don't know if VAR will be in register or on stack,
1166 we conservatively assume it will be on stack even if VAR is
1167 eventually put into register after RA pass. For non-automatic
1168 variables, which won't be on stack, we collect alignment of
1169 type and ignore user specified alignment. */
1170 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1171 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1172 TYPE_MODE (TREE_TYPE (var)),
1173 TYPE_ALIGN (TREE_TYPE (var)));
1174 else if (DECL_HAS_VALUE_EXPR_P (var)
1175 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1176 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1177 or variables which were assigned a stack slot already by
1178 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1179 changed from the offset chosen to it. */
1180 align = crtl->stack_alignment_estimated;
1181 else
1182 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
 1184 /* If the variable alignment is very large we'll dynamically allocate
1185 it, which means that in-frame portion is just a pointer. */
1186 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1187 align = POINTER_SIZE;
1190 if (SUPPORTS_STACK_ALIGNMENT
1191 && crtl->stack_alignment_estimated < align)
1193 /* stack_alignment_estimated shouldn't change after stack
 1194 realign decision has been made. */
1195 gcc_assert(!crtl->stack_realign_processed);
1196 crtl->stack_alignment_estimated = align;
1199 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1200 So here we only make sure stack_alignment_needed >= align. */
1201 if (crtl->stack_alignment_needed < align)
1202 crtl->stack_alignment_needed = align;
1203 if (crtl->max_used_stack_slot_alignment < align)
1204 crtl->max_used_stack_slot_alignment = align;
1206 if (TREE_CODE (origvar) == SSA_NAME)
1208 gcc_assert (TREE_CODE (var) != VAR_DECL
1209 || (!DECL_EXTERNAL (var)
1210 && !DECL_HAS_VALUE_EXPR_P (var)
1211 && !TREE_STATIC (var)
1212 && TREE_TYPE (var) != error_mark_node
1213 && !DECL_HARD_REGISTER (var)
1214 && really_expand));
1216 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1218 else if (DECL_EXTERNAL (var))
1220 else if (DECL_HAS_VALUE_EXPR_P (var))
1222 else if (TREE_STATIC (var))
1224 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1226 else if (TREE_TYPE (var) == error_mark_node)
1228 if (really_expand)
1229 expand_one_error_var (var);
1231 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1233 if (really_expand)
1234 expand_one_hard_reg_var (var);
1236 else if (use_register_for_decl (var))
1238 if (really_expand)
1239 expand_one_register_var (origvar);
1241 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1243 if (really_expand)
1245 error ("size of variable %q+D is too large", var);
1246 expand_one_error_var (var);
1249 else if (defer_stack_allocation (var, toplevel))
1250 add_stack_var (origvar);
1251 else
1253 if (really_expand)
1254 expand_one_stack_var (origvar);
1255 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1257 return 0;
1260 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1261 expanding variables. Those variables that can be put into registers
1262 are allocated pseudos; those that can't are put on the stack.
1264 TOPLEVEL is true if this is the outermost BLOCK. */
1266 static void
1267 expand_used_vars_for_block (tree block, bool toplevel)
1269 tree t;
1271 /* Expand all variables at this level. */
1272 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1273 if (TREE_USED (t)
1274 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1275 || !DECL_NONSHAREABLE (t)))
1276 expand_one_var (t, toplevel, true);
 1278 /* Expand all variables at contained (nested) levels. */
1279 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1280 expand_used_vars_for_block (t, false);
1283 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1284 and clear TREE_USED on all local variables. */
1286 static void
1287 clear_tree_used (tree block)
1289 tree t;
1291 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1292 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1293 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1294 || !DECL_NONSHAREABLE (t))
1295 TREE_USED (t) = 0;
1297 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1298 clear_tree_used (t);
1301 /* Examine TYPE and determine a bit mask of the following features. */
1303 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1304 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1305 #define SPCT_HAS_ARRAY 4
1306 #define SPCT_HAS_AGGREGATE 8
1308 static unsigned int
1309 stack_protect_classify_type (tree type)
1311 unsigned int ret = 0;
1312 tree t;
1314 switch (TREE_CODE (type))
1316 case ARRAY_TYPE:
1317 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1318 if (t == char_type_node
1319 || t == signed_char_type_node
1320 || t == unsigned_char_type_node)
1322 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1323 unsigned HOST_WIDE_INT len;
1325 if (!TYPE_SIZE_UNIT (type)
1326 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1327 len = max;
1328 else
1329 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1331 if (len < max)
1332 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1333 else
1334 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1336 else
1337 ret = SPCT_HAS_ARRAY;
1338 break;
1340 case UNION_TYPE:
1341 case QUAL_UNION_TYPE:
1342 case RECORD_TYPE:
1343 ret = SPCT_HAS_AGGREGATE;
1344 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1345 if (TREE_CODE (t) == FIELD_DECL)
1346 ret |= stack_protect_classify_type (TREE_TYPE (t));
1347 break;
1349 default:
1350 break;
1353 return ret;
1356 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1357 part of the local stack frame. Remember if we ever return nonzero for
1358 any variable in this function. The return value is the phase number in
1359 which the variable should be allocated. */
1361 static int
1362 stack_protect_decl_phase (tree decl)
1364 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1365 int ret = 0;
1367 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1368 has_short_buffer = true;
1370 if (flag_stack_protect == 2)
1372 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1373 && !(bits & SPCT_HAS_AGGREGATE))
1374 ret = 1;
1375 else if (bits & SPCT_HAS_ARRAY)
1376 ret = 2;
1378 else
1379 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1381 if (ret)
1382 has_protected_decls = true;
1384 return ret;
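/* Example (illustrative, assuming the default --param ssp-buffer-size=8):
   "char buf[64]" classifies as a large char array and gets phase 1 under
   both -fstack-protector and -fstack-protector-all; "char buf[4]" is a
   small char array and gets phase 1 only when flag_stack_protect == 2;
   a plain "int a[16]" has SPCT_HAS_ARRAY only, so it gets phase 2 under
   -fstack-protector-all and phase 0 otherwise.  Phase-1 variables are
   allocated first, immediately after the guard slot.  */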
1387 /* Two helper routines that check for phase 1 and phase 2. These are used
1388 as callbacks for expand_stack_vars. */
1390 static bool
1391 stack_protect_decl_phase_1 (tree decl)
1393 return stack_protect_decl_phase (decl) == 1;
1396 static bool
1397 stack_protect_decl_phase_2 (tree decl)
1399 return stack_protect_decl_phase (decl) == 2;
1402 /* Ensure that variables in different stack protection phases conflict
1403 so that they are not merged and share the same stack slot. */
1405 static void
1406 add_stack_protection_conflicts (void)
1408 size_t i, j, n = stack_vars_num;
1409 unsigned char *phase;
1411 phase = XNEWVEC (unsigned char, n);
1412 for (i = 0; i < n; ++i)
1413 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1415 for (i = 0; i < n; ++i)
1417 unsigned char ph_i = phase[i];
1418 for (j = 0; j < i; ++j)
1419 if (ph_i != phase[j])
1420 add_stack_var_conflict (i, j);
1423 XDELETEVEC (phase);
1426 /* Create a decl for the guard at the top of the stack frame. */
1428 static void
1429 create_stack_guard (void)
1431 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1432 VAR_DECL, NULL, ptr_type_node);
1433 TREE_THIS_VOLATILE (guard) = 1;
1434 TREE_USED (guard) = 1;
1435 expand_one_stack_var (guard);
1436 crtl->stack_protect_guard = guard;
1439 /* Prepare for expanding variables. */
1440 static void
1441 init_vars_expansion (void)
1443 tree t;
1444 unsigned ix;
1445 /* Set TREE_USED on all variables in the local_decls. */
1446 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1447 TREE_USED (t) = 1;
1449 /* Clear TREE_USED on all variables associated with a block scope. */
1450 clear_tree_used (DECL_INITIAL (current_function_decl));
1452 /* Initialize local stack smashing state. */
1453 has_protected_decls = false;
1454 has_short_buffer = false;
1457 /* Free up stack variable graph data. */
1458 static void
1459 fini_vars_expansion (void)
1461 size_t i, n = stack_vars_num;
1462 for (i = 0; i < n; i++)
1463 BITMAP_FREE (stack_vars[i].conflicts);
1464 XDELETEVEC (stack_vars);
1465 XDELETEVEC (stack_vars_sorted);
1466 stack_vars = NULL;
1467 stack_vars_alloc = stack_vars_num = 0;
1468 pointer_map_destroy (decl_to_stack_part);
1469 decl_to_stack_part = NULL;
1472 /* Make a fair guess for the size of the stack frame of the function
1473 in NODE. This doesn't have to be exact, the result is only used in
1474 the inline heuristics. So we don't want to run the full stack var
1475 packing algorithm (which is quadratic in the number of stack vars).
1476 Instead, we calculate the total size of all stack vars. This turns
1477 out to be a pretty fair estimate -- packing of stack vars doesn't
1478 happen very often. */
1480 HOST_WIDE_INT
1481 estimated_stack_frame_size (struct cgraph_node *node)
1483 HOST_WIDE_INT size = 0;
1484 size_t i;
1485 tree var;
1486 tree old_cur_fun_decl = current_function_decl;
1487 referenced_var_iterator rvi;
1488 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1490 current_function_decl = node->decl;
1491 push_cfun (fn);
1493 gcc_checking_assert (gimple_referenced_vars (fn));
1494 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1495 size += expand_one_var (var, true, false);
1497 if (stack_vars_num > 0)
1499 /* Fake sorting the stack vars for account_stack_vars (). */
1500 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1501 for (i = 0; i < stack_vars_num; ++i)
1502 stack_vars_sorted[i] = i;
1503 size += account_stack_vars ();
1504 fini_vars_expansion ();
1506 pop_cfun ();
1507 current_function_decl = old_cur_fun_decl;
1508 return size;
1511 /* Expand all variables used in the function. */
1513 static void
1514 expand_used_vars (void)
1516 tree var, outer_block = DECL_INITIAL (current_function_decl);
1517 VEC(tree,heap) *maybe_local_decls = NULL;
1518 unsigned i;
1519 unsigned len;
1521 /* Compute the phase of the stack frame for this function. */
1523 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1524 int off = STARTING_FRAME_OFFSET % align;
1525 frame_phase = off ? align - off : 0;
1528 init_vars_expansion ();
1530 for (i = 0; i < SA.map->num_partitions; i++)
1532 tree var = partition_to_var (SA.map, i);
1534 gcc_assert (is_gimple_reg (var));
1535 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1536 expand_one_var (var, true, true);
1537 else
1539 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1540 contain the default def (representing the parm or result itself)
1541 we don't do anything here. But those which don't contain the
1542 default def (representing a temporary based on the parm/result)
1543 we need to allocate space just like for normal VAR_DECLs. */
1544 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1546 expand_one_var (var, true, true);
1547 gcc_assert (SA.partition_to_pseudo[i]);
1552 /* At this point all variables on the local_decls with TREE_USED
1553 set are not associated with any block scope. Lay them out. */
1555 len = VEC_length (tree, cfun->local_decls);
1556 FOR_EACH_LOCAL_DECL (cfun, i, var)
1558 bool expand_now = false;
1560 /* Expanded above already. */
1561 if (is_gimple_reg (var))
1563 TREE_USED (var) = 0;
1564 goto next;
1566 /* We didn't set a block for static or extern because it's hard
1567 to tell the difference between a global variable (re)declared
1568 in a local scope, and one that's really declared there to
1569 begin with. And it doesn't really matter much, since we're
1570 not giving them stack space. Expand them now. */
1571 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1572 expand_now = true;
1574 /* If the variable is not associated with any block, then it
1575 was created by the optimizers, and could be live anywhere
1576 in the function. */
1577 else if (TREE_USED (var))
1578 expand_now = true;
1580 /* Finally, mark all variables on the list as used. We'll use
1581 this in a moment when we expand those associated with scopes. */
1582 TREE_USED (var) = 1;
1584 if (expand_now)
1585 expand_one_var (var, true, true);
1587 next:
1588 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1590 rtx rtl = DECL_RTL_IF_SET (var);
1592 /* Keep artificial non-ignored vars in cfun->local_decls
1593 chain until instantiate_decls. */
1594 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1595 add_local_decl (cfun, var);
1596 else if (rtl == NULL_RTX)
1597 /* If rtl isn't set yet, which can happen e.g. with
1598 -fstack-protector, retry before returning from this
1599 function. */
1600 VEC_safe_push (tree, heap, maybe_local_decls, var);
1604 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
 1606 +-----------------+-----------------+
 1607 | ...processed... | ...duplicates...|
 1608 +-----------------+-----------------+
 1609                   ^
 1610                   +-- LEN points here.
1612 We just want the duplicates, as those are the artificial
1613 non-ignored vars that we want to keep until instantiate_decls.
1614 Move them down and truncate the array. */
1615 if (!VEC_empty (tree, cfun->local_decls))
1616 VEC_block_remove (tree, cfun->local_decls, 0, len);
1618 /* At this point, all variables within the block tree with TREE_USED
1619 set are actually used by the optimized function. Lay them out. */
1620 expand_used_vars_for_block (outer_block, true);
1622 if (stack_vars_num > 0)
1624 add_scope_conflicts ();
1625 /* Due to the way alias sets work, no variables with non-conflicting
1626 alias sets may be assigned the same address. Add conflicts to
1627 reflect this. */
1628 add_alias_set_conflicts ();
1630 /* If stack protection is enabled, we don't share space between
1631 vulnerable data and non-vulnerable data. */
1632 if (flag_stack_protect)
1633 add_stack_protection_conflicts ();
1635 /* Now that we have collected all stack variables, and have computed a
1636 minimal interference graph, attempt to save some stack space. */
1637 partition_stack_vars ();
1638 if (dump_file)
1639 dump_stack_var_partition ();
1642 /* There are several conditions under which we should create a
1643 stack guard: protect-all, alloca used, protected decls present. */
1644 if (flag_stack_protect == 2
1645 || (flag_stack_protect
1646 && (cfun->calls_alloca || has_protected_decls)))
1647 create_stack_guard ();
1649 /* Assign rtl to each variable based on these partitions. */
1650 if (stack_vars_num > 0)
1652 /* Reorder decls to be protected by iterating over the variables
1653 array multiple times, and allocating out of each phase in turn. */
1654 /* ??? We could probably integrate this into the qsort we did
1655 earlier, such that we naturally see these variables first,
1656 and thus naturally allocate things in the right order. */
1657 if (has_protected_decls)
1659 /* Phase 1 contains only character arrays. */
1660 expand_stack_vars (stack_protect_decl_phase_1);
1662 /* Phase 2 contains other kinds of arrays. */
1663 if (flag_stack_protect == 2)
1664 expand_stack_vars (stack_protect_decl_phase_2);
1667 expand_stack_vars (NULL);
1669 fini_vars_expansion ();
1672 /* If there were any artificial non-ignored vars without rtl
1673 found earlier, see if deferred stack allocation hasn't assigned
1674 rtl to them. */
1675 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1677 rtx rtl = DECL_RTL_IF_SET (var);
1679 /* Keep artificial non-ignored vars in cfun->local_decls
1680 chain until instantiate_decls. */
1681 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1682 add_local_decl (cfun, var);
1684 VEC_free (tree, heap, maybe_local_decls);
1686 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1687 if (STACK_ALIGNMENT_NEEDED)
1689 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1690 if (!FRAME_GROWS_DOWNWARD)
1691 frame_offset += align - 1;
1692 frame_offset &= -align;
1697 /* If we need to produce a detailed dump, print the tree representation
1698 for STMT to the dump file. SINCE is the last RTX after which the RTL
1699 generated for STMT should have been appended. */
1701 static void
1702 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1704 if (dump_file && (dump_flags & TDF_DETAILS))
1706 fprintf (dump_file, "\n;; ");
1707 print_gimple_stmt (dump_file, stmt, 0,
1708 TDF_SLIM | (dump_flags & TDF_LINENO));
1709 fprintf (dump_file, "\n");
1711 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1715 /* Maps the blocks that do not contain tree labels to rtx labels. */
1717 static struct pointer_map_t *lab_rtx_for_bb;
1719 /* Returns the label_rtx expression for a label starting basic block BB. */
1721 static rtx
1722 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1724 gimple_stmt_iterator gsi;
1725 tree lab;
1726 gimple lab_stmt;
1727 void **elt;
1729 if (bb->flags & BB_RTL)
1730 return block_label (bb);
1732 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1733 if (elt)
1734 return (rtx) *elt;
1736 /* Find the tree label if it is present. */
1738 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1740 lab_stmt = gsi_stmt (gsi);
1741 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1742 break;
1744 lab = gimple_label_label (lab_stmt);
1745 if (DECL_NONLOCAL (lab))
1746 break;
1748 return label_rtx (lab);
1751 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1752 *elt = gen_label_rtx ();
1753 return (rtx) *elt;
1757 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1758 of a basic block where we just expanded the conditional at the end,
1759 possibly clean up the CFG and instruction sequence. LAST is the
1760 last instruction before the just emitted jump sequence. */
1762 static void
1763 maybe_cleanup_end_of_block (edge e, rtx last)
1765 /* Special case: when jumpif decides that the condition is
1766 trivial it emits an unconditional jump (and the necessary
1767 barrier). But we still have two edges, the fallthru one is
1768 wrong. purge_dead_edges would clean this up later. Unfortunately
1769 we have to insert insns (and split edges) before
1770 find_many_sub_basic_blocks and hence before purge_dead_edges.
1771 But splitting edges might create new blocks which depend on the
1772 fact that if there are two edges there's no barrier. So the
1773 barrier would get lost and verify_flow_info would ICE. Instead
1774 of auditing all edge splitters to care for the barrier (which
1775 normally isn't there in a cleaned CFG), fix it here. */
1776 if (BARRIER_P (get_last_insn ()))
1778 rtx insn;
1779 remove_edge (e);
1780 /* Now, we have a single successor block, if we have insns to
1781 insert on the remaining edge we potentially will insert
1782 it at the end of this block (if the dest block isn't feasible)
1783 in order to avoid splitting the edge. This insertion will take
1784 place in front of the last jump. But we might have emitted
1785 multiple jumps (conditional and one unconditional) to the
1786 same destination. Inserting in front of the last one then
1787 is a problem. See PR 40021. We fix this by deleting all
1788 jumps except the last unconditional one. */
1789 insn = PREV_INSN (get_last_insn ());
1790 /* Make sure we have an unconditional jump. Otherwise we're
1791 confused. */
1792 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1793 for (insn = PREV_INSN (insn); insn != last;)
1795 insn = PREV_INSN (insn);
1796 if (JUMP_P (NEXT_INSN (insn)))
1798 if (!any_condjump_p (NEXT_INSN (insn)))
1800 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1801 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1803 delete_insn (NEXT_INSN (insn));
1809 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1810 Returns a new basic block if we've terminated the current basic
1811 block and created a new one. */
1813 static basic_block
1814 expand_gimple_cond (basic_block bb, gimple stmt)
1816 basic_block new_bb, dest;
1817 edge new_edge;
1818 edge true_edge;
1819 edge false_edge;
1820 rtx last2, last;
1821 enum tree_code code;
1822 tree op0, op1;
1824 code = gimple_cond_code (stmt);
1825 op0 = gimple_cond_lhs (stmt);
1826 op1 = gimple_cond_rhs (stmt);
1827 /* We're sometimes presented with such code:
1828 D.123_1 = x < y;
1829 if (D.123_1 != 0)
1831 This would expand to two comparisons which then later might
1832 be cleaned up by combine. But some pattern matchers like if-conversion
1833 work better when there's only one compare, so make up for this
 1834 here as a special exception if TER would have made the same change. */
1835 if (gimple_cond_single_var_p (stmt)
1836 && SA.values
1837 && TREE_CODE (op0) == SSA_NAME
1838 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1840 gimple second = SSA_NAME_DEF_STMT (op0);
1841 if (gimple_code (second) == GIMPLE_ASSIGN)
1843 enum tree_code code2 = gimple_assign_rhs_code (second);
1844 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1846 code = code2;
1847 op0 = gimple_assign_rhs1 (second);
1848 op1 = gimple_assign_rhs2 (second);
1850 /* If jumps are cheap turn some more codes into
1851 jumpy sequences. */
1852 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1854 if ((code2 == BIT_AND_EXPR
1855 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1856 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1857 || code2 == TRUTH_AND_EXPR)
1859 code = TRUTH_ANDIF_EXPR;
1860 op0 = gimple_assign_rhs1 (second);
1861 op1 = gimple_assign_rhs2 (second);
1863 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1865 code = TRUTH_ORIF_EXPR;
1866 op0 = gimple_assign_rhs1 (second);
1867 op1 = gimple_assign_rhs2 (second);
1873 last2 = last = get_last_insn ();
1875 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1876 set_curr_insn_source_location (gimple_location (stmt));
1877 set_curr_insn_block (gimple_block (stmt));
1879 /* These flags have no purpose in RTL land. */
1880 true_edge->flags &= ~EDGE_TRUE_VALUE;
1881 false_edge->flags &= ~EDGE_FALSE_VALUE;
1883 /* We can either have a pure conditional jump with one fallthru edge or
1884 a two-way jump that needs to be decomposed into two basic blocks. */
1885 if (false_edge->dest == bb->next_bb)
1887 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1888 true_edge->probability);
1889 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1890 if (true_edge->goto_locus)
1892 set_curr_insn_source_location (true_edge->goto_locus);
1893 set_curr_insn_block (true_edge->goto_block);
1894 true_edge->goto_locus = curr_insn_locator ();
1896 true_edge->goto_block = NULL;
1897 false_edge->flags |= EDGE_FALLTHRU;
1898 maybe_cleanup_end_of_block (false_edge, last);
1899 return NULL;
1901 if (true_edge->dest == bb->next_bb)
1903 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1904 false_edge->probability);
1905 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1906 if (false_edge->goto_locus)
1908 set_curr_insn_source_location (false_edge->goto_locus);
1909 set_curr_insn_block (false_edge->goto_block);
1910 false_edge->goto_locus = curr_insn_locator ();
1912 false_edge->goto_block = NULL;
1913 true_edge->flags |= EDGE_FALLTHRU;
1914 maybe_cleanup_end_of_block (true_edge, last);
1915 return NULL;
1918 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1919 true_edge->probability);
1920 last = get_last_insn ();
1921 if (false_edge->goto_locus)
1923 set_curr_insn_source_location (false_edge->goto_locus);
1924 set_curr_insn_block (false_edge->goto_block);
1925 false_edge->goto_locus = curr_insn_locator ();
1927 false_edge->goto_block = NULL;
1928 emit_jump (label_rtx_for_bb (false_edge->dest));
1930 BB_END (bb) = last;
1931 if (BARRIER_P (BB_END (bb)))
1932 BB_END (bb) = PREV_INSN (BB_END (bb));
1933 update_bb_for_insn (bb);
1935 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1936 dest = false_edge->dest;
1937 redirect_edge_succ (false_edge, new_bb);
1938 false_edge->flags |= EDGE_FALLTHRU;
1939 new_bb->count = false_edge->count;
1940 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1941 new_edge = make_edge (new_bb, dest, 0);
1942 new_edge->probability = REG_BR_PROB_BASE;
1943 new_edge->count = new_bb->count;
1944 if (BARRIER_P (BB_END (new_bb)))
1945 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1946 update_bb_for_insn (new_bb);
1948 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1950 if (true_edge->goto_locus)
1952 set_curr_insn_source_location (true_edge->goto_locus);
1953 set_curr_insn_block (true_edge->goto_block);
1954 true_edge->goto_locus = curr_insn_locator ();
1956 true_edge->goto_block = NULL;
1958 return new_bb;
1961 /* Mark all calls that can have a transaction restart. */
1963 static void
1964 mark_transaction_restart_calls (gimple stmt)
1966 struct tm_restart_node dummy;
1967 void **slot;
1969 if (!cfun->gimple_df->tm_restart)
1970 return;
1972 dummy.stmt = stmt;
1973 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1974 if (slot)
1976 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1977 tree list = n->label_or_list;
1978 rtx insn;
1980 for (insn = next_real_insn (get_last_insn ());
1981 !CALL_P (insn);
1982 insn = next_real_insn (insn))
1983 continue;
1985 if (TREE_CODE (list) == LABEL_DECL)
1986 add_reg_note (insn, REG_TM, label_rtx (list));
1987 else
1988 for (; list ; list = TREE_CHAIN (list))
1989 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1993 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1994 statement STMT. */
1996 static void
1997 expand_call_stmt (gimple stmt)
1999 tree exp, decl, lhs;
2000 bool builtin_p;
2001 size_t i;
2003 if (gimple_call_internal_p (stmt))
2005 expand_internal_call (stmt);
2006 return;
2009 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2011 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2012 decl = gimple_call_fndecl (stmt);
2013 builtin_p = decl && DECL_BUILT_IN (decl);
2015 /* If this is not a builtin function, the function type through which the
2016 call is made may be different from the type of the function. */
2017 if (!builtin_p)
2018 CALL_EXPR_FN (exp)
2019 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2020 CALL_EXPR_FN (exp));
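/* E.g. (hypothetical source): for a call such as
       ((int (*) (int)) foo) (42);
   the type the call is made through differs from foo's declared type,
   which is why gimple_call_fntype rather than TREE_TYPE (decl) is
   used above.  */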
2022 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2023 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2025 for (i = 0; i < gimple_call_num_args (stmt); i++)
2027 tree arg = gimple_call_arg (stmt, i);
2028 gimple def;
2029 /* TER can substitute addresses into arguments of builtin functions,
2030 giving us a chance to infer more accurate alignment information. See PR39954. */
2031 if (builtin_p
2032 && TREE_CODE (arg) == SSA_NAME
2033 && (def = get_gimple_for_ssa_name (arg))
2034 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2035 arg = gimple_assign_rhs1 (def);
2036 CALL_EXPR_ARG (exp, i) = arg;
2039 if (gimple_has_side_effects (stmt))
2040 TREE_SIDE_EFFECTS (exp) = 1;
2042 if (gimple_call_nothrow_p (stmt))
2043 TREE_NOTHROW (exp) = 1;
2045 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2046 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2047 if (decl
2048 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2049 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2050 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2051 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2052 else
2053 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2054 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2055 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2056 TREE_BLOCK (exp) = gimple_block (stmt);
2058 /* Ensure RTL is created for debug args. */
2059 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2061 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2062 unsigned int ix;
2063 tree dtemp;
2065 if (debug_args)
2066 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2068 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2069 expand_debug_expr (dtemp);
2073 lhs = gimple_call_lhs (stmt);
2074 if (lhs)
2075 expand_assignment (lhs, exp, false);
2076 else
2077 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2079 mark_transaction_restart_calls (stmt);
2082 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2083 STMT that doesn't require special handling for outgoing edges; that
2084 is, no tail calls and no GIMPLE_COND. */
2086 static void
2087 expand_gimple_stmt_1 (gimple stmt)
2089 tree op0;
2091 set_curr_insn_source_location (gimple_location (stmt));
2092 set_curr_insn_block (gimple_block (stmt));
2094 switch (gimple_code (stmt))
2096 case GIMPLE_GOTO:
2097 op0 = gimple_goto_dest (stmt);
2098 if (TREE_CODE (op0) == LABEL_DECL)
2099 expand_goto (op0);
2100 else
2101 expand_computed_goto (op0);
2102 break;
2103 case GIMPLE_LABEL:
2104 expand_label (gimple_label_label (stmt));
2105 break;
2106 case GIMPLE_NOP:
2107 case GIMPLE_PREDICT:
2108 break;
2109 case GIMPLE_SWITCH:
2110 expand_case (stmt);
2111 break;
2112 case GIMPLE_ASM:
2113 expand_asm_stmt (stmt);
2114 break;
2115 case GIMPLE_CALL:
2116 expand_call_stmt (stmt);
2117 break;
2119 case GIMPLE_RETURN:
2120 op0 = gimple_return_retval (stmt);
2122 if (op0 && op0 != error_mark_node)
2124 tree result = DECL_RESULT (current_function_decl);
2126 /* If we are not returning the current function's RESULT_DECL,
2127 build an assignment to it. */
2128 if (op0 != result)
2130 /* I believe that a function's RESULT_DECL is unique. */
2131 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2133 /* ??? We'd like to use simply expand_assignment here,
2134 but this fails if the value is of BLKmode but the return
2135 decl is a register. expand_return has special handling
2136 for this combination, which eventually should move
2137 to common code. See comments there. Until then, let's
2138 build a modify expression :-/ */
2139 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2140 result, op0);
2143 if (!op0)
2144 expand_null_return ();
2145 else
2146 expand_return (op0);
2147 break;
2149 case GIMPLE_ASSIGN:
2151 tree lhs = gimple_assign_lhs (stmt);
2153 /* Tree expand used to fiddle with |= and &= of two bitfield
2154 COMPONENT_REFs here. This can't happen with gimple: the LHS
2155 of a binary assign must be a gimple register. */
2157 if (TREE_CODE (lhs) != SSA_NAME
2158 || get_gimple_rhs_class (gimple_expr_code (stmt))
2159 == GIMPLE_SINGLE_RHS)
2161 tree rhs = gimple_assign_rhs1 (stmt);
2162 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2163 == GIMPLE_SINGLE_RHS);
2164 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2165 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2166 if (TREE_CLOBBER_P (rhs))
2167 /* This is a clobber marking that this LHS is going
2168 out of scope, so there is nothing to expand. */
2170 else
2171 expand_assignment (lhs, rhs,
2172 gimple_assign_nontemporal_move_p (stmt));
2174 else
2176 rtx target, temp;
2177 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2178 struct separate_ops ops;
2179 bool promoted = false;
2181 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2182 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2183 promoted = true;
2185 ops.code = gimple_assign_rhs_code (stmt);
2186 ops.type = TREE_TYPE (lhs);
2187 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2189 case GIMPLE_TERNARY_RHS:
2190 ops.op2 = gimple_assign_rhs3 (stmt);
2191 /* Fallthru */
2192 case GIMPLE_BINARY_RHS:
2193 ops.op1 = gimple_assign_rhs2 (stmt);
2194 /* Fallthru */
2195 case GIMPLE_UNARY_RHS:
2196 ops.op0 = gimple_assign_rhs1 (stmt);
2197 break;
2198 default:
2199 gcc_unreachable ();
2201 ops.location = gimple_location (stmt);
2203 /* If we want to use a nontemporal store, force the value into a
2204 register first. If we store into a promoted register,
2205 don't expand directly to the target. */
2206 temp = nontemporal || promoted ? NULL_RTX : target;
2207 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2208 EXPAND_NORMAL);
2210 if (temp == target)
2212 else if (promoted)
2214 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2215 /* If TEMP is a VOIDmode constant, use convert_modes to make
2216 sure that we properly convert it. */
2217 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2219 temp = convert_modes (GET_MODE (target),
2220 TYPE_MODE (ops.type),
2221 temp, unsignedp);
2222 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2223 GET_MODE (target), temp, unsignedp);
2226 convert_move (SUBREG_REG (target), temp, unsignedp);
2228 else if (nontemporal && emit_storent_insn (target, temp))
2230 else
2232 temp = force_operand (temp, target);
2233 if (temp != target)
2234 emit_move_insn (target, temp);
2238 break;
2240 default:
2241 gcc_unreachable ();
2245 /* Expand one gimple statement STMT and return the last RTL instruction
2246 before any of the newly generated ones.
2248 In addition to generating the necessary RTL instructions this also
2249 sets REG_EH_REGION notes if necessary and sets the current source
2250 location for diagnostics. */
2252 static rtx
2253 expand_gimple_stmt (gimple stmt)
2255 location_t saved_location = input_location;
2256 rtx last = get_last_insn ();
2257 int lp_nr;
2259 gcc_assert (cfun);
2261 /* We need to save and restore the current source location so that errors
2262 discovered during expansion are emitted with the right location. But
2263 it would be better if the diagnostic routines used the source location
2264 embedded in the tree nodes rather than globals. */
2265 if (gimple_has_location (stmt))
2266 input_location = gimple_location (stmt);
2268 expand_gimple_stmt_1 (stmt);
2270 /* Free any temporaries used to evaluate this statement. */
2271 free_temp_slots ();
2273 input_location = saved_location;
2275 /* Mark all insns that may trap. */
2276 lp_nr = lookup_stmt_eh_lp (stmt);
2277 if (lp_nr)
2279 rtx insn;
2280 for (insn = next_real_insn (last); insn;
2281 insn = next_real_insn (insn))
2283 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2284 /* If we want exceptions for non-call insns, any
2285 may_trap_p instruction may throw. */
2286 && GET_CODE (PATTERN (insn)) != CLOBBER
2287 && GET_CODE (PATTERN (insn)) != USE
2288 && insn_could_throw_p (insn))
2289 make_reg_eh_region_note (insn, 0, lp_nr);
2293 return last;
2296 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2297 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2298 generated a tail call (something that might be denied by the ABI
2299 rules governing the call; see calls.c).
2301 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2302 can still reach the rest of BB. The case here is __builtin_sqrt,
2303 where the NaN result goes through the external function (with a
2304 tailcall) and the normal result happens via a sqrt instruction. */
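/* A sketch of that situation (assumed shape, not from any particular
   target's expander):
       if (x is NaN or out of range)
         return sqrt (x);            <- expanded as a sibcall, no fallthru
       result = hardware sqrt of x;  <- falls through to the rest of BB
   so the block ends with both a sibcall and a path that continues in
   this function.  */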
2306 static basic_block
2307 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2309 rtx last2, last;
2310 edge e;
2311 edge_iterator ei;
2312 int probability;
2313 gcov_type count;
2315 last2 = last = expand_gimple_stmt (stmt);
2317 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2318 if (CALL_P (last) && SIBLING_CALL_P (last))
2319 goto found;
2321 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2323 *can_fallthru = true;
2324 return NULL;
2326 found:
2327 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2328 Any instructions emitted here are about to be deleted. */
2329 do_pending_stack_adjust ();
2331 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2332 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2333 EH or abnormal edges, we shouldn't have created a tail call in
2334 the first place. So it seems to me we should just be removing
2335 all edges here, or redirecting the existing fallthru edge to
2336 the exit block. */
2338 probability = 0;
2339 count = 0;
2341 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2343 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2345 if (e->dest != EXIT_BLOCK_PTR)
2347 e->dest->count -= e->count;
2348 e->dest->frequency -= EDGE_FREQUENCY (e);
2349 if (e->dest->count < 0)
2350 e->dest->count = 0;
2351 if (e->dest->frequency < 0)
2352 e->dest->frequency = 0;
2354 count += e->count;
2355 probability += e->probability;
2356 remove_edge (e);
2358 else
2359 ei_next (&ei);
2362 /* This is somewhat ugly: the call_expr expander often emits instructions
2363 after the sibcall (to perform the function return). These confuse the
2364 find_many_sub_basic_blocks code, so we need to get rid of them. */
2365 last = NEXT_INSN (last);
2366 gcc_assert (BARRIER_P (last));
2368 *can_fallthru = false;
2369 while (NEXT_INSN (last))
2371 /* For instance, the sqrt builtin expander expands an if () with a
2372 sibcall in the `then' arm and a label for the `else' arm. */
2373 if (LABEL_P (NEXT_INSN (last)))
2375 *can_fallthru = true;
2376 break;
2378 delete_insn (NEXT_INSN (last));
2381 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2382 e->probability += probability;
2383 e->count += count;
2384 BB_END (bb) = last;
2385 update_bb_for_insn (bb);
2387 if (NEXT_INSN (last))
2389 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2391 last = BB_END (bb);
2392 if (BARRIER_P (last))
2393 BB_END (bb) = PREV_INSN (last);
2396 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2398 return bb;
2401 /* Return the difference between the floor and the truncated result of
2402 a signed division by OP1 with remainder MOD. */
2403 static rtx
2404 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2406 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2407 return gen_rtx_IF_THEN_ELSE
2408 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2409 gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_LT (BImode,
2411 gen_rtx_DIV (mode, op1, mod),
2412 const0_rtx),
2413 constm1_rtx, const0_rtx),
2414 const0_rtx);
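/* For instance (illustrative values): for -7 / 2, truncation gives -3
   with remainder MOD = -1.  Since MOD != 0 and OP1 / MOD = 2 / -1 < 0,
   the adjustment is -1, and -3 + -1 = -4 == floor (-7 / 2).  */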
2417 /* Return the difference between the ceil and the truncated result of
2418 a signed division by OP1 with remainder MOD. */
2419 static rtx
2420 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2422 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2423 return gen_rtx_IF_THEN_ELSE
2424 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2425 gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_GT (BImode,
2427 gen_rtx_DIV (mode, op1, mod),
2428 const0_rtx),
2429 const1_rtx, const0_rtx),
2430 const0_rtx);
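/* For instance (illustrative values): for 7 / 2, truncation gives 3
   with remainder MOD = 1.  Since MOD != 0 and OP1 / MOD = 2 / 1 > 0,
   the adjustment is 1, and 3 + 1 = 4 == ceil (7 / 2).  */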
2433 /* Return the difference between the ceil and the truncated result of
2434 an unsigned division by OP1 with remainder MOD. */
2435 static rtx
2436 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2438 /* (mod != 0 ? 1 : 0) */
2439 return gen_rtx_IF_THEN_ELSE
2440 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2441 const1_rtx, const0_rtx);
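/* For instance (illustrative values): for 7u / 2u, truncation gives 3
   with remainder MOD = 1; MOD != 0, so the adjustment is 1 and
   3 + 1 = 4 == ceil (7u / 2u).  */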
2444 /* Return the difference between the rounded and the truncated result
2445 of a signed division by OP1 with remainder MOD. Halfway cases are
2446 rounded away from zero, rather than to the nearest even number. */
2447 static rtx
2448 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2450 /* (abs (mod) >= abs (op1) - abs (mod)
2451 ? (op1 / mod > 0 ? 1 : -1)
2452 : 0) */
2453 return gen_rtx_IF_THEN_ELSE
2454 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2455 gen_rtx_MINUS (mode,
2456 gen_rtx_ABS (mode, op1),
2457 gen_rtx_ABS (mode, mod))),
2458 gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GT (BImode,
2460 gen_rtx_DIV (mode, op1, mod),
2461 const0_rtx),
2462 const1_rtx, constm1_rtx),
2463 const0_rtx);
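/* For instance (illustrative values): for 7 / 2, truncation gives 3
   with remainder MOD = 1.  abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1,
   and OP1 / MOD = 2 > 0, so the adjustment is 1 and 3 + 1 = 4, i.e.
   the halfway case 3.5 is rounded away from zero.  For 7 / 3 the test
   fails (1 >= 2 is false) and no adjustment is made.  */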
2466 /* Return the difference between the rounded and the truncated result
2467 of an unsigned division by OP1 with remainder MOD. Halfway cases
2468 are rounded away from zero, rather than to the nearest even
2469 number. */
2470 static rtx
2471 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2473 /* (mod >= op1 - mod ? 1 : 0) */
2474 return gen_rtx_IF_THEN_ELSE
2475 (mode, gen_rtx_GE (BImode, mod,
2476 gen_rtx_MINUS (mode, op1, mod)),
2477 const1_rtx, const0_rtx);
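/* For instance (illustrative values): for 7u / 2u, MOD = 1 and
   OP1 - MOD = 1, so MOD >= OP1 - MOD holds and the adjustment is 1,
   giving 3 + 1 = 4; for 7u / 3u, 1 >= 2 fails and the truncated
   quotient 2 is kept.  */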
2480 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2481 any rtl. */
2483 static rtx
2484 convert_debug_memory_address (enum machine_mode mode, rtx x,
2485 addr_space_t as)
2487 enum machine_mode xmode = GET_MODE (x);
2489 #ifndef POINTERS_EXTEND_UNSIGNED
2490 gcc_assert (mode == Pmode
2491 || mode == targetm.addr_space.address_mode (as));
2492 gcc_assert (xmode == mode || xmode == VOIDmode);
2493 #else
2494 rtx temp;
2496 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2498 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2499 return x;
2501 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2502 x = simplify_gen_subreg (mode, x, xmode,
2503 subreg_lowpart_offset
2504 (mode, xmode));
2505 else if (POINTERS_EXTEND_UNSIGNED > 0)
2506 x = gen_rtx_ZERO_EXTEND (mode, x);
2507 else if (!POINTERS_EXTEND_UNSIGNED)
2508 x = gen_rtx_SIGN_EXTEND (mode, x);
2509 else
2511 switch (GET_CODE (x))
2513 case SUBREG:
2514 if ((SUBREG_PROMOTED_VAR_P (x)
2515 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2516 || (GET_CODE (SUBREG_REG (x)) == PLUS
2517 && REG_P (XEXP (SUBREG_REG (x), 0))
2518 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2519 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2520 && GET_MODE (SUBREG_REG (x)) == mode)
2521 return SUBREG_REG (x);
2522 break;
2523 case LABEL_REF:
2524 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2525 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2526 return temp;
2527 case SYMBOL_REF:
2528 temp = shallow_copy_rtx (x);
2529 PUT_MODE (temp, mode);
2530 return temp;
2531 case CONST:
2532 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2533 if (temp)
2534 temp = gen_rtx_CONST (mode, temp);
2535 return temp;
2536 case PLUS:
2537 case MINUS:
2538 if (CONST_INT_P (XEXP (x, 1)))
2540 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2541 if (temp)
2542 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2544 break;
2545 default:
2546 break;
2548 /* Don't know how to express ptr_extend as an operation in debug info. */
2549 return NULL;
2551 #endif /* POINTERS_EXTEND_UNSIGNED */
2553 return x;
2556 /* Return an RTX equivalent to the value of the parameter DECL. */
2558 static rtx
2559 expand_debug_parm_decl (tree decl)
2561 rtx incoming = DECL_INCOMING_RTL (decl);
2563 if (incoming
2564 && GET_MODE (incoming) != BLKmode
2565 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2566 || (MEM_P (incoming)
2567 && REG_P (XEXP (incoming, 0))
2568 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2570 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2572 #ifdef HAVE_window_save
2573 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2574 If the target machine has an explicit window save instruction, the
2575 actual entry value is the corresponding OUTGOING_REGNO instead. */
2576 if (REG_P (incoming)
2577 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2578 incoming
2579 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2580 OUTGOING_REGNO (REGNO (incoming)), 0);
2581 else if (MEM_P (incoming))
2583 rtx reg = XEXP (incoming, 0);
2584 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2586 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2587 incoming = replace_equiv_address_nv (incoming, reg);
2590 #endif
2592 ENTRY_VALUE_EXP (rtl) = incoming;
2593 return rtl;
2596 if (incoming
2597 && GET_MODE (incoming) != BLKmode
2598 && !TREE_ADDRESSABLE (decl)
2599 && MEM_P (incoming)
2600 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2601 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2602 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2603 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2604 return incoming;
2606 return NULL_RTX;
2609 /* Return an RTX equivalent to the value of the tree expression EXP. */
2611 static rtx
2612 expand_debug_expr (tree exp)
2614 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2615 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2616 enum machine_mode inner_mode = VOIDmode;
2617 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2618 addr_space_t as;
2620 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2622 case tcc_expression:
2623 switch (TREE_CODE (exp))
2625 case COND_EXPR:
2626 case DOT_PROD_EXPR:
2627 case WIDEN_MULT_PLUS_EXPR:
2628 case WIDEN_MULT_MINUS_EXPR:
2629 case FMA_EXPR:
2630 goto ternary;
2632 case TRUTH_ANDIF_EXPR:
2633 case TRUTH_ORIF_EXPR:
2634 case TRUTH_AND_EXPR:
2635 case TRUTH_OR_EXPR:
2636 case TRUTH_XOR_EXPR:
2637 goto binary;
2639 case TRUTH_NOT_EXPR:
2640 goto unary;
2642 default:
2643 break;
2645 break;
2647 ternary:
2648 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2649 if (!op2)
2650 return NULL_RTX;
2651 /* Fall through. */
2653 binary:
2654 case tcc_binary:
2655 case tcc_comparison:
2656 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2657 if (!op1)
2658 return NULL_RTX;
2659 /* Fall through. */
2661 unary:
2662 case tcc_unary:
2663 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2664 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2665 if (!op0)
2666 return NULL_RTX;
2667 break;
2669 case tcc_type:
2670 case tcc_statement:
2671 gcc_unreachable ();
2673 case tcc_constant:
2674 case tcc_exceptional:
2675 case tcc_declaration:
2676 case tcc_reference:
2677 case tcc_vl_exp:
2678 break;
2681 switch (TREE_CODE (exp))
2683 case STRING_CST:
2684 if (!lookup_constant_def (exp))
2686 if (strlen (TREE_STRING_POINTER (exp)) + 1
2687 != (size_t) TREE_STRING_LENGTH (exp))
2688 return NULL_RTX;
2689 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2690 op0 = gen_rtx_MEM (BLKmode, op0);
2691 set_mem_attributes (op0, exp, 0);
2692 return op0;
2694 /* Fall through... */
2696 case INTEGER_CST:
2697 case REAL_CST:
2698 case FIXED_CST:
2699 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2700 return op0;
2702 case COMPLEX_CST:
2703 gcc_assert (COMPLEX_MODE_P (mode));
2704 op0 = expand_debug_expr (TREE_REALPART (exp));
2705 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2706 return gen_rtx_CONCAT (mode, op0, op1);
2708 case DEBUG_EXPR_DECL:
2709 op0 = DECL_RTL_IF_SET (exp);
2711 if (op0)
2712 return op0;
2714 op0 = gen_rtx_DEBUG_EXPR (mode);
2715 DEBUG_EXPR_TREE_DECL (op0) = exp;
2716 SET_DECL_RTL (exp, op0);
2718 return op0;
2720 case VAR_DECL:
2721 case PARM_DECL:
2722 case FUNCTION_DECL:
2723 case LABEL_DECL:
2724 case CONST_DECL:
2725 case RESULT_DECL:
2726 op0 = DECL_RTL_IF_SET (exp);
2728 /* This decl was probably optimized away. */
2729 if (!op0)
2731 if (TREE_CODE (exp) != VAR_DECL
2732 || DECL_EXTERNAL (exp)
2733 || !TREE_STATIC (exp)
2734 || !DECL_NAME (exp)
2735 || DECL_HARD_REGISTER (exp)
2736 || DECL_IN_CONSTANT_POOL (exp)
2737 || mode == VOIDmode)
2738 return NULL;
2740 op0 = make_decl_rtl_for_debug (exp);
2741 if (!MEM_P (op0)
2742 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2743 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2744 return NULL;
2746 else
2747 op0 = copy_rtx (op0);
2749 if (GET_MODE (op0) == BLKmode
2750 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2751 below would ICE. While it is likely a FE bug,
2752 try to be robust here. See PR43166. */
2753 || mode == BLKmode
2754 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2756 gcc_assert (MEM_P (op0));
2757 op0 = adjust_address_nv (op0, mode, 0);
2758 return op0;
2761 /* Fall through. */
2763 adjust_mode:
2764 case PAREN_EXPR:
2765 case NOP_EXPR:
2766 case CONVERT_EXPR:
2768 inner_mode = GET_MODE (op0);
2770 if (mode == inner_mode)
2771 return op0;
2773 if (inner_mode == VOIDmode)
2775 if (TREE_CODE (exp) == SSA_NAME)
2776 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2777 else
2778 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2779 if (mode == inner_mode)
2780 return op0;
2783 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2785 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2786 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2787 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2788 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2789 else
2790 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2792 else if (FLOAT_MODE_P (mode))
2794 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2795 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2796 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2797 else
2798 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2800 else if (FLOAT_MODE_P (inner_mode))
2802 if (unsignedp)
2803 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2804 else
2805 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2807 else if (CONSTANT_P (op0)
2808 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2809 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2810 subreg_lowpart_offset (mode,
2811 inner_mode));
2812 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2813 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2814 : unsignedp)
2815 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2816 else
2817 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2819 return op0;
2822 case MEM_REF:
2823 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2825 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2826 TREE_OPERAND (exp, 0),
2827 TREE_OPERAND (exp, 1));
2828 if (newexp)
2829 return expand_debug_expr (newexp);
2831 /* FALLTHROUGH */
2832 case INDIRECT_REF:
2833 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2834 if (!op0)
2835 return NULL;
2837 if (TREE_CODE (exp) == MEM_REF)
2839 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2840 || (GET_CODE (op0) == PLUS
2841 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2842 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2843 Instead just use get_inner_reference. */
2844 goto component_ref;
2846 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2847 if (!op1 || !CONST_INT_P (op1))
2848 return NULL;
2850 op0 = plus_constant (op0, INTVAL (op1));
2853 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2854 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2855 else
2856 as = ADDR_SPACE_GENERIC;
2858 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2859 op0, as);
2860 if (op0 == NULL_RTX)
2861 return NULL;
2863 op0 = gen_rtx_MEM (mode, op0);
2864 set_mem_attributes (op0, exp, 0);
2865 if (TREE_CODE (exp) == MEM_REF
2866 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2867 set_mem_expr (op0, NULL_TREE);
2868 set_mem_addr_space (op0, as);
2870 return op0;
2872 case TARGET_MEM_REF:
2873 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2874 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2875 return NULL;
2877 op0 = expand_debug_expr
2878 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2879 if (!op0)
2880 return NULL;
2882 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2883 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2884 else
2885 as = ADDR_SPACE_GENERIC;
2887 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2888 op0, as);
2889 if (op0 == NULL_RTX)
2890 return NULL;
2892 op0 = gen_rtx_MEM (mode, op0);
2894 set_mem_attributes (op0, exp, 0);
2895 set_mem_addr_space (op0, as);
2897 return op0;
2899 component_ref:
2900 case ARRAY_REF:
2901 case ARRAY_RANGE_REF:
2902 case COMPONENT_REF:
2903 case BIT_FIELD_REF:
2904 case REALPART_EXPR:
2905 case IMAGPART_EXPR:
2906 case VIEW_CONVERT_EXPR:
2908 enum machine_mode mode1;
2909 HOST_WIDE_INT bitsize, bitpos;
2910 tree offset;
2911 int volatilep = 0;
2912 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2913 &mode1, &unsignedp, &volatilep, false);
2914 rtx orig_op0;
2916 if (bitsize == 0)
2917 return NULL;
2919 orig_op0 = op0 = expand_debug_expr (tem);
2921 if (!op0)
2922 return NULL;
2924 if (offset)
2926 enum machine_mode addrmode, offmode;
2928 if (!MEM_P (op0))
2929 return NULL;
2931 op0 = XEXP (op0, 0);
2932 addrmode = GET_MODE (op0);
2933 if (addrmode == VOIDmode)
2934 addrmode = Pmode;
2936 op1 = expand_debug_expr (offset);
2937 if (!op1)
2938 return NULL;
2940 offmode = GET_MODE (op1);
2941 if (offmode == VOIDmode)
2942 offmode = TYPE_MODE (TREE_TYPE (offset));
2944 if (addrmode != offmode)
2945 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2946 subreg_lowpart_offset (addrmode,
2947 offmode));
2949 /* Don't use offset_address here; we don't need a
2950 recognizable address, and we don't want to generate
2951 code. */
2952 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2953 op0, op1));
2956 if (MEM_P (op0))
2958 if (mode1 == VOIDmode)
2959 /* Bitfield. */
2960 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2961 if (bitpos >= BITS_PER_UNIT)
2963 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2964 bitpos %= BITS_PER_UNIT;
2966 else if (bitpos < 0)
2968 HOST_WIDE_INT units
2969 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2970 op0 = adjust_address_nv (op0, mode1, units);
2971 bitpos += units * BITS_PER_UNIT;
2973 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2974 op0 = adjust_address_nv (op0, mode, 0);
2975 else if (GET_MODE (op0) != mode1)
2976 op0 = adjust_address_nv (op0, mode1, 0);
2977 else
2978 op0 = copy_rtx (op0);
2979 if (op0 == orig_op0)
2980 op0 = shallow_copy_rtx (op0);
2981 set_mem_attributes (op0, exp, 0);
2984 if (bitpos == 0 && mode == GET_MODE (op0))
2985 return op0;
2987 if (bitpos < 0)
2988 return NULL;
2990 if (GET_MODE (op0) == BLKmode)
2991 return NULL;
2993 if ((bitpos % BITS_PER_UNIT) == 0
2994 && bitsize == GET_MODE_BITSIZE (mode1))
2996 enum machine_mode opmode = GET_MODE (op0);
2998 if (opmode == VOIDmode)
2999 opmode = TYPE_MODE (TREE_TYPE (tem));
3001 /* This condition may hold if we're expanding the address
3002 right past the end of an array that turned out not to
3003 be addressable (i.e., the address was only computed in
3004 debug stmts). The gen_subreg below would rightfully
3005 crash, and the address doesn't really exist, so just
3006 drop it. */
3007 if (bitpos >= GET_MODE_BITSIZE (opmode))
3008 return NULL;
3010 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3011 return simplify_gen_subreg (mode, op0, opmode,
3012 bitpos / BITS_PER_UNIT);
3015 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3016 && TYPE_UNSIGNED (TREE_TYPE (exp))
3017 ? SIGN_EXTRACT
3018 : ZERO_EXTRACT, mode,
3019 GET_MODE (op0) != VOIDmode
3020 ? GET_MODE (op0)
3021 : TYPE_MODE (TREE_TYPE (tem)),
3022 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3025 case ABS_EXPR:
3026 return simplify_gen_unary (ABS, mode, op0, mode);
3028 case NEGATE_EXPR:
3029 return simplify_gen_unary (NEG, mode, op0, mode);
3031 case BIT_NOT_EXPR:
3032 return simplify_gen_unary (NOT, mode, op0, mode);
3034 case FLOAT_EXPR:
3035 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3036 0)))
3037 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3038 inner_mode);
3040 case FIX_TRUNC_EXPR:
3041 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3042 inner_mode);
3044 case POINTER_PLUS_EXPR:
3045 /* For the rare target where pointers are not the same size as
3046 size_t, we need to check for mismatched modes and correct
3047 the addend. */
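/* For instance (hypothetical target): with 64-bit pointers but a
   32-bit sizetype, OP1 arrives in a narrower mode than OP0 and is
   sign-extended (or, in the opposite case, truncated) below before
   the PLUS can be formed.  */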
3048 if (op0 && op1
3049 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3050 && GET_MODE (op0) != GET_MODE (op1))
3052 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3053 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3054 GET_MODE (op1));
3055 else
3056 /* We always sign-extend, regardless of the signedness of
3057 the operand, because the operand is always unsigned
3058 here even if the original C expression is signed. */
3059 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3060 GET_MODE (op1));
3062 /* Fall through. */
3063 case PLUS_EXPR:
3064 return simplify_gen_binary (PLUS, mode, op0, op1);
3066 case MINUS_EXPR:
3067 return simplify_gen_binary (MINUS, mode, op0, op1);
3069 case MULT_EXPR:
3070 return simplify_gen_binary (MULT, mode, op0, op1);
3072 case RDIV_EXPR:
3073 case TRUNC_DIV_EXPR:
3074 case EXACT_DIV_EXPR:
3075 if (unsignedp)
3076 return simplify_gen_binary (UDIV, mode, op0, op1);
3077 else
3078 return simplify_gen_binary (DIV, mode, op0, op1);
3080 case TRUNC_MOD_EXPR:
3081 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3083 case FLOOR_DIV_EXPR:
3084 if (unsignedp)
3085 return simplify_gen_binary (UDIV, mode, op0, op1);
3086 else
3088 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3089 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3090 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3091 return simplify_gen_binary (PLUS, mode, div, adj);
3094 case FLOOR_MOD_EXPR:
3095 if (unsignedp)
3096 return simplify_gen_binary (UMOD, mode, op0, op1);
3097 else
3099 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3100 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3101 adj = simplify_gen_unary (NEG, mode,
3102 simplify_gen_binary (MULT, mode, adj, op1),
3103 mode);
3104 return simplify_gen_binary (PLUS, mode, mod, adj);
3107 case CEIL_DIV_EXPR:
3108 if (unsignedp)
3110 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3111 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3112 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3113 return simplify_gen_binary (PLUS, mode, div, adj);
3115 else
3117 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3118 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3119 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3120 return simplify_gen_binary (PLUS, mode, div, adj);
3123 case CEIL_MOD_EXPR:
3124 if (unsignedp)
3126 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3127 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3128 adj = simplify_gen_unary (NEG, mode,
3129 simplify_gen_binary (MULT, mode, adj, op1),
3130 mode);
3131 return simplify_gen_binary (PLUS, mode, mod, adj);
3133 else
3135 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3136 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3137 adj = simplify_gen_unary (NEG, mode,
3138 simplify_gen_binary (MULT, mode, adj, op1),
3139 mode);
3140 return simplify_gen_binary (PLUS, mode, mod, adj);
3143 case ROUND_DIV_EXPR:
3144 if (unsignedp)
3146 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3147 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3148 rtx adj = round_udiv_adjust (mode, mod, op1);
3149 return simplify_gen_binary (PLUS, mode, div, adj);
3151 else
3153 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3154 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3155 rtx adj = round_sdiv_adjust (mode, mod, op1);
3156 return simplify_gen_binary (PLUS, mode, div, adj);
3159 case ROUND_MOD_EXPR:
3160 if (unsignedp)
3162 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3163 rtx adj = round_udiv_adjust (mode, mod, op1);
3164 adj = simplify_gen_unary (NEG, mode,
3165 simplify_gen_binary (MULT, mode, adj, op1),
3166 mode);
3167 return simplify_gen_binary (PLUS, mode, mod, adj);
3169 else
3171 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3172 rtx adj = round_sdiv_adjust (mode, mod, op1);
3173 adj = simplify_gen_unary (NEG, mode,
3174 simplify_gen_binary (MULT, mode, adj, op1),
3175 mode);
3176 return simplify_gen_binary (PLUS, mode, mod, adj);
3179 case LSHIFT_EXPR:
3180 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3182 case RSHIFT_EXPR:
3183 if (unsignedp)
3184 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3185 else
3186 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3188 case LROTATE_EXPR:
3189 return simplify_gen_binary (ROTATE, mode, op0, op1);
3191 case RROTATE_EXPR:
3192 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3194 case MIN_EXPR:
3195 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3197 case MAX_EXPR:
3198 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3200 case BIT_AND_EXPR:
3201 case TRUTH_AND_EXPR:
3202 return simplify_gen_binary (AND, mode, op0, op1);
3204 case BIT_IOR_EXPR:
3205 case TRUTH_OR_EXPR:
3206 return simplify_gen_binary (IOR, mode, op0, op1);
3208 case BIT_XOR_EXPR:
3209 case TRUTH_XOR_EXPR:
3210 return simplify_gen_binary (XOR, mode, op0, op1);
3212 case TRUTH_ANDIF_EXPR:
3213 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3215 case TRUTH_ORIF_EXPR:
3216 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3218 case TRUTH_NOT_EXPR:
3219 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3221 case LT_EXPR:
3222 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3223 op0, op1);
3225 case LE_EXPR:
3226 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3227 op0, op1);
3229 case GT_EXPR:
3230 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3231 op0, op1);
3233 case GE_EXPR:
3234 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3235 op0, op1);
3237 case EQ_EXPR:
3238 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3240 case NE_EXPR:
3241 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3243 case UNORDERED_EXPR:
3244 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3246 case ORDERED_EXPR:
3247 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3249 case UNLT_EXPR:
3250 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3252 case UNLE_EXPR:
3253 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3255 case UNGT_EXPR:
3256 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3258 case UNGE_EXPR:
3259 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3261 case UNEQ_EXPR:
3262 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3264 case LTGT_EXPR:
3265 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3267 case COND_EXPR:
3268 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3270 case COMPLEX_EXPR:
3271 gcc_assert (COMPLEX_MODE_P (mode));
3272 if (GET_MODE (op0) == VOIDmode)
3273 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3274 if (GET_MODE (op1) == VOIDmode)
3275 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3276 return gen_rtx_CONCAT (mode, op0, op1);
3278 case CONJ_EXPR:
3279 if (GET_CODE (op0) == CONCAT)
3280 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3281 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3282 XEXP (op0, 1),
3283 GET_MODE_INNER (mode)));
3284 else
3286 enum machine_mode imode = GET_MODE_INNER (mode);
3287 rtx re, im;
3289 if (MEM_P (op0))
3291 re = adjust_address_nv (op0, imode, 0);
3292 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3294 else
3296 enum machine_mode ifmode = int_mode_for_mode (mode);
3297 enum machine_mode ihmode = int_mode_for_mode (imode);
3298 rtx halfsize;
3299 if (ifmode == BLKmode || ihmode == BLKmode)
3300 return NULL;
3301 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3302 re = op0;
3303 if (mode != ifmode)
3304 re = gen_rtx_SUBREG (ifmode, re, 0);
3305 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3306 if (imode != ihmode)
3307 re = gen_rtx_SUBREG (imode, re, 0);
3308 im = copy_rtx (op0);
3309 if (mode != ifmode)
3310 im = gen_rtx_SUBREG (ifmode, im, 0);
3311 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3312 if (imode != ihmode)
3313 im = gen_rtx_SUBREG (imode, im, 0);
3315 im = gen_rtx_NEG (imode, im);
3316 return gen_rtx_CONCAT (mode, re, im);
3319 case ADDR_EXPR:
3320 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3321 if (!op0 || !MEM_P (op0))
3323 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3324 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3325 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3326 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3327 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3328 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3330 if (handled_component_p (TREE_OPERAND (exp, 0)))
3332 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3333 tree decl
3334 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3335 &bitoffset, &bitsize, &maxsize);
3336 if ((TREE_CODE (decl) == VAR_DECL
3337 || TREE_CODE (decl) == PARM_DECL
3338 || TREE_CODE (decl) == RESULT_DECL)
3339 && (!TREE_ADDRESSABLE (decl)
3340 || target_for_debug_bind (decl))
3341 && (bitoffset % BITS_PER_UNIT) == 0
3342 && bitsize > 0
3343 && bitsize == maxsize)
3344 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3345 bitoffset / BITS_PER_UNIT);
3348 return NULL;
3351 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3352 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3354 return op0;
3356 case VECTOR_CST:
3358 unsigned i;
3360 op0 = gen_rtx_CONCATN
3361 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3363 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3365 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3366 if (!op1)
3367 return NULL;
3368 XVECEXP (op0, 0, i) = op1;
3371 return op0;
3374 case CONSTRUCTOR:
3375 if (TREE_CLOBBER_P (exp))
3376 return NULL;
3377 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3379 unsigned i;
3380 tree val;
3382 op0 = gen_rtx_CONCATN
3383 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3387 op1 = expand_debug_expr (val);
3388 if (!op1)
3389 return NULL;
3390 XVECEXP (op0, 0, i) = op1;
3393 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3395 op1 = expand_debug_expr
3396 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3398 if (!op1)
3399 return NULL;
3401 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3402 XVECEXP (op0, 0, i) = op1;
3405 return op0;
3407 else
3408 goto flag_unsupported;
3410 case CALL_EXPR:
3411 /* ??? Maybe handle some builtins? */
3412 return NULL;
3414 case SSA_NAME:
3416 gimple g = get_gimple_for_ssa_name (exp);
3417 if (g)
3419 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3420 if (!op0)
3421 return NULL;
3423 else
3425 int part = var_to_partition (SA.map, exp);
3427 if (part == NO_PARTITION)
3429 /* If this is a reference to an incoming value of a parameter
3430 that is never used in the code, or where the incoming
3431 value itself is never used, use the PARM_DECL's
3432 DECL_RTL if set. */
3433 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3434 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3436 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3437 if (op0)
3438 goto adjust_mode;
3439 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3440 if (op0)
3441 goto adjust_mode;
3443 return NULL;
3446 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3448 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3450 goto adjust_mode;
3453 case ERROR_MARK:
3454 return NULL;
3456 /* Vector stuff. For most of these tree codes there are no corresponding rtl codes. */
3457 case REALIGN_LOAD_EXPR:
3458 case REDUC_MAX_EXPR:
3459 case REDUC_MIN_EXPR:
3460 case REDUC_PLUS_EXPR:
3461 case VEC_COND_EXPR:
3462 case VEC_LSHIFT_EXPR:
3463 case VEC_PACK_FIX_TRUNC_EXPR:
3464 case VEC_PACK_SAT_EXPR:
3465 case VEC_PACK_TRUNC_EXPR:
3466 case VEC_RSHIFT_EXPR:
3467 case VEC_UNPACK_FLOAT_HI_EXPR:
3468 case VEC_UNPACK_FLOAT_LO_EXPR:
3469 case VEC_UNPACK_HI_EXPR:
3470 case VEC_UNPACK_LO_EXPR:
3471 case VEC_WIDEN_MULT_HI_EXPR:
3472 case VEC_WIDEN_MULT_LO_EXPR:
3473 case VEC_WIDEN_LSHIFT_HI_EXPR:
3474 case VEC_WIDEN_LSHIFT_LO_EXPR:
3475 case VEC_PERM_EXPR:
3476 return NULL;
3478 /* Misc codes. */
3479 case ADDR_SPACE_CONVERT_EXPR:
3480 case FIXED_CONVERT_EXPR:
3481 case OBJ_TYPE_REF:
3482 case WITH_SIZE_EXPR:
3483 return NULL;
3485 case DOT_PROD_EXPR:
3486 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3487 && SCALAR_INT_MODE_P (mode))
3490 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3491 0)))
3492 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3493 inner_mode);
3495 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3496 1)))
3497 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3498 inner_mode);
3499 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3500 return simplify_gen_binary (PLUS, mode, op0, op2);
3502 return NULL;
3504 case WIDEN_MULT_EXPR:
3505 case WIDEN_MULT_PLUS_EXPR:
3506 case WIDEN_MULT_MINUS_EXPR:
3507 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3508 && SCALAR_INT_MODE_P (mode))
3510 inner_mode = GET_MODE (op0);
3511 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3512 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3513 else
3514 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3515 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3516 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3517 else
3518 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3519 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3520 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3521 return op0;
3522 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3523 return simplify_gen_binary (PLUS, mode, op0, op2);
3524 else
3525 return simplify_gen_binary (MINUS, mode, op2, op0);
3527 return NULL;
3529 case WIDEN_SUM_EXPR:
3530 case WIDEN_LSHIFT_EXPR:
3531 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3532 && SCALAR_INT_MODE_P (mode))
3535 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3536 0)))
3537 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3538 inner_mode);
3539 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3540 ? ASHIFT : PLUS, mode, op0, op1);
3542 return NULL;
3544 case FMA_EXPR:
3545 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3547 default:
3548 flag_unsupported:
3549 #ifdef ENABLE_CHECKING
3550 debug_tree (exp);
3551 gcc_unreachable ();
3552 #else
3553 return NULL;
3554 #endif
3558 /* Return an RTX equivalent to the source bind value of the tree expression
3559 EXP. */
3561 static rtx
3562 expand_debug_source_expr (tree exp)
3564 rtx op0 = NULL_RTX;
3565 enum machine_mode mode = VOIDmode, inner_mode;
3567 switch (TREE_CODE (exp))
3569 case PARM_DECL:
3571 mode = DECL_MODE (exp);
3572 op0 = expand_debug_parm_decl (exp);
3573 if (op0)
3574 break;
3575 /* See whether this is an argument that has been completely
3576 optimized out. */
3577 if (!DECL_RTL_SET_P (exp)
3578 && !DECL_INCOMING_RTL (exp)
3579 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3581 tree aexp = exp;
3582 if (DECL_ABSTRACT_ORIGIN (exp))
3583 aexp = DECL_ABSTRACT_ORIGIN (exp);
3584 if (DECL_CONTEXT (aexp)
3585 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3587 VEC(tree, gc) **debug_args;
3588 unsigned int ix;
3589 tree ddecl;
3590 #ifdef ENABLE_CHECKING
3591 tree parm;
3592 for (parm = DECL_ARGUMENTS (current_function_decl);
3593 parm; parm = DECL_CHAIN (parm))
3594 gcc_assert (parm != exp
3595 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3596 #endif
3597 debug_args = decl_debug_args_lookup (current_function_decl);
3598 if (debug_args != NULL)
3600 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3601 ix += 2)
3602 if (ddecl == aexp)
3603 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3607 break;
3609 default:
3610 break;
3613 if (op0 == NULL_RTX)
3614 return NULL_RTX;
3616 inner_mode = GET_MODE (op0);
3617 if (mode == inner_mode)
3618 return op0;
3620 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3622 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3623 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3624 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3625 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3626 else
3627 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3629 else if (FLOAT_MODE_P (mode))
3630 gcc_unreachable ();
3631 else if (FLOAT_MODE_P (inner_mode))
3633 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3634 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3635 else
3636 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3638 else if (CONSTANT_P (op0)
3639 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3640 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3641 subreg_lowpart_offset (mode, inner_mode));
3642 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3643 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3644 else
3645 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3647 return op0;
3650 /* Expand the _LOCs in debug insns. We run this after expanding all
3651 regular insns, so that any variables referenced in the function
3652 will have their DECL_RTLs set. */
3654 static void
3655 expand_debug_locations (void)
3657 rtx insn;
3658 rtx last = get_last_insn ();
3659 int save_strict_alias = flag_strict_aliasing;
3661 /* New alias sets while setting up memory attributes cause
3662 -fcompare-debug failures, even though they don't bring about any
3663 codegen changes. */
3664 flag_strict_aliasing = 0;
3666 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3667 if (DEBUG_INSN_P (insn))
3669 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3670 rtx val;
3671 enum machine_mode mode;
3673 if (value == NULL_TREE)
3674 val = NULL_RTX;
3675 else
3677 if (INSN_VAR_LOCATION_STATUS (insn)
3678 == VAR_INIT_STATUS_UNINITIALIZED)
3679 val = expand_debug_source_expr (value);
3680 else
3681 val = expand_debug_expr (value);
3682 gcc_assert (last == get_last_insn ());
3685 if (!val)
3686 val = gen_rtx_UNKNOWN_VAR_LOC ();
3687 else
3689 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3691 gcc_assert (mode == GET_MODE (val)
3692 || (GET_MODE (val) == VOIDmode
3693 && (CONST_INT_P (val)
3694 || GET_CODE (val) == CONST_FIXED
3695 || GET_CODE (val) == CONST_DOUBLE
3696 || GET_CODE (val) == LABEL_REF)));
3699 INSN_VAR_LOCATION_LOC (insn) = val;
3702 flag_strict_aliasing = save_strict_alias;
3705 /* Expand basic block BB from GIMPLE trees to RTL. */
3707 static basic_block
3708 expand_gimple_basic_block (basic_block bb)
3710 gimple_stmt_iterator gsi;
3711 gimple_seq stmts;
3712 gimple stmt = NULL;
3713 rtx note, last;
3714 edge e;
3715 edge_iterator ei;
3716 void **elt;
3718 if (dump_file)
3719 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3720 bb->index);
3722 /* Note that since we are now transitioning from GIMPLE to RTL, we
3723 cannot use the gsi_*_bb() routines because they expect the basic
3724 block to be in GIMPLE, instead of RTL. Therefore, we need to
3725 access the BB sequence directly. */
3726 stmts = bb_seq (bb);
3727 bb->il.gimple = NULL;
3728 rtl_profile_for_bb (bb);
3729 init_rtl_bb_info (bb);
3730 bb->flags |= BB_RTL;
3732 /* Remove the RETURN_EXPR if we may fall through to the exit
3733 instead. */
3734 gsi = gsi_last (stmts);
3735 if (!gsi_end_p (gsi)
3736 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3738 gimple ret_stmt = gsi_stmt (gsi);
3740 gcc_assert (single_succ_p (bb));
3741 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3743 if (bb->next_bb == EXIT_BLOCK_PTR
3744 && !gimple_return_retval (ret_stmt))
3746 gsi_remove (&gsi, false);
3747 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3751 gsi = gsi_start (stmts);
3752 if (!gsi_end_p (gsi))
3754 stmt = gsi_stmt (gsi);
3755 if (gimple_code (stmt) != GIMPLE_LABEL)
3756 stmt = NULL;
3759 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3761 if (stmt || elt)
3763 last = get_last_insn ();
3765 if (stmt)
3767 expand_gimple_stmt (stmt);
3768 gsi_next (&gsi);
3771 if (elt)
3772 emit_label ((rtx) *elt);
3774 /* Java emits line number notes at the top of labels.
3775 ??? Make this go away once line number notes are obsoleted. */
3776 BB_HEAD (bb) = NEXT_INSN (last);
3777 if (NOTE_P (BB_HEAD (bb)))
3778 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3779 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3781 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3783 else
3784 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3786 NOTE_BASIC_BLOCK (note) = bb;
3788 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3790 basic_block new_bb;
3792 stmt = gsi_stmt (gsi);
3794 /* If this statement is a non-debug one, and we generate debug
3795 insns, then this one might be the last real use of a TERed
3796 SSA_NAME, but where there are still some debug uses further
3797 down. Expanding the current SSA name in those later debug
3798 uses by its RHS might lead to wrong debug info, as coalescing
3799 might place the operands of that RHS into the same
3800 pseudo as something else. Like so:
3801 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3802 use(a_1);
3803 a_2 = ...
3804 #DEBUG ... => a_1
3805 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3806 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3807 the write to a_2 would actually have clobbered the place which
3808 formerly held a_0.
3810 So, instead of that, we recognize the situation, and generate
3811 debug temporaries at the last real use of TERed SSA names:
3812 a_1 = a_0 + 1;
3813 #DEBUG #D1 => a_1
3814 use(a_1);
3815 a_2 = ...
3816 #DEBUG ... => #D1
3818 if (MAY_HAVE_DEBUG_INSNS
3819 && SA.values
3820 && !is_gimple_debug (stmt))
3822 ssa_op_iter iter;
3823 tree op;
3824 gimple def;
3826 location_t sloc = get_curr_insn_source_location ();
3827 tree sblock = get_curr_insn_block ();
3829 /* Look for SSA names that have their last use here (TERed
3830 names always have only one real use). */
3831 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3832 if ((def = get_gimple_for_ssa_name (op)))
3834 imm_use_iterator imm_iter;
3835 use_operand_p use_p;
3836 bool have_debug_uses = false;
3838 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3840 if (gimple_debug_bind_p (USE_STMT (use_p)))
3842 have_debug_uses = true;
3843 break;
3847 if (have_debug_uses)
3849 /* OP is a TERed SSA name, with DEF its defining
3850 statement, and OP is used in further debug
3851 instructions. Generate a debug temporary, and
3852 replace all uses of OP in debug insns with that
3853 temporary. */
3854 gimple debugstmt;
3855 tree value = gimple_assign_rhs_to_tree (def);
3856 tree vexpr = make_node (DEBUG_EXPR_DECL);
3857 rtx val;
3858 enum machine_mode mode;
3860 set_curr_insn_source_location (gimple_location (def));
3861 set_curr_insn_block (gimple_block (def));
3863 DECL_ARTIFICIAL (vexpr) = 1;
3864 TREE_TYPE (vexpr) = TREE_TYPE (value);
3865 if (DECL_P (value))
3866 mode = DECL_MODE (value);
3867 else
3868 mode = TYPE_MODE (TREE_TYPE (value));
3869 DECL_MODE (vexpr) = mode;
3871 val = gen_rtx_VAR_LOCATION
3872 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3874 emit_debug_insn (val);
3876 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3878 if (!gimple_debug_bind_p (debugstmt))
3879 continue;
3881 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3882 SET_USE (use_p, vexpr);
3884 update_stmt (debugstmt);
3888 set_curr_insn_source_location (sloc);
3889 set_curr_insn_block (sblock);
3892 currently_expanding_gimple_stmt = stmt;
3894 /* Expand this statement, then evaluate the resulting RTL and
3895 fixup the CFG accordingly. */
3896 if (gimple_code (stmt) == GIMPLE_COND)
3898 new_bb = expand_gimple_cond (bb, stmt);
3899 if (new_bb)
3900 return new_bb;
3902 else if (gimple_debug_bind_p (stmt))
3904 location_t sloc = get_curr_insn_source_location ();
3905 tree sblock = get_curr_insn_block ();
3906 gimple_stmt_iterator nsi = gsi;
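/* Expand the whole run of consecutive debug bind statements here,
advancing GSI past them so the outer loop does not visit them
again.  */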
3908 for (;;)
3910 tree var = gimple_debug_bind_get_var (stmt);
3911 tree value;
3912 rtx val;
3913 enum machine_mode mode;
3915 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3916 && TREE_CODE (var) != LABEL_DECL
3917 && !target_for_debug_bind (var))
3918 goto delink_debug_stmt;
3920 if (gimple_debug_bind_has_value_p (stmt))
3921 value = gimple_debug_bind_get_value (stmt);
3922 else
3923 value = NULL_TREE;
3925 last = get_last_insn ();
3927 set_curr_insn_source_location (gimple_location (stmt));
3928 set_curr_insn_block (gimple_block (stmt));
3930 if (DECL_P (var))
3931 mode = DECL_MODE (var);
3932 else
3933 mode = TYPE_MODE (TREE_TYPE (var));
3935 val = gen_rtx_VAR_LOCATION
3936 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3938 emit_debug_insn (val);
3940 if (dump_file && (dump_flags & TDF_DETAILS))
3942 /* We can't dump the insn with a TREE where an RTX
3943 is expected. */
3944 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3945 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3946 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3949 delink_debug_stmt:
3950 /* In order not to generate too many debug temporaries,
3951 we delink all uses of debug statements we already expanded.
3952 Therefore debug statements between definition and real
3953 use of TERed SSA names will continue to use the SSA name,
3954 and not be replaced with debug temps. */
3955 delink_stmt_imm_use (stmt);
3957 gsi = nsi;
3958 gsi_next (&nsi);
3959 if (gsi_end_p (nsi))
3960 break;
3961 stmt = gsi_stmt (nsi);
3962 if (!gimple_debug_bind_p (stmt))
3963 break;
3966 set_curr_insn_source_location (sloc);
3967 set_curr_insn_block (sblock);
3969 else if (gimple_debug_source_bind_p (stmt))
3971 location_t sloc = get_curr_insn_source_location ();
3972 tree sblock = get_curr_insn_block ();
3973 tree var = gimple_debug_source_bind_get_var (stmt);
3974 tree value = gimple_debug_source_bind_get_value (stmt);
3975 rtx val;
3976 enum machine_mode mode;
3978 last = get_last_insn ();
3980 set_curr_insn_source_location (gimple_location (stmt));
3981 set_curr_insn_block (gimple_block (stmt));
3983 mode = DECL_MODE (var);
3985 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3986 VAR_INIT_STATUS_UNINITIALIZED);
3988 emit_debug_insn (val);
3990 if (dump_file && (dump_flags & TDF_DETAILS))
3992 /* We can't dump the insn with a TREE where an RTX
3993 is expected. */
3994 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3995 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3996 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3999 set_curr_insn_source_location (sloc);
4000 set_curr_insn_block (sblock);
4002 else
4004 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4006 bool can_fallthru;
4007 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4008 if (new_bb)
4010 if (can_fallthru)
4011 bb = new_bb;
4012 else
4013 return new_bb;
4016 else
4018 def_operand_p def_p;
4019 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4021 if (def_p != NULL)
4023 /* Ignore this stmt if it is in the list of
4024 replaceable expressions. */
4025 if (SA.values
4026 && bitmap_bit_p (SA.values,
4027 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4028 continue;
4030 last = expand_gimple_stmt (stmt);
4031 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4036 currently_expanding_gimple_stmt = NULL;
4038 /* Expand implicit goto and convert goto_locus. */
4039 FOR_EACH_EDGE (e, ei, bb->succs)
4041 if (e->goto_locus && e->goto_block)
4043 set_curr_insn_source_location (e->goto_locus);
4044 set_curr_insn_block (e->goto_block);
4045 e->goto_locus = curr_insn_locator ();
4047 e->goto_block = NULL;
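/* From now on the edge carries an insn locator, which encodes both
the location and the block, instead of a tree location/block pair.  */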
4048 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4050 emit_jump (label_rtx_for_bb (e->dest));
4051 e->flags &= ~EDGE_FALLTHRU;
4055 /* Expanded RTL can create a jump in the last instruction of the block.
4056 Later this might be assumed to be a jump to the successor and break edge insertion.
4057 We need to insert a dummy move to prevent this. PR41440. */
4058 if (single_succ_p (bb)
4059 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4060 && (last = get_last_insn ())
4061 && JUMP_P (last))
4063 rtx dummy = gen_reg_rtx (SImode);
4064 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
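/* The self-move is dead code; its only purpose is to keep the jump
from being the last insn of the block, and it is expected to be
removed again by later RTL dead code elimination.  */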
4067 do_pending_stack_adjust ();
4069 /* Find the block tail. The last insn in the block is the insn
4070 before a barrier and/or table jump insn. */
4071 last = get_last_insn ();
4072 if (BARRIER_P (last))
4073 last = PREV_INSN (last);
4074 if (JUMP_TABLE_DATA_P (last))
4075 last = PREV_INSN (PREV_INSN (last));
4076 BB_END (bb) = last;
4078 update_bb_for_insn (bb);
4080 return bb;
4084 /* Create a basic block for initialization code. */
4086 static basic_block
4087 construct_init_block (void)
4089 basic_block init_block, first_block;
4090 edge e = NULL;
4091 int flags;
4093 /* Multiple entry points not supported yet. */
4094 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4095 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4096 init_rtl_bb_info (EXIT_BLOCK_PTR);
4097 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4098 EXIT_BLOCK_PTR->flags |= BB_RTL;
4100 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4102 /* When the entry edge points to the first basic block, we don't need a jump;
4103 otherwise we have to jump to the proper target. */
4104 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4106 tree label = gimple_block_label (e->dest);
4108 emit_jump (label_rtx (label));
4109 flags = 0;
4111 else
4112 flags = EDGE_FALLTHRU;
4114 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4115 get_last_insn (),
4116 ENTRY_BLOCK_PTR);
4117 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4118 init_block->count = ENTRY_BLOCK_PTR->count;
4119 if (e)
4121 first_block = e->dest;
4122 redirect_edge_succ (e, init_block);
4123 e = make_edge (init_block, first_block, flags);
4125 else
4126 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4127 e->probability = REG_BR_PROB_BASE;
4128 e->count = ENTRY_BLOCK_PTR->count;
4130 update_bb_for_insn (init_block);
4131 return init_block;
4134 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4135 found in the block tree. */
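/* For example, the outermost block (DECL_INITIAL of the function)
gets level 0, its subblocks level 1, their subblocks level 2, and
so on; change_scope uses these numbers to find the common parent
of two blocks quickly.  */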
4137 static void
4138 set_block_levels (tree block, int level)
4140 while (block)
4142 BLOCK_NUMBER (block) = level;
4143 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4144 block = BLOCK_CHAIN (block);
4148 /* Create a block containing landing pads and similar stuff. */
4150 static void
4151 construct_exit_block (void)
4153 rtx head = get_last_insn ();
4154 rtx end;
4155 basic_block exit_block;
4156 edge e, e2;
4157 unsigned ix;
4158 edge_iterator ei;
4159 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4161 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4163 /* Make sure the locus is set to the end of the function, so that
4164 epilogue line numbers and warnings are set properly. */
4165 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4166 input_location = cfun->function_end_locus;
4168 /* The following insns belong to the top scope. */
4169 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4171 /* Generate rtl for function exit. */
4172 expand_function_end ();
4174 end = get_last_insn ();
4175 if (head == end)
4176 return;
4177 /* While emitting the function end we could move the end of the last basic block. */
4179 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4180 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4181 head = NEXT_INSN (head);
4182 exit_block = create_basic_block (NEXT_INSN (head), end,
4183 EXIT_BLOCK_PTR->prev_bb);
4184 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4185 exit_block->count = EXIT_BLOCK_PTR->count;
4187 ix = 0;
4188 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4190 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4191 if (!(e->flags & EDGE_ABNORMAL))
4192 redirect_edge_succ (e, exit_block);
4193 else
4194 ix++;
4197 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4198 e->probability = REG_BR_PROB_BASE;
4199 e->count = EXIT_BLOCK_PTR->count;
4200 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4201 if (e2 != e)
4203 e->count -= e2->count;
4204 exit_block->count -= e2->count;
4205 exit_block->frequency -= EDGE_FREQUENCY (e2);
4207 if (e->count < 0)
4208 e->count = 0;
4209 if (exit_block->count < 0)
4210 exit_block->count = 0;
4211 if (exit_block->frequency < 0)
4212 exit_block->frequency = 0;
4213 update_bb_for_insn (exit_block);
4216 /* Helper function for discover_nonconstant_array_refs.
4217 Look for ARRAY_REF nodes with non-constant indexes and mark them
4218 addressable. */
4220 static tree
4221 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4222 void *data ATTRIBUTE_UNUSED)
4224 tree t = *tp;
4226 if (IS_TYPE_OR_DECL_P (t))
4227 *walk_subtrees = 0;
4228 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4230 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4231 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4232 && (!TREE_OPERAND (t, 2)
4233 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4234 || (TREE_CODE (t) == COMPONENT_REF
4235 && (!TREE_OPERAND (t,2)
4236 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4237 || TREE_CODE (t) == BIT_FIELD_REF
4238 || TREE_CODE (t) == REALPART_EXPR
4239 || TREE_CODE (t) == IMAGPART_EXPR
4240 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4241 || CONVERT_EXPR_P (t))
4242 t = TREE_OPERAND (t, 0);
4244 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4246 t = get_base_address (t);
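/* Only non-BLKmode bases matter here: a BLKmode decl already lives
in memory, so there is nothing to force out of a register.  */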
4247 if (t && DECL_P (t)
4248 && DECL_MODE (t) != BLKmode)
4249 TREE_ADDRESSABLE (t) = 1;
4252 *walk_subtrees = 0;
4255 return NULL_TREE;
4258 /* RTL expansion is not able to compile array references with variable
4259 offsets for arrays stored in a single register. Discover such
4260 expressions and mark variables as addressable to avoid this
4261 scenario. */
4263 static void
4264 discover_nonconstant_array_refs (void)
4266 basic_block bb;
4267 gimple_stmt_iterator gsi;
4269 FOR_EACH_BB (bb)
4270 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4272 gimple stmt = gsi_stmt (gsi);
4273 if (!is_gimple_debug (stmt))
4274 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4278 /* This function sets crtl->args.internal_arg_pointer to a virtual
4280 register if DRAP is needed. The local register allocator will replace
4280 virtual_incoming_args_rtx with the virtual register. */
4282 static void
4283 expand_stack_alignment (void)
4285 rtx drap_rtx;
4286 unsigned int preferred_stack_boundary;
4288 if (! SUPPORTS_STACK_ALIGNMENT)
4289 return;
4291 if (cfun->calls_alloca
4292 || cfun->has_nonlocal_label
4293 || crtl->has_nonlocal_goto)
4294 crtl->need_drap = true;
4296 /* Call update_stack_boundary here again to update incoming stack
4297 boundary. It may set incoming stack alignment to a different
4298 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4299 use the minimum incoming stack alignment to check if it is OK
4300 to perform sibcall optimization since sibcall optimization will
4301 only align the outgoing stack to incoming stack boundary. */
4302 if (targetm.calls.update_stack_boundary)
4303 targetm.calls.update_stack_boundary ();
4305 /* The incoming stack frame has to be aligned at least at
4306 parm_stack_boundary. */
4307 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4309 /* Update crtl->stack_alignment_estimated and use it later to align
4310 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4311 exceptions since callgraph doesn't collect incoming stack alignment
4312 in this case. */
4313 if (cfun->can_throw_non_call_exceptions
4314 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4315 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4316 else
4317 preferred_stack_boundary = crtl->preferred_stack_boundary;
4318 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4319 crtl->stack_alignment_estimated = preferred_stack_boundary;
4320 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4321 crtl->stack_alignment_needed = preferred_stack_boundary;
4323 gcc_assert (crtl->stack_alignment_needed
4324 <= crtl->stack_alignment_estimated);
4326 crtl->stack_realign_needed
4327 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4328 crtl->stack_realign_tried = crtl->stack_realign_needed;
4330 crtl->stack_realign_processed = true;
4332 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
4333 alignment. */
4334 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4335 drap_rtx = targetm.calls.get_drap_rtx ();
4337 /* stack_realign_drap and drap_rtx must match. */
4338 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4340 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4341 if (NULL != drap_rtx)
4343 crtl->args.internal_arg_pointer = drap_rtx;
4345 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4346 needed. */
4347 fixup_tail_calls ();
4351 /* Translate the intermediate representation contained in the CFG
4352 from GIMPLE trees to RTL.
4354 We do conversion per basic block and preserve/update the tree CFG.
4355 This implies we have to do some magic as the CFG can simultaneously
4356 consist of basic blocks containing RTL and GIMPLE trees. This can
4357 confuse the CFG hooks, so be careful not to manipulate the CFG during
4358 the expansion. */
4360 static unsigned int
4361 gimple_expand_cfg (void)
4363 basic_block bb, init_block;
4364 sbitmap blocks;
4365 edge_iterator ei;
4366 edge e;
4367 rtx var_seq;
4368 unsigned i;
4370 timevar_push (TV_OUT_OF_SSA);
4371 rewrite_out_of_ssa (&SA);
4372 timevar_pop (TV_OUT_OF_SSA);
4373 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4374 sizeof (rtx));
4376 /* Some backends want to know that we are expanding to RTL. */
4377 currently_expanding_to_rtl = 1;
4378 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4379 free_dominance_info (CDI_DOMINATORS);
4381 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4383 insn_locators_alloc ();
4384 if (!DECL_IS_BUILTIN (current_function_decl))
4386 /* Eventually, all FEs should explicitly set function_start_locus. */
4387 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4388 set_curr_insn_source_location
4389 (DECL_SOURCE_LOCATION (current_function_decl));
4390 else
4391 set_curr_insn_source_location (cfun->function_start_locus);
4393 else
4394 set_curr_insn_source_location (UNKNOWN_LOCATION);
4395 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4396 prologue_locator = curr_insn_locator ();
4398 #ifdef INSN_SCHEDULING
4399 init_sched_attrs ();
4400 #endif
4402 /* Make sure the first insn is a note even if we don't want line numbers.
4403 This makes sure the first insn will never be deleted.
4404 Also, final expects a note to appear there. */
4405 emit_note (NOTE_INSN_DELETED);
4407 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4408 discover_nonconstant_array_refs ();
4410 targetm.expand_to_rtl_hook ();
4411 crtl->stack_alignment_needed = STACK_BOUNDARY;
4412 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4413 crtl->stack_alignment_estimated = 0;
4414 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4415 cfun->cfg->max_jumptable_ents = 0;
4417 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4418 of the function section at expansion time to predict the distance of calls. */
4419 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4421 /* Expand the variables recorded during gimple lowering. */
4422 timevar_push (TV_VAR_EXPAND);
4423 start_sequence ();
4425 expand_used_vars ();
4427 var_seq = get_insns ();
4428 end_sequence ();
4429 timevar_pop (TV_VAR_EXPAND);
4431 /* Honor stack protection warnings. */
4432 if (warn_stack_protect)
4434 if (cfun->calls_alloca)
4435 warning (OPT_Wstack_protector,
4436 "stack protector not protecting local variables: "
4437 "variable length buffer");
4438 if (has_short_buffer && !crtl->stack_protect_guard)
4439 warning (OPT_Wstack_protector,
4440 "stack protector not protecting function: "
4441 "all local arrays are less than %d bytes long",
4442 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4445 /* Set up the function's parameters and prepare for its return. */
4446 expand_function_start (current_function_decl);
4448 /* If we emitted any instructions for setting up the variables,
4449 emit them before the FUNCTION_START note. */
4450 if (var_seq)
4452 emit_insn_before (var_seq, parm_birth_insn);
4454 /* In expand_function_end we'll insert the alloca save/restore
4455 before parm_birth_insn. We've just inserted an alloca call.
4456 Adjust the pointer to match. */
4457 parm_birth_insn = var_seq;
4460 /* Now that we also have the parameter RTXs, copy them over to our
4461 partitions. */
4462 for (i = 0; i < SA.map->num_partitions; i++)
4464 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4466 if (TREE_CODE (var) != VAR_DECL
4467 && !SA.partition_to_pseudo[i])
4468 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4469 gcc_assert (SA.partition_to_pseudo[i]);
4471 /* If this decl was marked as living in multiple places, reset
4472 this now to NULL. */
4473 if (DECL_RTL_IF_SET (var) == pc_rtx)
4474 SET_DECL_RTL (var, NULL);
4476 /* Some RTL parts really want to look at DECL_RTL(x) when x
4477 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4478 SET_DECL_RTL here to make this available, but that would mean
4479 selecting one of the potentially many RTLs for one DECL. Instead
4480 of doing that we simply reset the MEM_EXPR of the RTL in question,
4481 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4482 if (!DECL_RTL_SET_P (var))
4484 if (MEM_P (SA.partition_to_pseudo[i]))
4485 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4489 /* If we have a class containing differently aligned pointers
4490 we need to merge those into the corresponding RTL pointer
4491 alignment. */
4492 for (i = 1; i < num_ssa_names; i++)
4494 tree name = ssa_name (i);
4495 int part;
4496 rtx r;
4498 if (!name
4499 || !POINTER_TYPE_P (TREE_TYPE (name))
4500 /* We might have generated new SSA names in
4501 update_alias_info_with_stack_vars. They will have a NULL
4502 defining statement, and won't be part of the partitioning,
4503 so ignore those. */
4504 || !SSA_NAME_DEF_STMT (name))
4505 continue;
4506 part = var_to_partition (SA.map, name);
4507 if (part == NO_PARTITION)
4508 continue;
4509 r = SA.partition_to_pseudo[part];
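/* mark_reg_pointer keeps the smallest alignment it has been given
for a pseudo, so pointers of different alignment coalesced into the
same partition end up with the conservative common alignment.  */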
4510 if (REG_P (r))
4511 mark_reg_pointer (r, get_pointer_alignment (name));
4514 /* If this function is `main', emit a call to `__main'
4515 to run global initializers, etc. */
4516 if (DECL_NAME (current_function_decl)
4517 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4518 && DECL_FILE_SCOPE_P (current_function_decl))
4519 expand_main_function ();
4521 /* Initialize the stack_protect_guard field. This must happen after the
4522 call to __main (if any) so that the external decl is initialized. */
4523 if (crtl->stack_protect_guard)
4524 stack_protect_prologue ();
4526 expand_phi_nodes (&SA);
4528 /* Register rtl specific functions for cfg. */
4529 rtl_register_cfg_hooks ();
4531 init_block = construct_init_block ();
4533 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4534 remaining edges later. */
4535 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4536 e->flags &= ~EDGE_EXECUTABLE;
4538 lab_rtx_for_bb = pointer_map_create ();
4539 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4540 bb = expand_gimple_basic_block (bb);
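/* expand_gimple_basic_block may split BB (for conditional jumps and
tail calls) and returns the block it finished with, so the walk
continues from there.  */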
4542 if (MAY_HAVE_DEBUG_INSNS)
4543 expand_debug_locations ();
4545 execute_free_datastructures ();
4546 timevar_push (TV_OUT_OF_SSA);
4547 finish_out_of_ssa (&SA);
4548 timevar_pop (TV_OUT_OF_SSA);
4550 timevar_push (TV_POST_EXPAND);
4551 /* We are no longer in SSA form. */
4552 cfun->gimple_df->in_ssa_p = false;
4554 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4555 conservatively to true until they are all profile aware. */
4556 pointer_map_destroy (lab_rtx_for_bb);
4557 free_histograms ();
4559 construct_exit_block ();
4560 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4561 insn_locators_finalize ();
4563 /* Zap the tree EH table. */
4564 set_eh_throw_stmt_table (cfun, NULL);
4566 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4567 to split edges, which edge insertion might do. */
4568 rebuild_jump_labels (get_insns ());
4570 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4572 edge e;
4573 edge_iterator ei;
4574 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4576 if (e->insns.r)
4578 rebuild_jump_labels_chain (e->insns.r);
4579 /* Avoid putting insns before parm_birth_insn. */
4580 if (e->src == ENTRY_BLOCK_PTR
4581 && single_succ_p (ENTRY_BLOCK_PTR)
4582 && parm_birth_insn)
4584 rtx insns = e->insns.r;
4585 e->insns.r = NULL_RTX;
4586 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4588 else
4589 commit_one_edge_insertion (e);
4591 else
4592 ei_next (&ei);
4596 /* We're done expanding trees to RTL. */
4597 currently_expanding_to_rtl = 0;
4599 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4601 edge e;
4602 edge_iterator ei;
4603 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4605 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4606 e->flags &= ~EDGE_EXECUTABLE;
4608 /* At the moment not all abnormal edges match the RTL
4609 representation. It is safe to remove them here as
4610 find_many_sub_basic_blocks will rediscover them.
4611 In the future we should get this fixed properly. */
4612 if ((e->flags & EDGE_ABNORMAL)
4613 && !(e->flags & EDGE_SIBCALL))
4614 remove_edge (e);
4615 else
4616 ei_next (&ei);
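/* Expansion can have introduced new control flow inside what used to
be a single GIMPLE block; find_many_sub_basic_blocks below
rediscovers the basic block structure from the insn stream, and
purge_all_dead_edges removes edges that no longer match the
generated RTL.  */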
4620 blocks = sbitmap_alloc (last_basic_block);
4621 sbitmap_ones (blocks);
4622 find_many_sub_basic_blocks (blocks);
4623 sbitmap_free (blocks);
4624 purge_all_dead_edges ();
4626 compact_blocks ();
4628 expand_stack_alignment ();
4630 #ifdef ENABLE_CHECKING
4631 verify_flow_info ();
4632 #endif
4634 /* There's no need to defer outputting this function any more; we
4635 know we want to output it. */
4636 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4638 /* Now that we're done expanding trees to RTL, we shouldn't have any
4639 more CONCATs anywhere. */
4640 generating_concat_p = 0;
4642 if (dump_file)
4644 fprintf (dump_file,
4645 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4646 /* And the pass manager will dump RTL for us. */
4649 /* If we're emitting a nested function, make sure its parent gets
4650 emitted as well. Doing otherwise confuses debug info. */
4652 tree parent;
4653 for (parent = DECL_CONTEXT (current_function_decl);
4654 parent != NULL_TREE;
4655 parent = get_containing_scope (parent))
4656 if (TREE_CODE (parent) == FUNCTION_DECL)
4657 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4660 /* We are now committed to emitting code for this function. Do any
4661 preparation, such as emitting abstract debug info for the inline function
4662 before it gets mangled by optimization. */
4663 if (cgraph_function_possibly_inlined_p (current_function_decl))
4664 (*debug_hooks->outlining_inline_function) (current_function_decl);
4666 TREE_ASM_WRITTEN (current_function_decl) = 1;
4668 /* After expanding, the return labels are no longer needed. */
4669 return_label = NULL;
4670 naked_return_label = NULL;
4672 /* After expanding, the tm_restart map is no longer needed. */
4673 if (cfun->gimple_df->tm_restart)
4675 htab_delete (cfun->gimple_df->tm_restart);
4676 cfun->gimple_df->tm_restart = NULL;
4679 /* Tag the blocks with a depth number so that change_scope can find
4680 the common parent easily. */
4681 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4682 default_rtl_profile ();
4683 timevar_pop (TV_POST_EXPAND);
4684 return 0;
4687 struct rtl_opt_pass pass_expand =
4690 RTL_PASS,
4691 "expand", /* name */
4692 NULL, /* gate */
4693 gimple_expand_cfg, /* execute */
4694 NULL, /* sub */
4695 NULL, /* next */
4696 0, /* static_pass_number */
4697 TV_EXPAND, /* tv_id */
4698 PROP_ssa | PROP_gimple_leh | PROP_cfg
4699 | PROP_gimple_lcx, /* properties_required */
4700 PROP_rtl, /* properties_provided */
4701 PROP_ssa | PROP_trees, /* properties_destroyed */
4702 TODO_verify_ssa | TODO_verify_flow
4703 | TODO_verify_stmts, /* todo_flags_start */
4704 TODO_ggc_collect /* todo_flags_finish */