[official-gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
86 /* This variable holds information helping the rewriting of SSA trees
87 into RTL. */
88 struct ssaexpand SA;
90 /* This variable holds the currently expanded gimple statement for purposes
91 of communicating the profile info to the builtin expanders. */
92 gimple *currently_expanding_gimple_stmt;
94 static rtx expand_debug_expr (tree);
96 static bool defer_stack_allocation (tree, bool);
98 static void record_alignment_for_reg_var (unsigned int);
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
103 tree
104 gimple_assign_rhs_to_tree (gimple *stmt)
106 tree t;
107 enum gimple_rhs_class grhs_class;
109 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
111 if (grhs_class == GIMPLE_TERNARY_RHS)
112 t = build3 (gimple_assign_rhs_code (stmt),
113 TREE_TYPE (gimple_assign_lhs (stmt)),
114 gimple_assign_rhs1 (stmt),
115 gimple_assign_rhs2 (stmt),
116 gimple_assign_rhs3 (stmt));
117 else if (grhs_class == GIMPLE_BINARY_RHS)
118 t = build2 (gimple_assign_rhs_code (stmt),
119 TREE_TYPE (gimple_assign_lhs (stmt)),
120 gimple_assign_rhs1 (stmt),
121 gimple_assign_rhs2 (stmt));
122 else if (grhs_class == GIMPLE_UNARY_RHS)
123 t = build1 (gimple_assign_rhs_code (stmt),
124 TREE_TYPE (gimple_assign_lhs (stmt)),
125 gimple_assign_rhs1 (stmt));
126 else if (grhs_class == GIMPLE_SINGLE_RHS)
128 t = gimple_assign_rhs1 (stmt);
129 /* Avoid modifying this tree in place below. */
130 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 && gimple_location (stmt) != EXPR_LOCATION (t))
132 || (gimple_block (stmt)
133 && currently_expanding_to_rtl
134 && EXPR_P (t)))
135 t = copy_node (t);
137 else
138 gcc_unreachable ();
140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
143 return t;
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
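/* In other words: for an SSA_NAME, SSAVAR yields the underlying
   VAR_DECL/PARM_DECL/RESULT_DECL (possibly NULL for anonymous SSA names);
   any other tree is returned unchanged. */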
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
155 arising from the same user variable being in multiple partitions (this is
156 less likely for compiler-introduced temps). */
158 static tree
159 leader_merge (tree cur, tree next)
161 if (cur == NULL || cur == next)
162 return next;
164 if (DECL_P (cur) && DECL_IGNORED_P (cur))
165 return cur;
167 if (DECL_P (next) && DECL_IGNORED_P (next))
168 return next;
170 return cur;
173 /* Associate declaration T with storage space X. If T is not an
174 SSA name, this is exactly SET_DECL_RTL; otherwise make the
175 partition of T associated with X. */
176 static inline void
177 set_rtl (tree t, rtx x)
179 gcc_checking_assert (!x
180 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 || (use_register_for_decl (t)
182 ? (REG_P (x)
183 || (GET_CODE (x) == CONCAT
184 && (REG_P (XEXP (x, 0))
185 || SUBREG_P (XEXP (x, 0)))
186 && (REG_P (XEXP (x, 1))
187 || SUBREG_P (XEXP (x, 1))))
188 /* We need to accept PARALLELs for RESULT_DECLs
189 because of vector types with BLKmode returned
190 in multiple registers, but they are supposed
191 to be uncoalesced. */
192 || (GET_CODE (x) == PARALLEL
193 && SSAVAR (t)
194 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
195 && (GET_MODE (x) == BLKmode
196 || !flag_tree_coalesce_vars)))
197 : (MEM_P (x) || x == pc_rtx
198 || (GET_CODE (x) == CONCAT
199 && MEM_P (XEXP (x, 0))
200 && MEM_P (XEXP (x, 1))))));
201 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202 RESULT_DECLs has the expected mode. For memory, we accept
203 unpromoted modes, since that's what we're likely to get. For
204 PARM_DECLs and RESULT_DECLs, we'll have been called by
205 set_parm_rtl, which will give us the default def, so we don't
206 have to compute it ourselves. For RESULT_DECLs, we accept mode
207 mismatches too, as long as we have BLKmode or are not coalescing
208 across variables, so that we don't reject BLKmode PARALLELs or
209 unpromoted REGs. */
210 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
211 || (SSAVAR (t)
212 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 && (promote_ssa_mode (t, NULL) == BLKmode
214 || !flag_tree_coalesce_vars))
215 || !use_register_for_decl (t)
216 || GET_MODE (x) == promote_ssa_mode (t, NULL));
218 if (x)
220 bool skip = false;
221 tree cur = NULL_TREE;
222 rtx xm = x;
224 retry:
225 if (MEM_P (xm))
226 cur = MEM_EXPR (xm);
227 else if (REG_P (xm))
228 cur = REG_EXPR (xm);
229 else if (SUBREG_P (xm))
231 gcc_assert (subreg_lowpart_p (xm));
232 xm = SUBREG_REG (xm);
233 goto retry;
235 else if (GET_CODE (xm) == CONCAT)
237 xm = XEXP (xm, 0);
238 goto retry;
240 else if (GET_CODE (xm) == PARALLEL)
242 xm = XVECEXP (xm, 0, 0);
243 gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 xm = XEXP (xm, 0);
245 goto retry;
247 else if (xm == pc_rtx)
248 skip = true;
249 else
250 gcc_unreachable ();
252 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
254 if (cur != next)
256 if (MEM_P (x))
257 set_mem_attributes (x,
258 next && TREE_CODE (next) == SSA_NAME
259 ? TREE_TYPE (next)
260 : next, true);
261 else
262 set_reg_attrs_for_decl_rtl (next, x);
266 if (TREE_CODE (t) == SSA_NAME)
268 int part = var_to_partition (SA.map, t);
269 if (part != NO_PARTITION)
271 if (SA.partition_to_pseudo[part])
272 gcc_assert (SA.partition_to_pseudo[part] == x);
273 else if (x != pc_rtx)
274 SA.partition_to_pseudo[part] = x;
276 /* For the benefit of debug information at -O0 (where
277 vartracking doesn't run) record the place also in the base
278 DECL. For PARMs and RESULTs, do so only when setting the
279 default def. */
280 if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 && (VAR_P (SSA_NAME_VAR (t))
282 || SSA_NAME_IS_DEFAULT_DEF (t)))
284 tree var = SSA_NAME_VAR (t);
285 /* If we don't yet have something recorded, just record it now. */
286 if (!DECL_RTL_SET_P (var))
287 SET_DECL_RTL (var, x);
288 /* If we have it set already to "multiple places" don't
289 change this. */
290 else if (DECL_RTL (var) == pc_rtx)
292 /* If we have something recorded and it's not the same place
293 as we want to record now, we have multiple partitions for the
294 same base variable, with different places. We can't just
295 randomly choose one, hence we have to say that we don't know.
296 This only happens with optimization, and there var-tracking
297 will figure out the right thing. */
298 else if (DECL_RTL (var) != x)
299 SET_DECL_RTL (var, pc_rtx);
302 else
303 SET_DECL_RTL (t, x);
306 /* This structure holds data relevant to one variable that will be
307 placed in a stack slot. */
308 struct stack_var
310 /* The variable. */
311 tree decl;
313 /* Initially, the size of the variable. Later, the size of the partition,
314 if this variable becomes its partition's representative. */
315 poly_uint64 size;
317 /* The *byte* alignment required for this variable. Or, as with the
318 size, the alignment for this partition. */
319 unsigned int alignb;
321 /* The partition representative. */
322 size_t representative;
324 /* The next stack variable in the partition, or EOC. */
325 size_t next;
327 /* The indices of conflicting stack variables, as a bitmap. */
328 bitmap conflicts;
331 #define EOC ((size_t)-1)
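/* End-of-chain marker for the per-partition NEXT lists above. */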
333 /* We have an array of such objects while deciding allocation. */
334 static struct stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
339 /* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341 static bitmap_obstack stack_var_bitmap_obstack;
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344 is non-decreasing. */
345 static size_t *stack_vars_sorted;
347 /* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350 static int frame_phase;
352 /* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354 static bool has_protected_decls;
356 /* Used during expand_used_vars. Remember if we saw a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358 static bool has_short_buffer;
360 /* Compute the byte alignment to use for DECL. Ignore alignment
361 requirements we can't satisfy with the expected alignment of the stack boundary. */
363 static unsigned int
364 align_local_variable (tree decl, bool really_expand)
366 unsigned int align;
368 if (TREE_CODE (decl) == SSA_NAME)
369 align = TYPE_ALIGN (TREE_TYPE (decl));
370 else
372 align = LOCAL_DECL_ALIGNMENT (decl);
373 /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
374 That is done before IPA and could bump alignment based on host
375 backend even for offloaded code which wants different
376 LOCAL_DECL_ALIGNMENT. */
377 if (really_expand)
378 SET_DECL_ALIGN (decl, align);
380 return align / BITS_PER_UNIT;
383 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true,
384 down otherwise. Return the aligned BASE value. */
386 static inline unsigned HOST_WIDE_INT
387 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
389 return align_up ? (base + align - 1) & -align : base & -align;
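/* A small worked example of the bit trick above (assuming ALIGN is a power
   of two, as it is for the uses in this file): align_base (13, 8, true) == 16,
   because (13 + 7) & -8 == 16, while align_base (13, 8, false) == 8,
   because 13 & -8 == 8. */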
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393 Return the frame offset. */
395 static poly_int64
396 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
398 poly_int64 offset, new_frame_offset;
400 if (FRAME_GROWS_DOWNWARD)
402 new_frame_offset
403 = aligned_lower_bound (frame_offset - frame_phase - size,
404 align) + frame_phase;
405 offset = new_frame_offset;
407 else
409 new_frame_offset
410 = aligned_upper_bound (frame_offset - frame_phase,
411 align) + frame_phase;
412 offset = new_frame_offset;
413 new_frame_offset += size;
415 frame_offset = new_frame_offset;
417 if (frame_offset_overflow (frame_offset, cfun->decl))
418 frame_offset = offset = 0;
420 return offset;
423 /* Accumulate DECL into STACK_VARS. */
425 static void
426 add_stack_var (tree decl, bool really_expand)
428 struct stack_var *v;
430 if (stack_vars_num >= stack_vars_alloc)
432 if (stack_vars_alloc)
433 stack_vars_alloc = stack_vars_alloc * 3 / 2;
434 else
435 stack_vars_alloc = 32;
436 stack_vars
437 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
439 if (!decl_to_stack_part)
440 decl_to_stack_part = new hash_map<tree, size_t>;
442 v = &stack_vars[stack_vars_num];
443 decl_to_stack_part->put (decl, stack_vars_num);
445 v->decl = decl;
446 tree size = TREE_CODE (decl) == SSA_NAME
447 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
448 : DECL_SIZE_UNIT (decl);
449 v->size = tree_to_poly_uint64 (size);
450 /* Ensure that all variables have size, so that &a != &b for any two
451 variables that are simultaneously live. */
452 if (known_eq (v->size, 0U))
453 v->size = 1;
454 v->alignb = align_local_variable (decl, really_expand);
455 /* An alignment of zero can mightily confuse us later. */
456 gcc_assert (v->alignb != 0);
458 /* All variables are initially in their own partition. */
459 v->representative = stack_vars_num;
460 v->next = EOC;
462 /* All variables initially conflict with no other. */
463 v->conflicts = NULL;
465 /* Ensure that this decl doesn't get put onto the list twice. */
466 set_rtl (decl, pc_rtx);
468 stack_vars_num++;
471 /* Make the decls associated with luids X and Y conflict. */
473 static void
474 add_stack_var_conflict (size_t x, size_t y)
476 struct stack_var *a = &stack_vars[x];
477 struct stack_var *b = &stack_vars[y];
478 if (x == y)
479 return;
480 if (!a->conflicts)
481 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
482 if (!b->conflicts)
483 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
484 bitmap_set_bit (a->conflicts, y);
485 bitmap_set_bit (b->conflicts, x);
488 /* Check whether the decls associated with luids X and Y conflict. */
490 static bool
491 stack_var_conflict_p (size_t x, size_t y)
493 struct stack_var *a = &stack_vars[x];
494 struct stack_var *b = &stack_vars[y];
495 if (x == y)
496 return false;
497 /* Partitions containing an SSA name result from gimple registers
498 with things like unsupported modes. They are top-level and
499 hence conflict with everything else. */
500 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
501 return true;
503 if (!a->conflicts || !b->conflicts)
504 return false;
505 return bitmap_bit_p (a->conflicts, y);
508 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
509 enter its partition number into bitmap DATA. */
511 static bool
512 visit_op (gimple *, tree op, tree, void *data)
514 bitmap active = (bitmap)data;
515 op = get_base_address (op);
516 if (op
517 && DECL_P (op)
518 && DECL_RTL_IF_SET (op) == pc_rtx)
520 size_t *v = decl_to_stack_part->get (op);
521 if (v)
522 bitmap_set_bit (active, *v);
524 return false;
527 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
528 record conflicts between it and all currently active other partitions
529 from bitmap DATA. */
531 static bool
532 visit_conflict (gimple *, tree op, tree, void *data)
534 bitmap active = (bitmap)data;
535 op = get_base_address (op);
536 if (op
537 && DECL_P (op)
538 && DECL_RTL_IF_SET (op) == pc_rtx)
540 size_t *v = decl_to_stack_part->get (op);
541 if (v && bitmap_set_bit (active, *v))
543 size_t num = *v;
544 bitmap_iterator bi;
545 unsigned i;
546 gcc_assert (num < stack_vars_num);
547 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
548 add_stack_var_conflict (num, i);
551 return false;
554 /* Helper routine for add_scope_conflicts, calculating the active partitions
555 at the end of BB, leaving the result in WORK. We're called to generate
556 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
557 liveness. */
559 static void
560 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
562 edge e;
563 edge_iterator ei;
564 gimple_stmt_iterator gsi;
565 walk_stmt_load_store_addr_fn visit;
567 bitmap_clear (work);
568 FOR_EACH_EDGE (e, ei, bb->preds)
569 bitmap_ior_into (work, (bitmap)e->src->aux);
571 visit = visit_op;
573 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
575 gimple *stmt = gsi_stmt (gsi);
576 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
578 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
580 gimple *stmt = gsi_stmt (gsi);
582 if (gimple_clobber_p (stmt))
584 tree lhs = gimple_assign_lhs (stmt);
585 size_t *v;
586 /* Nested function lowering might introduce LHSs
587 that are COMPONENT_REFs. */
588 if (!VAR_P (lhs))
589 continue;
590 if (DECL_RTL_IF_SET (lhs) == pc_rtx
591 && (v = decl_to_stack_part->get (lhs)))
592 bitmap_clear_bit (work, *v);
594 else if (!is_gimple_debug (stmt))
596 if (for_conflict
597 && visit == visit_op)
599 /* If this is the first real instruction in this BB we need
600 to add conflicts for everything live at this point now.
601 Unlike classical liveness for named objects we can't
602 rely on seeing a def/use of the names we're interested in.
603 There might merely be indirect loads/stores. We'd not add any
604 conflicts for such partitions. */
605 bitmap_iterator bi;
606 unsigned i;
607 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
609 struct stack_var *a = &stack_vars[i];
610 if (!a->conflicts)
611 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
612 bitmap_ior_into (a->conflicts, work);
614 visit = visit_conflict;
616 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
621 /* Generate stack partition conflicts between all partitions that are
622 simultaneously live. */
624 static void
625 add_scope_conflicts (void)
627 basic_block bb;
628 bool changed;
629 bitmap work = BITMAP_ALLOC (NULL);
630 int *rpo;
631 int n_bbs;
633 /* We approximate the live range of a stack variable by taking the first
634 mention of its name as starting point(s), and by the end-of-scope
635 death clobber added by gimplify as ending point(s) of the range.
636 This overapproximates in the case where we, for instance, moved an address-taken
637 operation upward, without also moving a dereference to it upwards.
638 But it's conservatively correct as a variable never can hold values
639 before its name is mentioned at least once.
641 We then do a mostly classical bitmap liveness algorithm. */
643 FOR_ALL_BB_FN (bb, cfun)
644 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
646 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
647 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
649 changed = true;
650 while (changed)
652 int i;
653 changed = false;
654 for (i = 0; i < n_bbs; i++)
656 bitmap active;
657 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
658 active = (bitmap)bb->aux;
659 add_scope_conflicts_1 (bb, work, false);
660 if (bitmap_ior_into (active, work))
661 changed = true;
665 FOR_EACH_BB_FN (bb, cfun)
666 add_scope_conflicts_1 (bb, work, true);
668 free (rpo);
669 BITMAP_FREE (work);
670 FOR_ALL_BB_FN (bb, cfun)
671 BITMAP_FREE (bb->aux);
674 /* A subroutine of partition_stack_vars. A comparison function for qsort,
675 sorting an array of indices by the properties of the object. */
677 static int
678 stack_var_cmp (const void *a, const void *b)
680 size_t ia = *(const size_t *)a;
681 size_t ib = *(const size_t *)b;
682 unsigned int aligna = stack_vars[ia].alignb;
683 unsigned int alignb = stack_vars[ib].alignb;
684 poly_int64 sizea = stack_vars[ia].size;
685 poly_int64 sizeb = stack_vars[ib].size;
686 tree decla = stack_vars[ia].decl;
687 tree declb = stack_vars[ib].decl;
688 bool largea, largeb;
689 unsigned int uida, uidb;
691 /* Primary compare on "large" alignment. Large comes first. */
692 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
693 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
694 if (largea != largeb)
695 return (int)largeb - (int)largea;
697 /* Secondary compare on size, decreasing. */
698 int diff = compare_sizes_for_sort (sizeb, sizea);
699 if (diff != 0)
700 return diff;
702 /* Tertiary compare on true alignment, decreasing. */
703 if (aligna < alignb)
704 return -1;
705 if (aligna > alignb)
706 return 1;
708 /* Final compare on ID for sort stability, increasing.
709 Two SSA names are compared by their version, SSA names come before
710 non-SSA names, and two normal decls are compared by their DECL_UID. */
711 if (TREE_CODE (decla) == SSA_NAME)
713 if (TREE_CODE (declb) == SSA_NAME)
714 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
715 else
716 return -1;
718 else if (TREE_CODE (declb) == SSA_NAME)
719 return 1;
720 else
721 uida = DECL_UID (decla), uidb = DECL_UID (declb);
722 if (uida < uidb)
723 return 1;
724 if (uida > uidb)
725 return -1;
726 return 0;
729 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
730 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
732 /* If the points-to solution *PI points to variables that are in a partition
733 together with other variables add all partition members to the pointed-to
734 variables bitmap. */
736 static void
737 add_partitioned_vars_to_ptset (struct pt_solution *pt,
738 part_hashmap *decls_to_partitions,
739 hash_set<bitmap> *visited, bitmap temp)
741 bitmap_iterator bi;
742 unsigned i;
743 bitmap *part;
745 if (pt->anything
746 || pt->vars == NULL
747 /* The pointed-to vars bitmap is shared; it is enough to
748 visit it once. */
749 || visited->add (pt->vars))
750 return;
752 bitmap_clear (temp);
754 /* By using a temporary bitmap to store all members of the partitions
755 we have to add we make sure to visit each of the partitions only
756 once. */
757 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
758 if ((!temp
759 || !bitmap_bit_p (temp, i))
760 && (part = decls_to_partitions->get (i)))
761 bitmap_ior_into (temp, *part);
762 if (!bitmap_empty_p (temp))
763 bitmap_ior_into (pt->vars, temp);
766 /* Update points-to sets based on partition info, so we can use them on RTL.
767 The bitmaps representing stack partitions will be saved until expand,
768 where partitioned decls used as bases in memory expressions will be
769 rewritten. */
771 static void
772 update_alias_info_with_stack_vars (void)
774 part_hashmap *decls_to_partitions = NULL;
775 size_t i, j;
776 tree var = NULL_TREE;
778 for (i = 0; i < stack_vars_num; i++)
780 bitmap part = NULL;
781 tree name;
782 struct ptr_info_def *pi;
784 /* Not interested in partitions with a single variable. */
785 if (stack_vars[i].representative != i
786 || stack_vars[i].next == EOC)
787 continue;
789 if (!decls_to_partitions)
791 decls_to_partitions = new part_hashmap;
792 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
795 /* Create an SSA_NAME that points to the partition for use
796 as base during alias-oracle queries on RTL for bases that
797 have been partitioned. */
798 if (var == NULL_TREE)
799 var = create_tmp_var (ptr_type_node);
800 name = make_ssa_name (var);
802 /* Create bitmaps representing partitions. They will be used for
803 points-to sets later, so use GGC alloc. */
804 part = BITMAP_GGC_ALLOC ();
805 for (j = i; j != EOC; j = stack_vars[j].next)
807 tree decl = stack_vars[j].decl;
808 unsigned int uid = DECL_PT_UID (decl);
809 bitmap_set_bit (part, uid);
810 decls_to_partitions->put (uid, part);
811 cfun->gimple_df->decls_to_pointers->put (decl, name);
812 if (TREE_ADDRESSABLE (decl))
813 TREE_ADDRESSABLE (name) = 1;
816 /* Make the SSA name point to all partition members. */
817 pi = get_ptr_info (name);
818 pt_solution_set (&pi->pt, part, false);
821 /* Make all points-to sets that contain one member of a partition
822 contain all members of the partition. */
823 if (decls_to_partitions)
825 unsigned i;
826 tree name;
827 hash_set<bitmap> visited;
828 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
830 FOR_EACH_SSA_NAME (i, name, cfun)
832 struct ptr_info_def *pi;
834 if (POINTER_TYPE_P (TREE_TYPE (name))
835 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
836 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
837 &visited, temp);
840 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
841 decls_to_partitions, &visited, temp);
843 delete decls_to_partitions;
844 BITMAP_FREE (temp);
848 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
849 partitioning algorithm. Partitions A and B are known to be non-conflicting.
850 Merge them into a single partition A. */
852 static void
853 union_stack_vars (size_t a, size_t b)
855 struct stack_var *vb = &stack_vars[b];
856 bitmap_iterator bi;
857 unsigned u;
859 gcc_assert (stack_vars[b].next == EOC);
860 /* Add B to A's partition. */
861 stack_vars[b].next = stack_vars[a].next;
862 stack_vars[b].representative = a;
863 stack_vars[a].next = b;
865 /* Update the required alignment of partition A to account for B. */
866 if (stack_vars[a].alignb < stack_vars[b].alignb)
867 stack_vars[a].alignb = stack_vars[b].alignb;
869 /* Update the interference graph and merge the conflicts. */
870 if (vb->conflicts)
872 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
873 add_stack_var_conflict (a, stack_vars[u].representative);
874 BITMAP_FREE (vb->conflicts);
878 /* A subroutine of expand_used_vars. Binpack the variables into
879 partitions constrained by the interference graph. The overall
880 algorithm used is as follows:
882 Sort the objects by size in descending order.
883 For each object A {
884 S = size(A)
885 O = 0
886 loop {
887 Look for the largest non-conflicting object B with size <= S.
888 UNION (A, B)
893 static void
894 partition_stack_vars (void)
896 size_t si, sj, n = stack_vars_num;
898 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
899 for (si = 0; si < n; ++si)
900 stack_vars_sorted[si] = si;
902 if (n == 1)
903 return;
905 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
907 for (si = 0; si < n; ++si)
909 size_t i = stack_vars_sorted[si];
910 unsigned int ialign = stack_vars[i].alignb;
911 poly_int64 isize = stack_vars[i].size;
913 /* Ignore objects that aren't partition representatives. If we
914 see a var that is not a partition representative, it must
915 have been merged earlier. */
916 if (stack_vars[i].representative != i)
917 continue;
919 for (sj = si + 1; sj < n; ++sj)
921 size_t j = stack_vars_sorted[sj];
922 unsigned int jalign = stack_vars[j].alignb;
923 poly_int64 jsize = stack_vars[j].size;
925 /* Ignore objects that aren't partition representatives. */
926 if (stack_vars[j].representative != j)
927 continue;
929 /* Do not mix objects of "small" (supported) alignment
930 and "large" (unsupported) alignment. */
931 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
932 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
933 break;
935 /* For Address Sanitizer do not mix objects with different
936 sizes, as the shorter vars wouldn't be adequately protected.
937 Don't do that for "large" (unsupported) alignment objects,
938 those aren't protected anyway. */
939 if (asan_sanitize_stack_p ()
940 && maybe_ne (isize, jsize)
941 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
942 break;
944 /* Ignore conflicting objects. */
945 if (stack_var_conflict_p (i, j))
946 continue;
948 /* UNION the objects, placing J at OFFSET. */
949 union_stack_vars (i, j);
953 update_alias_info_with_stack_vars ();
956 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
958 static void
959 dump_stack_var_partition (void)
961 size_t si, i, j, n = stack_vars_num;
963 for (si = 0; si < n; ++si)
965 i = stack_vars_sorted[si];
967 /* Skip variables that aren't partition representatives, for now. */
968 if (stack_vars[i].representative != i)
969 continue;
971 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
972 print_dec (stack_vars[i].size, dump_file);
973 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
975 for (j = i; j != EOC; j = stack_vars[j].next)
977 fputc ('\t', dump_file);
978 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
980 fputc ('\n', dump_file);
984 /* Assign rtl to DECL at BASE + OFFSET. */
986 static void
987 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
988 poly_int64 offset)
990 unsigned align;
991 rtx x;
993 /* If this fails, we've overflowed the stack frame. Error nicely? */
994 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
996 x = plus_constant (Pmode, base, offset);
997 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
998 ? TYPE_MODE (TREE_TYPE (decl))
999 : DECL_MODE (SSAVAR (decl)), x);
1001 if (TREE_CODE (decl) != SSA_NAME)
1003 /* Set alignment we actually gave this decl if it isn't an SSA name.
1004 If it is, we generate stack slots only accidentally, so it isn't as
1005 important; we'll simply use the alignment that is already set. */
1006 if (base == virtual_stack_vars_rtx)
1007 offset -= frame_phase;
1008 align = known_alignment (offset);
1009 align *= BITS_PER_UNIT;
1010 if (align == 0 || align > base_align)
1011 align = base_align;
1013 /* One would think that we could assert that we're not decreasing
1014 alignment here, but (at least) the i386 port does exactly this
1015 via the MINIMUM_ALIGNMENT hook. */
1017 SET_DECL_ALIGN (decl, align);
1018 DECL_USER_ALIGN (decl) = 0;
1021 set_rtl (decl, x);
1024 struct stack_vars_data
1026 /* Vector of offset pairs, always end of some padding followed
1027 by start of the padding that needs Address Sanitizer protection.
1028 The vector is in reverse order; highest-offset pairs come first. */
1029 auto_vec<HOST_WIDE_INT> asan_vec;
1031 /* Vector of partition representative decls in between the paddings. */
1032 auto_vec<tree> asan_decl_vec;
1034 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1035 rtx asan_base;
1037 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1038 unsigned int asan_alignb;
1041 /* A subroutine of expand_used_vars. Give each partition representative
1042 a unique location within the stack frame. Update each partition member
1043 with that location. */
1045 static void
1046 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1048 size_t si, i, j, n = stack_vars_num;
1049 poly_uint64 large_size = 0, large_alloc = 0;
1050 rtx large_base = NULL;
1051 unsigned large_align = 0;
1052 bool large_allocation_done = false;
1053 tree decl;
1055 /* Determine if there are any variables requiring "large" alignment.
1056 Since these are dynamically allocated, we only process these if
1057 no predicate is involved. */
1058 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1059 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1061 /* Find the total size of these variables. */
1062 for (si = 0; si < n; ++si)
1064 unsigned alignb;
1066 i = stack_vars_sorted[si];
1067 alignb = stack_vars[i].alignb;
1069 /* All "large" alignment decls come before all "small" alignment
1070 decls, but "large" alignment decls are not sorted based on
1071 their alignment. Increase large_align to track the largest
1072 required alignment. */
1073 if ((alignb * BITS_PER_UNIT) > large_align)
1074 large_align = alignb * BITS_PER_UNIT;
1076 /* Stop when we get to the first decl with "small" alignment. */
1077 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1078 break;
1080 /* Skip variables that aren't partition representatives. */
1081 if (stack_vars[i].representative != i)
1082 continue;
1084 /* Skip variables that have already had rtl assigned. See also
1085 add_stack_var where we perpetrate this pc_rtx hack. */
1086 decl = stack_vars[i].decl;
1087 if (TREE_CODE (decl) == SSA_NAME
1088 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1089 : DECL_RTL (decl) != pc_rtx)
1090 continue;
1092 large_size = aligned_upper_bound (large_size, alignb);
1093 large_size += stack_vars[i].size;
1097 for (si = 0; si < n; ++si)
1099 rtx base;
1100 unsigned base_align, alignb;
1101 poly_int64 offset;
1103 i = stack_vars_sorted[si];
1105 /* Skip variables that aren't partition representatives, for now. */
1106 if (stack_vars[i].representative != i)
1107 continue;
1109 /* Skip variables that have already had rtl assigned. See also
1110 add_stack_var where we perpetrate this pc_rtx hack. */
1111 decl = stack_vars[i].decl;
1112 if (TREE_CODE (decl) == SSA_NAME
1113 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1114 : DECL_RTL (decl) != pc_rtx)
1115 continue;
1117 /* Check the predicate to see whether this variable should be
1118 allocated in this pass. */
1119 if (pred && !pred (i))
1120 continue;
1122 alignb = stack_vars[i].alignb;
1123 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1125 base = virtual_stack_vars_rtx;
1126 /* ASAN description strings don't yet have a syntax for expressing
1127 polynomial offsets. */
1128 HOST_WIDE_INT prev_offset;
1129 if (asan_sanitize_stack_p ()
1130 && pred
1131 && frame_offset.is_constant (&prev_offset)
1132 && stack_vars[i].size.is_constant ())
1134 if (data->asan_vec.is_empty ())
1136 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1137 prev_offset = frame_offset.to_constant ();
1139 prev_offset = align_base (prev_offset,
1140 ASAN_MIN_RED_ZONE_SIZE,
1141 !FRAME_GROWS_DOWNWARD);
1142 tree repr_decl = NULL_TREE;
1143 unsigned HOST_WIDE_INT size
1144 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1145 if (data->asan_vec.is_empty ())
1146 size = MAX (size, ASAN_RED_ZONE_SIZE);
1148 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1149 ASAN_MIN_RED_ZONE_SIZE);
1150 offset = alloc_stack_frame_space (size, alignment);
1152 data->asan_vec.safe_push (prev_offset);
1153 /* Allocating a constant amount of space from a constant
1154 starting offset must give a constant result. */
1155 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1156 .to_constant ());
1157 /* Find best representative of the partition.
1158 Prefer those with DECL_NAME, even better
1159 satisfying asan_protect_stack_decl predicate. */
1160 for (j = i; j != EOC; j = stack_vars[j].next)
1161 if (asan_protect_stack_decl (stack_vars[j].decl)
1162 && DECL_NAME (stack_vars[j].decl))
1164 repr_decl = stack_vars[j].decl;
1165 break;
1167 else if (repr_decl == NULL_TREE
1168 && DECL_P (stack_vars[j].decl)
1169 && DECL_NAME (stack_vars[j].decl))
1170 repr_decl = stack_vars[j].decl;
1171 if (repr_decl == NULL_TREE)
1172 repr_decl = stack_vars[i].decl;
1173 data->asan_decl_vec.safe_push (repr_decl);
1175 /* Make sure a representative is unpoisoned if another
1176 variable in the partition is handled by
1177 use-after-scope sanitization. */
1178 if (asan_handled_variables != NULL
1179 && !asan_handled_variables->contains (repr_decl))
1181 for (j = i; j != EOC; j = stack_vars[j].next)
1182 if (asan_handled_variables->contains (stack_vars[j].decl))
1183 break;
1184 if (j != EOC)
1185 asan_handled_variables->add (repr_decl);
1188 data->asan_alignb = MAX (data->asan_alignb, alignb);
1189 if (data->asan_base == NULL)
1190 data->asan_base = gen_reg_rtx (Pmode);
1191 base = data->asan_base;
1193 if (!STRICT_ALIGNMENT)
1194 base_align = crtl->max_used_stack_slot_alignment;
1195 else
1196 base_align = MAX (crtl->max_used_stack_slot_alignment,
1197 GET_MODE_ALIGNMENT (SImode)
1198 << ASAN_SHADOW_SHIFT);
1200 else
1202 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1203 base_align = crtl->max_used_stack_slot_alignment;
1206 else
1208 /* Large alignment is only processed in the last pass. */
1209 if (pred)
1210 continue;
1212 /* If there were any variables requiring "large" alignment, allocate
1213 space. */
1214 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1216 poly_int64 loffset;
1217 rtx large_allocsize;
1219 large_allocsize = gen_int_mode (large_size, Pmode);
1220 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1221 loffset = alloc_stack_frame_space
1222 (rtx_to_poly_int64 (large_allocsize),
1223 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1224 large_base = get_dynamic_stack_base (loffset, large_align);
1225 large_allocation_done = true;
1227 gcc_assert (large_base != NULL);
1229 large_alloc = aligned_upper_bound (large_alloc, alignb);
1230 offset = large_alloc;
1231 large_alloc += stack_vars[i].size;
1233 base = large_base;
1234 base_align = large_align;
1237 /* Create rtl for each variable based on their location within the
1238 partition. */
1239 for (j = i; j != EOC; j = stack_vars[j].next)
1241 expand_one_stack_var_at (stack_vars[j].decl,
1242 base, base_align,
1243 offset);
1247 gcc_assert (known_eq (large_alloc, large_size));
1250 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1251 static poly_uint64
1252 account_stack_vars (void)
1254 size_t si, j, i, n = stack_vars_num;
1255 poly_uint64 size = 0;
1257 for (si = 0; si < n; ++si)
1259 i = stack_vars_sorted[si];
1261 /* Skip variables that aren't partition representatives, for now. */
1262 if (stack_vars[i].representative != i)
1263 continue;
1265 size += stack_vars[i].size;
1266 for (j = i; j != EOC; j = stack_vars[j].next)
1267 set_rtl (stack_vars[j].decl, NULL);
1269 return size;
1272 /* Record the RTL assignment X for the default def of PARM. */
1274 extern void
1275 set_parm_rtl (tree parm, rtx x)
1277 gcc_assert (TREE_CODE (parm) == PARM_DECL
1278 || TREE_CODE (parm) == RESULT_DECL);
1280 if (x && !MEM_P (x))
1282 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1283 TYPE_MODE (TREE_TYPE (parm)),
1284 TYPE_ALIGN (TREE_TYPE (parm)));
1286 /* If the variable alignment is very large we'll dynamically
1287 allocate it, which means that in-frame portion is just a
1288 pointer. ??? We've got a pseudo for sure here, do we
1289 actually dynamically allocate its spilling area if needed?
1290 ??? Isn't it a problem when Pmode alignment also exceeds
1291 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1292 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1293 align = GET_MODE_ALIGNMENT (Pmode);
1295 record_alignment_for_reg_var (align);
1298 tree ssa = ssa_default_def (cfun, parm);
1299 if (!ssa)
1300 return set_rtl (parm, x);
1302 int part = var_to_partition (SA.map, ssa);
1303 gcc_assert (part != NO_PARTITION);
1305 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1306 gcc_assert (changed);
1308 set_rtl (ssa, x);
1309 gcc_assert (DECL_RTL (parm) == x);
1312 /* A subroutine of expand_one_var. Called to immediately assign rtl
1313 to a variable to be allocated in the stack frame. */
1315 static void
1316 expand_one_stack_var_1 (tree var)
1318 poly_uint64 size;
1319 poly_int64 offset;
1320 unsigned byte_align;
1322 if (TREE_CODE (var) == SSA_NAME)
1324 tree type = TREE_TYPE (var);
1325 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1326 byte_align = TYPE_ALIGN_UNIT (type);
1328 else
1330 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1331 byte_align = align_local_variable (var, true);
1334 /* We handle highly aligned variables in expand_stack_vars. */
1335 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1337 offset = alloc_stack_frame_space (size, byte_align);
1339 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1340 crtl->max_used_stack_slot_alignment, offset);
1343 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1344 already assigned some MEM. */
1346 static void
1347 expand_one_stack_var (tree var)
1349 if (TREE_CODE (var) == SSA_NAME)
1351 int part = var_to_partition (SA.map, var);
1352 if (part != NO_PARTITION)
1354 rtx x = SA.partition_to_pseudo[part];
1355 gcc_assert (x);
1356 gcc_assert (MEM_P (x));
1357 return;
1361 return expand_one_stack_var_1 (var);
1364 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1365 that will reside in a hard register. */
1367 static void
1368 expand_one_hard_reg_var (tree var)
1370 rest_of_decl_compilation (var, 0, 0);
1373 /* Record the alignment requirements of some variable assigned to a
1374 pseudo. */
1376 static void
1377 record_alignment_for_reg_var (unsigned int align)
1379 if (SUPPORTS_STACK_ALIGNMENT
1380 && crtl->stack_alignment_estimated < align)
1382 /* stack_alignment_estimated shouldn't change after the stack
1383 realign decision has been made. */
1384 gcc_assert (!crtl->stack_realign_processed);
1385 crtl->stack_alignment_estimated = align;
1388 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1389 So here we only make sure stack_alignment_needed >= align. */
1390 if (crtl->stack_alignment_needed < align)
1391 crtl->stack_alignment_needed = align;
1392 if (crtl->max_used_stack_slot_alignment < align)
1393 crtl->max_used_stack_slot_alignment = align;
1396 /* Create RTL for an SSA partition. */
1398 static void
1399 expand_one_ssa_partition (tree var)
1401 int part = var_to_partition (SA.map, var);
1402 gcc_assert (part != NO_PARTITION);
1404 if (SA.partition_to_pseudo[part])
1405 return;
1407 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1408 TYPE_MODE (TREE_TYPE (var)),
1409 TYPE_ALIGN (TREE_TYPE (var)));
1411 /* If the variable alignment is very large we'll dynamically allocate
1412 it, which means that in-frame portion is just a pointer. */
1413 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1414 align = GET_MODE_ALIGNMENT (Pmode);
1416 record_alignment_for_reg_var (align);
1418 if (!use_register_for_decl (var))
1420 if (defer_stack_allocation (var, true))
1421 add_stack_var (var, true);
1422 else
1423 expand_one_stack_var_1 (var);
1424 return;
1427 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1428 rtx x = gen_reg_rtx (reg_mode);
1430 set_rtl (var, x);
1432 /* For a promoted variable, X will not be used directly but wrapped in a
1433 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1434 will assume that its upper bits can be inferred from its lower bits.
1435 Therefore, if X isn't initialized on every path from the entry, then
1436 we must do it manually in order to fulfill the above assumption. */
1437 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1438 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1439 emit_move_insn (x, CONST0_RTX (reg_mode));
1442 /* Record the association between the RTL generated for partition PART
1443 and the underlying variable of the SSA_NAME VAR. */
1445 static void
1446 adjust_one_expanded_partition_var (tree var)
1448 if (!var)
1449 return;
1451 tree decl = SSA_NAME_VAR (var);
1453 int part = var_to_partition (SA.map, var);
1454 if (part == NO_PARTITION)
1455 return;
1457 rtx x = SA.partition_to_pseudo[part];
1459 gcc_assert (x);
1461 set_rtl (var, x);
1463 if (!REG_P (x))
1464 return;
1466 /* Note if the object is a user variable. */
1467 if (decl && !DECL_ARTIFICIAL (decl))
1468 mark_user_reg (x);
1470 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1471 mark_reg_pointer (x, get_pointer_alignment (var));
1474 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1475 that will reside in a pseudo register. */
1477 static void
1478 expand_one_register_var (tree var)
1480 if (TREE_CODE (var) == SSA_NAME)
1482 int part = var_to_partition (SA.map, var);
1483 if (part != NO_PARTITION)
1485 rtx x = SA.partition_to_pseudo[part];
1486 gcc_assert (x);
1487 gcc_assert (REG_P (x));
1488 return;
1490 gcc_unreachable ();
1493 tree decl = var;
1494 tree type = TREE_TYPE (decl);
1495 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1496 rtx x = gen_reg_rtx (reg_mode);
1498 set_rtl (var, x);
1500 /* Note if the object is a user variable. */
1501 if (!DECL_ARTIFICIAL (decl))
1502 mark_user_reg (x);
1504 if (POINTER_TYPE_P (type))
1505 mark_reg_pointer (x, get_pointer_alignment (var));
1508 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1509 has some associated error, e.g. its type is error-mark. We just need
1510 to pick something that won't crash the rest of the compiler. */
1512 static void
1513 expand_one_error_var (tree var)
1515 machine_mode mode = DECL_MODE (var);
1516 rtx x;
1518 if (mode == BLKmode)
1519 x = gen_rtx_MEM (BLKmode, const0_rtx);
1520 else if (mode == VOIDmode)
1521 x = const0_rtx;
1522 else
1523 x = gen_reg_rtx (mode);
1525 SET_DECL_RTL (var, x);
1528 /* A subroutine of expand_one_var. VAR is a variable that will be
1529 allocated to the local stack frame. Return true if we wish to
1530 add VAR to STACK_VARS so that it will be coalesced with other
1531 variables. Return false to allocate VAR immediately.
1533 This function is used to reduce the number of variables considered
1534 for coalescing, which reduces the size of the quadratic problem. */
1536 static bool
1537 defer_stack_allocation (tree var, bool toplevel)
1539 tree size_unit = TREE_CODE (var) == SSA_NAME
1540 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1541 : DECL_SIZE_UNIT (var);
1542 poly_uint64 size;
1544 /* Whether the variable is small enough for immediate allocation not to be
1545 a problem with regard to the frame size. */
1546 bool smallish
1547 = (poly_int_tree_p (size_unit, &size)
1548 && (estimated_poly_value (size)
1549 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1551 /* If stack protection is enabled, *all* stack variables must be deferred,
1552 so that we can re-order the strings to the top of the frame.
1553 Similarly for Address Sanitizer. */
1554 if (flag_stack_protect || asan_sanitize_stack_p ())
1555 return true;
1557 unsigned int align = TREE_CODE (var) == SSA_NAME
1558 ? TYPE_ALIGN (TREE_TYPE (var))
1559 : DECL_ALIGN (var);
1561 /* We handle "large" alignment via dynamic allocation. We want to handle
1562 this extra complication in only one place, so defer them. */
1563 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1564 return true;
1566 bool ignored = TREE_CODE (var) == SSA_NAME
1567 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1568 : DECL_IGNORED_P (var);
1570 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1571 might be detached from their block and appear at toplevel when we reach
1572 here. We want to coalesce them with variables from other blocks when
1573 the immediate contribution to the frame size would be noticeable. */
1574 if (toplevel && optimize > 0 && ignored && !smallish)
1575 return true;
1577 /* Variables declared in the outermost scope automatically conflict
1578 with every other variable. The only reason to want to defer them
1579 at all is that, after sorting, we can more efficiently pack
1580 small variables in the stack frame. Continue to defer at -O2. */
1581 if (toplevel && optimize < 2)
1582 return false;
1584 /* Without optimization, *most* variables are allocated from the
1585 stack, which makes the quadratic problem large exactly when we
1586 want compilation to proceed as quickly as possible. On the
1587 other hand, we don't want the function's stack frame size to
1588 get completely out of hand. So we avoid adding scalars and
1589 "small" aggregates to the list at all. */
1590 if (optimize == 0 && smallish)
1591 return false;
1593 return true;
1596 /* A subroutine of expand_used_vars. Expand one variable according to
1597 its flavor. Variables to be placed on the stack are not actually
1598 expanded yet, merely recorded.
1599 When REALLY_EXPAND is false, only add stack values to be allocated.
1600 Return the stack usage this variable is supposed to take.
1603 static poly_uint64
1604 expand_one_var (tree var, bool toplevel, bool really_expand)
1606 unsigned int align = BITS_PER_UNIT;
1607 tree origvar = var;
1609 var = SSAVAR (var);
1611 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1613 if (is_global_var (var))
1614 return 0;
1616 /* Because we don't know if VAR will be in register or on stack,
1617 we conservatively assume it will be on stack even if VAR is
1618 eventually put into register after RA pass. For non-automatic
1619 variables, which won't be on stack, we collect alignment of
1620 type and ignore user specified alignment. Similarly for
1621 SSA_NAMEs for which use_register_for_decl returns true. */
1622 if (TREE_STATIC (var)
1623 || DECL_EXTERNAL (var)
1624 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1625 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1626 TYPE_MODE (TREE_TYPE (var)),
1627 TYPE_ALIGN (TREE_TYPE (var)));
1628 else if (DECL_HAS_VALUE_EXPR_P (var)
1629 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1630 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1631 or variables which were assigned a stack slot already by
1632 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1633 changed from the offset chosen to it. */
1634 align = crtl->stack_alignment_estimated;
1635 else
1636 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1638 /* If the variable alignment is very large we'll dynamically allocate
1639 it, which means that in-frame portion is just a pointer. */
1640 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1641 align = GET_MODE_ALIGNMENT (Pmode);
1644 record_alignment_for_reg_var (align);
1646 poly_uint64 size;
1647 if (TREE_CODE (origvar) == SSA_NAME)
1649 gcc_assert (!VAR_P (var)
1650 || (!DECL_EXTERNAL (var)
1651 && !DECL_HAS_VALUE_EXPR_P (var)
1652 && !TREE_STATIC (var)
1653 && TREE_TYPE (var) != error_mark_node
1654 && !DECL_HARD_REGISTER (var)
1655 && really_expand));
1657 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1659 else if (DECL_EXTERNAL (var))
1661 else if (DECL_HAS_VALUE_EXPR_P (var))
1663 else if (TREE_STATIC (var))
1665 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1667 else if (TREE_TYPE (var) == error_mark_node)
1669 if (really_expand)
1670 expand_one_error_var (var);
1672 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1674 if (really_expand)
1676 expand_one_hard_reg_var (var);
1677 if (!DECL_HARD_REGISTER (var))
1678 /* Invalid register specification. */
1679 expand_one_error_var (var);
1682 else if (use_register_for_decl (var))
1684 if (really_expand)
1685 expand_one_register_var (origvar);
1687 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1688 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1690 /* Reject variables which cover more than half of the address-space. */
1691 if (really_expand)
1693 if (DECL_NONLOCAL_FRAME (var))
1694 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1695 "total size of local objects is too large");
1696 else
1697 error_at (DECL_SOURCE_LOCATION (var),
1698 "size of variable %q+D is too large", var);
1699 expand_one_error_var (var);
1702 else if (defer_stack_allocation (var, toplevel))
1703 add_stack_var (origvar, really_expand);
1704 else
1706 if (really_expand)
1708 if (lookup_attribute ("naked",
1709 DECL_ATTRIBUTES (current_function_decl)))
1710 error ("cannot allocate stack for variable %q+D, naked function",
1711 var);
1713 expand_one_stack_var (origvar);
1715 return size;
1717 return 0;
1720 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1721 expanding variables. Those variables that can be put into registers
1722 are allocated pseudos; those that can't are put on the stack.
1724 TOPLEVEL is true if this is the outermost BLOCK. */
1726 static void
1727 expand_used_vars_for_block (tree block, bool toplevel)
1729 tree t;
1731 /* Expand all variables at this level. */
1732 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733 if (TREE_USED (t)
1734 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 || !DECL_NONSHAREABLE (t)))
1736 expand_one_var (t, toplevel, true);
1738 /* Expand all variables at containing levels. */
1739 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1740 expand_used_vars_for_block (t, false);
1743 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1744 and clear TREE_USED on all local variables. */
1746 static void
1747 clear_tree_used (tree block)
1749 tree t;
1751 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1752 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1753 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1754 || !DECL_NONSHAREABLE (t))
1755 TREE_USED (t) = 0;
1757 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1758 clear_tree_used (t);
1761 enum {
1762 SPCT_FLAG_DEFAULT = 1,
1763 SPCT_FLAG_ALL = 2,
1764 SPCT_FLAG_STRONG = 3,
1765 SPCT_FLAG_EXPLICIT = 4
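/* These values mirror the -fstack-protector family of options: DEFAULT for
   plain -fstack-protector, ALL for -fstack-protector-all, STRONG for
   -fstack-protector-strong, and EXPLICIT for -fstack-protector-explicit. */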
1768 /* Examine TYPE and determine a bit mask of the following features. */
1770 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1771 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1772 #define SPCT_HAS_ARRAY 4
1773 #define SPCT_HAS_AGGREGATE 8
1775 static unsigned int
1776 stack_protect_classify_type (tree type)
1778 unsigned int ret = 0;
1779 tree t;
1781 switch (TREE_CODE (type))
1783 case ARRAY_TYPE:
1784 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1785 if (t == char_type_node
1786 || t == signed_char_type_node
1787 || t == unsigned_char_type_node)
1789 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1790 unsigned HOST_WIDE_INT len;
1792 if (!TYPE_SIZE_UNIT (type)
1793 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1794 len = max;
1795 else
1796 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1798 if (len < max)
1799 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1800 else
1801 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1803 else
1804 ret = SPCT_HAS_ARRAY;
1805 break;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE:
1809 case RECORD_TYPE:
1810 ret = SPCT_HAS_AGGREGATE;
1811 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1812 if (TREE_CODE (t) == FIELD_DECL)
1813 ret |= stack_protect_classify_type (TREE_TYPE (t));
1814 break;
1816 default:
1817 break;
1820 return ret;
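/* For instance, assuming the default --param ssp-buffer-size of 8, a local
   char buf[4] would classify as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   whereas char buf[64] would classify as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY. */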
1823 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1824 part of the local stack frame. Remember if we ever return nonzero for
1825 any variable in this function. The return value is the phase number in
1826 which the variable should be allocated. */
1828 static int
1829 stack_protect_decl_phase (tree decl)
1831 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1832 int ret = 0;
1834 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1835 has_short_buffer = true;
1837 if (flag_stack_protect == SPCT_FLAG_ALL
1838 || flag_stack_protect == SPCT_FLAG_STRONG
1839 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1840 && lookup_attribute ("stack_protect",
1841 DECL_ATTRIBUTES (current_function_decl))))
1843 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1844 && !(bits & SPCT_HAS_AGGREGATE))
1845 ret = 1;
1846 else if (bits & SPCT_HAS_ARRAY)
1847 ret = 2;
1849 else
1850 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1852 if (ret)
1853 has_protected_decls = true;
1855 return ret;
1858 /* Two helper routines that check for phase 1 and phase 2. These are used
1859 as callbacks for expand_stack_vars. */
1861 static bool
1862 stack_protect_decl_phase_1 (size_t i)
1864 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1867 static bool
1868 stack_protect_decl_phase_2 (size_t i)
1870 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1873 /* A helper function that checks for the asan phase (with stack protector
1874 it is phase 3). This is used as callback for expand_stack_vars.
1875 Returns true if any of the vars in the partition need to be protected. */
1877 static bool
1878 asan_decl_phase_3 (size_t i)
1880 while (i != EOC)
1882 if (asan_protect_stack_decl (stack_vars[i].decl))
1883 return true;
1884 i = stack_vars[i].next;
1886 return false;
1889 /* Ensure that variables in different stack protection phases conflict
1890 so that they are not merged and share the same stack slot. */
1892 static void
1893 add_stack_protection_conflicts (void)
1895 size_t i, j, n = stack_vars_num;
1896 unsigned char *phase;
1898 phase = XNEWVEC (unsigned char, n);
1899 for (i = 0; i < n; ++i)
1900 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1902 for (i = 0; i < n; ++i)
1904 unsigned char ph_i = phase[i];
1905 for (j = i + 1; j < n; ++j)
1906 if (ph_i != phase[j])
1907 add_stack_var_conflict (i, j);
1910 XDELETEVEC (phase);
1913 /* Create a decl for the guard at the top of the stack frame. */
1915 static void
1916 create_stack_guard (void)
1918 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1919 VAR_DECL, NULL, ptr_type_node);
1920 TREE_THIS_VOLATILE (guard) = 1;
1921 TREE_USED (guard) = 1;
1922 expand_one_stack_var (guard);
1923 crtl->stack_protect_guard = guard;
1926 /* Prepare for expanding variables. */
1927 static void
1928 init_vars_expansion (void)
1930 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1931 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1933 /* A map from decl to stack partition. */
1934 decl_to_stack_part = new hash_map<tree, size_t>;
1936 /* Initialize local stack smashing state. */
1937 has_protected_decls = false;
1938 has_short_buffer = false;
1941 /* Free up stack variable graph data. */
1942 static void
1943 fini_vars_expansion (void)
1945 bitmap_obstack_release (&stack_var_bitmap_obstack);
1946 if (stack_vars)
1947 XDELETEVEC (stack_vars);
1948 if (stack_vars_sorted)
1949 XDELETEVEC (stack_vars_sorted);
1950 stack_vars = NULL;
1951 stack_vars_sorted = NULL;
1952 stack_vars_alloc = stack_vars_num = 0;
1953 delete decl_to_stack_part;
1954 decl_to_stack_part = NULL;
1957 /* Make a fair guess for the size of the stack frame of the function
1958 in NODE. This doesn't have to be exact; the result is only used in
1959 the inline heuristics. So we don't want to run the full stack var
1960 packing algorithm (which is quadratic in the number of stack vars).
1961 Instead, we calculate the total size of all stack vars. This turns
1962 out to be a pretty fair estimate -- packing of stack vars doesn't
1963 happen very often. */
1965 HOST_WIDE_INT
1966 estimated_stack_frame_size (struct cgraph_node *node)
1968 poly_int64 size = 0;
1969 size_t i;
1970 tree var;
1971 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1973 push_cfun (fn);
1975 init_vars_expansion ();
1977 FOR_EACH_LOCAL_DECL (fn, i, var)
1978 if (auto_var_in_fn_p (var, fn->decl))
1979 size += expand_one_var (var, true, false);
1981 if (stack_vars_num > 0)
1983 /* Fake sorting the stack vars for account_stack_vars (). */
1984 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1985 for (i = 0; i < stack_vars_num; ++i)
1986 stack_vars_sorted[i] = i;
1987 size += account_stack_vars ();
1990 fini_vars_expansion ();
1991 pop_cfun ();
1992 return estimated_poly_value (size);
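/* Rough illustration with hypothetical locals: for char a[16], int b[8]
   and double c (assuming 4-byte int), the loop above simply sums
   16 + 32 + 8 = 56 bytes, plus whatever padding account_stack_vars adds
   for alignment, even if real expansion could later overlap a and b
   because their lifetimes never intersect.  */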
1995 /* Helper routine to check if a record or union contains an array field. */
1997 static int
1998 record_or_union_type_has_array_p (const_tree tree_type)
2000 tree fields = TYPE_FIELDS (tree_type);
2001 tree f;
2003 for (f = fields; f; f = DECL_CHAIN (f))
2004 if (TREE_CODE (f) == FIELD_DECL)
2006 tree field_type = TREE_TYPE (f);
2007 if (RECORD_OR_UNION_TYPE_P (field_type)
2008 && record_or_union_type_has_array_p (field_type))
2009 return 1;
2010 if (TREE_CODE (field_type) == ARRAY_TYPE)
2011 return 1;
2013 return 0;
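/* For example, with purely illustrative types:

     struct s1 { int i; char buf[8]; };   -> 1 (direct array field)
     struct s2 { struct s1 inner; };      -> 1 (array found recursively)
     struct s3 { int i; double d; };      -> 0 (no array anywhere)

   Only FIELD_DECLs are walked, so other entries on TYPE_FIELDS do not
   affect the result.  */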
2016 /* Check if the current function has local referenced variables that
2017 have their addresses taken, contain an array, or are arrays. */
2019 static bool
2020 stack_protect_decl_p ()
2022 unsigned i;
2023 tree var;
2025 FOR_EACH_LOCAL_DECL (cfun, i, var)
2026 if (!is_global_var (var))
2028 tree var_type = TREE_TYPE (var);
2029 if (VAR_P (var)
2030 && (TREE_CODE (var_type) == ARRAY_TYPE
2031 || TREE_ADDRESSABLE (var)
2032 || (RECORD_OR_UNION_TYPE_P (var_type)
2033 && record_or_union_type_has_array_p (var_type))))
2034 return true;
2036 return false;
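/* Sketch of what satisfies this predicate (only consulted for
   -fstack-protector-strong, see below):

     char name[32];             yes: ARRAY_TYPE
     struct { char b[4]; } s;   yes: record containing an array
     int x; int *p = &x;        yes: x becomes TREE_ADDRESSABLE
     int y;                     no: scalar whose address is never taken

   A single such local is enough to request the guard.  */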
2039 /* Check if the current function has calls that use a return slot. */
2041 static bool
2042 stack_protect_return_slot_p ()
2044 basic_block bb;
2046 FOR_ALL_BB_FN (bb, cfun)
2047 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2048 !gsi_end_p (gsi); gsi_next (&gsi))
2050 gimple *stmt = gsi_stmt (gsi);
2051 /* This assumes that calls to internal-only functions never
2052 use a return slot. */
2053 if (is_gimple_call (stmt)
2054 && !gimple_call_internal_p (stmt)
2055 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2056 gimple_call_fndecl (stmt)))
2057 return true;
2059 return false;
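/* Illustrative case with hypothetical types: a call such as

     struct big { char data[128]; };
     struct big make_big (void);
     struct big b = make_big ();

   returns its value in memory on typical targets, i.e. through a return
   slot, so this predicate returns true for the calling function.  */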
2062 /* Expand all variables used in the function. */
2064 static rtx_insn *
2065 expand_used_vars (void)
2067 tree var, outer_block = DECL_INITIAL (current_function_decl);
2068 auto_vec<tree> maybe_local_decls;
2069 rtx_insn *var_end_seq = NULL;
2070 unsigned i;
2071 unsigned len;
2072 bool gen_stack_protect_signal = false;
2074 /* Compute the phase of the stack frame for this function. */
2076 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2077 int off = targetm.starting_frame_offset () % align;
2078 frame_phase = off ? align - off : 0;
2081 /* Set TREE_USED on all variables in the local_decls. */
2082 FOR_EACH_LOCAL_DECL (cfun, i, var)
2083 TREE_USED (var) = 1;
2084 /* Clear TREE_USED on all variables associated with a block scope. */
2085 clear_tree_used (DECL_INITIAL (current_function_decl));
2087 init_vars_expansion ();
2089 if (targetm.use_pseudo_pic_reg ())
2090 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2092 for (i = 0; i < SA.map->num_partitions; i++)
2094 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2095 continue;
2097 tree var = partition_to_var (SA.map, i);
2099 gcc_assert (!virtual_operand_p (var));
2101 expand_one_ssa_partition (var);
2104 if (flag_stack_protect == SPCT_FLAG_STRONG)
2105 gen_stack_protect_signal
2106 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2108 /* At this point all variables on the local_decls with TREE_USED
2109 set are not associated with any block scope. Lay them out. */
2111 len = vec_safe_length (cfun->local_decls);
2112 FOR_EACH_LOCAL_DECL (cfun, i, var)
2114 bool expand_now = false;
2116 /* Expanded above already. */
2117 if (is_gimple_reg (var))
2119 TREE_USED (var) = 0;
2120 goto next;
2122 /* We didn't set a block for static or extern because it's hard
2123 to tell the difference between a global variable (re)declared
2124 in a local scope, and one that's really declared there to
2125 begin with. And it doesn't really matter much, since we're
2126 not giving them stack space. Expand them now. */
2127 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2128 expand_now = true;
2130 /* Expand variables not associated with any block now. Those created by
2131 the optimizers could be live anywhere in the function. Those that
2132 could possibly have been scoped originally and detached from their
2133 block will have their allocation deferred so we coalesce them with
2134 others when optimization is enabled. */
2135 else if (TREE_USED (var))
2136 expand_now = true;
2138 /* Finally, mark all variables on the list as used. We'll use
2139 this in a moment when we expand those associated with scopes. */
2140 TREE_USED (var) = 1;
2142 if (expand_now)
2143 expand_one_var (var, true, true);
2145 next:
2146 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2148 rtx rtl = DECL_RTL_IF_SET (var);
2150 /* Keep artificial non-ignored vars in cfun->local_decls
2151 chain until instantiate_decls. */
2152 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2153 add_local_decl (cfun, var);
2154 else if (rtl == NULL_RTX)
2155 /* If rtl isn't set yet, which can happen e.g. with
2156 -fstack-protector, retry before returning from this
2157 function. */
2158 maybe_local_decls.safe_push (var);
2162 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2164 +-----------------+-----------------+
2165 | ...processed... | ...duplicates...|
2166 +-----------------+-----------------+
2167                   ^
2168                   +-- LEN points here.
2170 We just want the duplicates, as those are the artificial
2171 non-ignored vars that we want to keep until instantiate_decls.
2172 Move them down and truncate the array. */
2173 if (!vec_safe_is_empty (cfun->local_decls))
2174 cfun->local_decls->block_remove (0, len);
2176 /* At this point, all variables within the block tree with TREE_USED
2177 set are actually used by the optimized function. Lay them out. */
2178 expand_used_vars_for_block (outer_block, true);
2180 if (stack_vars_num > 0)
2182 add_scope_conflicts ();
2184 /* If stack protection is enabled, we don't share space between
2185 vulnerable data and non-vulnerable data. */
2186 if (flag_stack_protect != 0
2187 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2188 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2189 && lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))))
2191 add_stack_protection_conflicts ();
2193 /* Now that we have collected all stack variables, and have computed a
2194 minimal interference graph, attempt to save some stack space. */
2195 partition_stack_vars ();
2196 if (dump_file)
2197 dump_stack_var_partition ();
2200 switch (flag_stack_protect)
2202 case SPCT_FLAG_ALL:
2203 create_stack_guard ();
2204 break;
2206 case SPCT_FLAG_STRONG:
2207 if (gen_stack_protect_signal
2208 || cfun->calls_alloca || has_protected_decls
2209 || lookup_attribute ("stack_protect",
2210 DECL_ATTRIBUTES (current_function_decl)))
2211 create_stack_guard ();
2212 break;
2214 case SPCT_FLAG_DEFAULT:
2215 if (cfun->calls_alloca || has_protected_decls
2216 || lookup_attribute ("stack_protect",
2217 DECL_ATTRIBUTES (current_function_decl)))
2218 create_stack_guard ();
2219 break;
2221 case SPCT_FLAG_EXPLICIT:
2222 if (lookup_attribute ("stack_protect",
2223 DECL_ATTRIBUTES (current_function_decl)))
2224 create_stack_guard ();
2225 break;
2226 default:
2230 /* Assign rtl to each variable based on these partitions. */
2231 if (stack_vars_num > 0)
2233 struct stack_vars_data data;
2235 data.asan_base = NULL_RTX;
2236 data.asan_alignb = 0;
2238 /* Reorder decls to be protected by iterating over the variables
2239 array multiple times, and allocating out of each phase in turn. */
2240 /* ??? We could probably integrate this into the qsort we did
2241 earlier, such that we naturally see these variables first,
2242 and thus naturally allocate things in the right order. */
2243 if (has_protected_decls)
2245 /* Phase 1 contains only character arrays. */
2246 expand_stack_vars (stack_protect_decl_phase_1, &data);
2248 /* Phase 2 contains other kinds of arrays. */
2249 if (flag_stack_protect == SPCT_FLAG_ALL
2250 || flag_stack_protect == SPCT_FLAG_STRONG
2251 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2252 && lookup_attribute ("stack_protect",
2253 DECL_ATTRIBUTES (current_function_decl))))
2254 expand_stack_vars (stack_protect_decl_phase_2, &data);
2257 if (asan_sanitize_stack_p ())
2258 /* Phase 3, any partitions that need asan protection
2259 in addition to phase 1 and 2. */
2260 expand_stack_vars (asan_decl_phase_3, &data);
2262 /* ASAN description strings don't yet have a syntax for expressing
2263 polynomial offsets. */
2264 HOST_WIDE_INT prev_offset;
2265 if (!data.asan_vec.is_empty ()
2266 && frame_offset.is_constant (&prev_offset))
2268 HOST_WIDE_INT offset, sz, redzonesz;
2269 redzonesz = ASAN_RED_ZONE_SIZE;
2270 sz = data.asan_vec[0] - prev_offset;
2271 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2272 && data.asan_alignb <= 4096
2273 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2274 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2275 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2276 /* Allocating a constant amount of space from a constant
2277 starting offset must give a constant result. */
2278 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2279 .to_constant ());
2280 data.asan_vec.safe_push (prev_offset);
2281 data.asan_vec.safe_push (offset);
2282 /* Leave space for alignment if STRICT_ALIGNMENT. */
2283 if (STRICT_ALIGNMENT)
2284 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2285 << ASAN_SHADOW_SHIFT)
2286 / BITS_PER_UNIT, 1);
2288 var_end_seq
2289 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2290 data.asan_base,
2291 data.asan_alignb,
2292 data.asan_vec.address (),
2293 data.asan_decl_vec.address (),
2294 data.asan_vec.length ());
2297 expand_stack_vars (NULL, &data);
2300 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2301 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2302 virtual_stack_vars_rtx,
2303 var_end_seq);
2305 fini_vars_expansion ();
2307 /* If there were any artificial non-ignored vars without rtl
2308 found earlier, see if deferred stack allocation hasn't assigned
2309 rtl to them. */
2310 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2312 rtx rtl = DECL_RTL_IF_SET (var);
2314 /* Keep artificial non-ignored vars in cfun->local_decls
2315 chain until instantiate_decls. */
2316 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2317 add_local_decl (cfun, var);
2320 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2321 if (STACK_ALIGNMENT_NEEDED)
2323 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2324 if (FRAME_GROWS_DOWNWARD)
2325 frame_offset = aligned_lower_bound (frame_offset, align);
2326 else
2327 frame_offset = aligned_upper_bound (frame_offset, align);
2330 return var_end_seq;
2334 /* If we need to produce a detailed dump, print the tree representation
2335 for STMT to the dump file. SINCE is the last RTX after which the RTL
2336 generated for STMT should have been appended. */
2338 static void
2339 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2341 if (dump_file && (dump_flags & TDF_DETAILS))
2343 fprintf (dump_file, "\n;; ");
2344 print_gimple_stmt (dump_file, stmt, 0,
2345 TDF_SLIM | (dump_flags & TDF_LINENO));
2346 fprintf (dump_file, "\n");
2348 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2352 /* Maps the blocks that do not contain tree labels to rtx labels. */
2354 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2356 /* Returns the label_rtx expression for a label starting basic block BB. */
2358 static rtx_code_label *
2359 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2361 gimple_stmt_iterator gsi;
2362 tree lab;
2364 if (bb->flags & BB_RTL)
2365 return block_label (bb);
2367 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2368 if (elt)
2369 return *elt;
2371 /* Find the tree label if it is present. */
2373 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2375 glabel *lab_stmt;
2377 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2378 if (!lab_stmt)
2379 break;
2381 lab = gimple_label_label (lab_stmt);
2382 if (DECL_NONLOCAL (lab))
2383 break;
2385 return jump_target_rtx (lab);
2388 rtx_code_label *l = gen_label_rtx ();
2389 lab_rtx_for_bb->put (bb, l);
2390 return l;
2394 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2395 of a basic block where we just expanded the conditional at the end,
2396 possibly clean up the CFG and instruction sequence. LAST is the
2397 last instruction before the just emitted jump sequence. */
2399 static void
2400 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2402 /* Special case: when jumpif decides that the condition is
2403 trivial it emits an unconditional jump (and the necessary
2404 barrier). But we still have two edges, and the fallthru one is
2405 wrong. purge_dead_edges would clean this up later. Unfortunately
2406 we have to insert insns (and split edges) before
2407 find_many_sub_basic_blocks and hence before purge_dead_edges.
2408 But splitting edges might create new blocks which depend on the
2409 fact that if there are two edges there's no barrier. So the
2410 barrier would get lost and verify_flow_info would ICE. Instead
2411 of auditing all edge splitters to care for the barrier (which
2412 normally isn't there in a cleaned CFG), fix it here. */
2413 if (BARRIER_P (get_last_insn ()))
2415 rtx_insn *insn;
2416 remove_edge (e);
2417 /* Now we have a single successor block; if we have insns to
2418 insert on the remaining edge, we will potentially insert
2419 them at the end of this block (if the dest block isn't feasible)
2420 in order to avoid splitting the edge. This insertion will take
2421 place in front of the last jump. But we might have emitted
2422 multiple jumps (conditional and one unconditional) to the
2423 same destination. Inserting in front of the last one then
2424 is a problem. See PR 40021. We fix this by deleting all
2425 jumps except the last unconditional one. */
2426 insn = PREV_INSN (get_last_insn ());
2427 /* Make sure we have an unconditional jump. Otherwise we're
2428 confused. */
2429 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2430 for (insn = PREV_INSN (insn); insn != last;)
2432 insn = PREV_INSN (insn);
2433 if (JUMP_P (NEXT_INSN (insn)))
2435 if (!any_condjump_p (NEXT_INSN (insn)))
2437 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2438 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2440 delete_insn (NEXT_INSN (insn));
2446 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2447 Returns a new basic block if we've terminated the current basic
2448 block and created a new one. */
2450 static basic_block
2451 expand_gimple_cond (basic_block bb, gcond *stmt)
2453 basic_block new_bb, dest;
2454 edge true_edge;
2455 edge false_edge;
2456 rtx_insn *last2, *last;
2457 enum tree_code code;
2458 tree op0, op1;
2460 code = gimple_cond_code (stmt);
2461 op0 = gimple_cond_lhs (stmt);
2462 op1 = gimple_cond_rhs (stmt);
2463 /* We're sometimes presented with such code:
2464 D.123_1 = x < y;
2465 if (D.123_1 != 0)
2467 This would expand to two comparisons which then later might
2468 be cleaned up by combine. But some pattern matchers like if-conversion
2469 work better when there's only one compare, so make up for this
2470 here as special exception if TER would have made the same change. */
2471 if (SA.values
2472 && TREE_CODE (op0) == SSA_NAME
2473 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2474 && TREE_CODE (op1) == INTEGER_CST
2475 && ((gimple_cond_code (stmt) == NE_EXPR
2476 && integer_zerop (op1))
2477 || (gimple_cond_code (stmt) == EQ_EXPR
2478 && integer_onep (op1)))
2479 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2481 gimple *second = SSA_NAME_DEF_STMT (op0);
2482 if (gimple_code (second) == GIMPLE_ASSIGN)
2484 enum tree_code code2 = gimple_assign_rhs_code (second);
2485 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2487 code = code2;
2488 op0 = gimple_assign_rhs1 (second);
2489 op1 = gimple_assign_rhs2 (second);
2491 /* If jumps are cheap and the target does not support conditional
2492 compare, turn some more codes into jumpy sequences. */
2493 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2494 && targetm.gen_ccmp_first == NULL)
2496 if ((code2 == BIT_AND_EXPR
2497 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2498 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2499 || code2 == TRUTH_AND_EXPR)
2501 code = TRUTH_ANDIF_EXPR;
2502 op0 = gimple_assign_rhs1 (second);
2503 op1 = gimple_assign_rhs2 (second);
2505 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2507 code = TRUTH_ORIF_EXPR;
2508 op0 = gimple_assign_rhs1 (second);
2509 op1 = gimple_assign_rhs2 (second);
2515 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2516 into (x - C2) * C3 < C4. */
2517 if ((code == EQ_EXPR || code == NE_EXPR)
2518 && TREE_CODE (op0) == SSA_NAME
2519 && TREE_CODE (op1) == INTEGER_CST)
2520 code = maybe_optimize_mod_cmp (code, &op0, &op1);
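/* For instance (unsigned 32-bit x, numbers for illustration only),
   x % 3 == 0 can become x * 0xaaaaaaab <= 0x55555555: multiply by the
   modular inverse of 3 and compare against 0xffffffff / 3, avoiding the
   division entirely.  See maybe_optimize_mod_cmp for the exact
   conditions under which this is done.  */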
2522 last2 = last = get_last_insn ();
2524 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2525 set_curr_insn_location (gimple_location (stmt));
2527 /* These flags have no purpose in RTL land. */
2528 true_edge->flags &= ~EDGE_TRUE_VALUE;
2529 false_edge->flags &= ~EDGE_FALSE_VALUE;
2531 /* We can either have a pure conditional jump with one fallthru edge or
2532 two-way jump that needs to be decomposed into two basic blocks. */
2533 if (false_edge->dest == bb->next_bb)
2535 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2536 true_edge->probability);
2537 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2538 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2539 set_curr_insn_location (true_edge->goto_locus);
2540 false_edge->flags |= EDGE_FALLTHRU;
2541 maybe_cleanup_end_of_block (false_edge, last);
2542 return NULL;
2544 if (true_edge->dest == bb->next_bb)
2546 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2547 false_edge->probability);
2548 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2549 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2550 set_curr_insn_location (false_edge->goto_locus);
2551 true_edge->flags |= EDGE_FALLTHRU;
2552 maybe_cleanup_end_of_block (true_edge, last);
2553 return NULL;
2556 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2557 true_edge->probability);
2558 last = get_last_insn ();
2559 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2560 set_curr_insn_location (false_edge->goto_locus);
2561 emit_jump (label_rtx_for_bb (false_edge->dest));
2563 BB_END (bb) = last;
2564 if (BARRIER_P (BB_END (bb)))
2565 BB_END (bb) = PREV_INSN (BB_END (bb));
2566 update_bb_for_insn (bb);
2568 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2569 dest = false_edge->dest;
2570 redirect_edge_succ (false_edge, new_bb);
2571 false_edge->flags |= EDGE_FALLTHRU;
2572 new_bb->count = false_edge->count ();
2573 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2574 add_bb_to_loop (new_bb, loop);
2575 if (loop->latch == bb
2576 && loop->header == dest)
2577 loop->latch = new_bb;
2578 make_single_succ_edge (new_bb, dest, 0);
2579 if (BARRIER_P (BB_END (new_bb)))
2580 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2581 update_bb_for_insn (new_bb);
2583 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2585 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2587 set_curr_insn_location (true_edge->goto_locus);
2588 true_edge->goto_locus = curr_insn_location ();
2591 return new_bb;
2594 /* Mark all calls that can have a transaction restart. */
2596 static void
2597 mark_transaction_restart_calls (gimple *stmt)
2599 struct tm_restart_node dummy;
2600 tm_restart_node **slot;
2602 if (!cfun->gimple_df->tm_restart)
2603 return;
2605 dummy.stmt = stmt;
2606 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2607 if (slot)
2609 struct tm_restart_node *n = *slot;
2610 tree list = n->label_or_list;
2611 rtx_insn *insn;
2613 for (insn = next_real_insn (get_last_insn ());
2614 !CALL_P (insn);
2615 insn = next_real_insn (insn))
2616 continue;
2618 if (TREE_CODE (list) == LABEL_DECL)
2619 add_reg_note (insn, REG_TM, label_rtx (list));
2620 else
2621 for (; list ; list = TREE_CHAIN (list))
2622 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2626 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2627 statement STMT. */
2629 static void
2630 expand_call_stmt (gcall *stmt)
2632 tree exp, decl, lhs;
2633 bool builtin_p;
2634 size_t i;
2636 if (gimple_call_internal_p (stmt))
2638 expand_internal_call (stmt);
2639 return;
2642 /* If this is a call to a built-in function and it has no effect other
2643 than setting the lhs, try to implement it using an internal function
2644 instead. */
2645 decl = gimple_call_fndecl (stmt);
2646 if (gimple_call_lhs (stmt)
2647 && !gimple_has_side_effects (stmt)
2648 && (optimize || (decl && called_as_built_in (decl))))
2650 internal_fn ifn = replacement_internal_fn (stmt);
2651 if (ifn != IFN_LAST)
2653 expand_internal_call (ifn, stmt);
2654 return;
2658 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2660 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2661 builtin_p = decl && fndecl_built_in_p (decl);
2663 /* If this is not a builtin function, the function type through which the
2664 call is made may be different from the type of the function. */
2665 if (!builtin_p)
2666 CALL_EXPR_FN (exp)
2667 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2668 CALL_EXPR_FN (exp));
2670 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2671 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2673 for (i = 0; i < gimple_call_num_args (stmt); i++)
2675 tree arg = gimple_call_arg (stmt, i);
2676 gimple *def;
2677 /* TER addresses into arguments of builtin functions so we have a
2678 chance to infer more correct alignment information. See PR39954. */
2679 if (builtin_p
2680 && TREE_CODE (arg) == SSA_NAME
2681 && (def = get_gimple_for_ssa_name (arg))
2682 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2683 arg = gimple_assign_rhs1 (def);
2684 CALL_EXPR_ARG (exp, i) = arg;
2687 if (gimple_has_side_effects (stmt))
2688 TREE_SIDE_EFFECTS (exp) = 1;
2690 if (gimple_call_nothrow_p (stmt))
2691 TREE_NOTHROW (exp) = 1;
2693 if (gimple_no_warning_p (stmt))
2694 TREE_NO_WARNING (exp) = 1;
2696 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2697 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2698 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2699 if (decl
2700 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2701 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2702 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2703 else
2704 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2705 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2706 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2707 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2709 /* Ensure RTL is created for debug args. */
2710 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2712 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2713 unsigned int ix;
2714 tree dtemp;
2716 if (debug_args)
2717 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2719 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2720 expand_debug_expr (dtemp);
2724 rtx_insn *before_call = get_last_insn ();
2725 lhs = gimple_call_lhs (stmt);
2726 if (lhs)
2727 expand_assignment (lhs, exp, false);
2728 else
2729 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2731 /* If the gimple call is an indirect call and has 'nocf_check'
2732 attribute find a generated CALL insn to mark it as no
2733 control-flow verification is needed. */
2734 if (gimple_call_nocf_check_p (stmt)
2735 && !gimple_call_fndecl (stmt))
2737 rtx_insn *last = get_last_insn ();
2738 while (!CALL_P (last)
2739 && last != before_call)
2740 last = PREV_INSN (last);
2742 if (last != before_call)
2743 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2746 mark_transaction_restart_calls (stmt);
2750 /* Generate RTL for an asm statement (explicit assembler code).
2751 STRING is a STRING_CST node containing the assembler code text,
2752 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2753 insn is volatile; don't optimize it. */
2755 static void
2756 expand_asm_loc (tree string, int vol, location_t locus)
2758 rtx body;
2760 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2761 ggc_strdup (TREE_STRING_POINTER (string)),
2762 locus);
2764 MEM_VOLATILE_P (body) = vol;
2766 /* Non-empty basic ASM implicitly clobbers memory. */
2767 if (TREE_STRING_LENGTH (string) != 0)
2769 rtx asm_op, clob;
2770 unsigned i, nclobbers;
2771 auto_vec<rtx> input_rvec, output_rvec;
2772 auto_vec<const char *> constraints;
2773 auto_vec<rtx> clobber_rvec;
2774 HARD_REG_SET clobbered_regs;
2775 CLEAR_HARD_REG_SET (clobbered_regs);
2777 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2778 clobber_rvec.safe_push (clob);
2780 if (targetm.md_asm_adjust)
2781 targetm.md_asm_adjust (output_rvec, input_rvec,
2782 constraints, clobber_rvec,
2783 clobbered_regs);
2785 asm_op = body;
2786 nclobbers = clobber_rvec.length ();
2787 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2789 XVECEXP (body, 0, 0) = asm_op;
2790 for (i = 0; i < nclobbers; i++)
2791 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2794 emit_insn (body);
2797 /* Return the number of times character C occurs in string S. */
2798 static int
2799 n_occurrences (int c, const char *s)
2801 int n = 0;
2802 while (*s)
2803 n += (*s++ == c);
2804 return n;
2807 /* A subroutine of expand_asm_operands. Check that all operands have
2808 the same number of alternatives. Return true if so. */
2810 static bool
2811 check_operand_nalternatives (const vec<const char *> &constraints)
2813 unsigned len = constraints.length();
2814 if (len > 0)
2816 int nalternatives = n_occurrences (',', constraints[0]);
2818 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2820 error ("too many alternatives in %<asm%>");
2821 return false;
2824 for (unsigned i = 1; i < len; ++i)
2825 if (n_occurrences (',', constraints[i]) != nalternatives)
2827 error ("operand constraints for %<asm%> differ "
2828 "in number of alternatives");
2829 return false;
2832 return true;
2835 /* Check for overlap between registers marked in CLOBBERED_REGS and
2836 anything inappropriate in T. Emit an error and return true if an
2837 overlapping register variable is found, false if everything is ok. */
2839 static bool
2840 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2842 /* Conflicts between asm-declared register variables and the clobber
2843 list are not allowed. */
2844 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2846 if (overlap)
2848 error ("%<asm%> specifier for variable %qE conflicts with "
2849 "%<asm%> clobber list",
2850 DECL_NAME (overlap));
2852 /* Reset registerness to stop multiple errors emitted for a single
2853 variable. */
2854 DECL_REGISTER (overlap) = 0;
2855 return true;
2858 return false;
2861 /* Check that the given REGNO spanning NREGS is a valid
2862 asm clobber operand. Some HW registers cannot be
2863 saved/restored, hence they should not be clobbered by
2864 asm statements. */
2865 static bool
2866 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2868 bool is_valid = true;
2869 HARD_REG_SET regset;
2871 CLEAR_HARD_REG_SET (regset);
2873 add_range_to_hard_reg_set (&regset, regno, nregs);
2875 /* Clobbering the PIC register is an error. */
2876 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2877 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2879 /* ??? Diagnose during gimplification? */
2880 error ("PIC register clobbered by %qs in %<asm%>", regname);
2881 is_valid = false;
2883 else if (!in_hard_reg_set_p
2884 (accessible_reg_set, reg_raw_mode[regno], regno))
2886 /* ??? Diagnose during gimplification? */
2887 error ("the register %qs cannot be clobbered in %<asm%>"
2888 " for the current target", regname);
2889 is_valid = false;
2892 /* Clobbering the stack pointer register is deprecated. GCC expects
2893 the value of the stack pointer after an asm statement to be the same
2894 as it was before, so no asm can validly clobber the stack pointer in
2895 the usual sense. Adding the stack pointer to the clobber list has
2896 traditionally had some undocumented and somewhat obscure side-effects. */
2897 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
2898 && warning (OPT_Wdeprecated, "listing the stack pointer register"
2899 " %qs in a clobber list is deprecated", regname))
2900 inform (input_location, "the value of the stack pointer after an %<asm%>"
2901 " statement must be the same as it was before the statement");
2903 return is_valid;
2906 /* Generate RTL for an asm statement with arguments.
2907 STRING is the instruction template.
2908 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2909 Each output or input has an expression in the TREE_VALUE and
2910 a tree list in TREE_PURPOSE which in turn contains a constraint
2911 name in TREE_VALUE (or NULL_TREE) and a constraint string
2912 in TREE_PURPOSE.
2913 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2914 that is clobbered by this insn.
2916 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2917 should be the fallthru basic block of the asm goto.
2919 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2920 Some elements of OUTPUTS may be replaced with trees representing temporary
2921 values. The caller should copy those temporary values to the originally
2922 specified lvalues.
2924 VOL nonzero means the insn is volatile; don't optimize it. */
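/* As a concrete, purely illustrative example of the mapping (x86 AT&T
   syntax assumed):

     asm volatile ("add %1, %0" : "+r" (x) : "r" (y) : "cc");

   arrives here with one output ("+r", an in-out operand), one explicit
   input ("r") and one clobber ("cc"); the in-out operand later produces
   an additional matching input whose constraint is just "0".  */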
2926 static void
2927 expand_asm_stmt (gasm *stmt)
2929 class save_input_location
2931 location_t old;
2933 public:
2934 explicit save_input_location(location_t where)
2936 old = input_location;
2937 input_location = where;
2940 ~save_input_location()
2942 input_location = old;
2946 location_t locus = gimple_location (stmt);
2948 if (gimple_asm_input_p (stmt))
2950 const char *s = gimple_asm_string (stmt);
2951 tree string = build_string (strlen (s), s);
2952 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2953 return;
2956 /* There are some legacy diagnostics in here, and this also avoids a
2957 sixth parameter to targetm.md_asm_adjust. */
2958 save_input_location s_i_l(locus);
2960 unsigned noutputs = gimple_asm_noutputs (stmt);
2961 unsigned ninputs = gimple_asm_ninputs (stmt);
2962 unsigned nlabels = gimple_asm_nlabels (stmt);
2963 unsigned i;
2965 /* ??? Diagnose during gimplification? */
2966 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2968 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2969 return;
2972 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2973 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2974 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2976 /* Copy the gimple vectors into new vectors that we can manipulate. */
2978 output_tvec.safe_grow (noutputs);
2979 input_tvec.safe_grow (ninputs);
2980 constraints.safe_grow (noutputs + ninputs);
2982 for (i = 0; i < noutputs; ++i)
2984 tree t = gimple_asm_output_op (stmt, i);
2985 output_tvec[i] = TREE_VALUE (t);
2986 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2988 for (i = 0; i < ninputs; i++)
2990 tree t = gimple_asm_input_op (stmt, i);
2991 input_tvec[i] = TREE_VALUE (t);
2992 constraints[i + noutputs]
2993 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2996 /* ??? Diagnose during gimplification? */
2997 if (! check_operand_nalternatives (constraints))
2998 return;
3000 /* Count the number of meaningful clobbered registers, ignoring what
3001 we would ignore later. */
3002 auto_vec<rtx> clobber_rvec;
3003 HARD_REG_SET clobbered_regs;
3004 CLEAR_HARD_REG_SET (clobbered_regs);
3006 if (unsigned n = gimple_asm_nclobbers (stmt))
3008 clobber_rvec.reserve (n);
3009 for (i = 0; i < n; i++)
3011 tree t = gimple_asm_clobber_op (stmt, i);
3012 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
3013 int nregs, j;
3015 j = decode_reg_name_and_count (regname, &nregs);
3016 if (j < 0)
3018 if (j == -2)
3020 /* ??? Diagnose during gimplification? */
3021 error ("unknown register name %qs in %<asm%>", regname);
3023 else if (j == -4)
3025 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3026 clobber_rvec.safe_push (x);
3028 else
3030 /* Otherwise we should have -1 == empty string
3031 or -3 == cc, which is not a register. */
3032 gcc_assert (j == -1 || j == -3);
3035 else
3036 for (int reg = j; reg < j + nregs; reg++)
3038 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3039 return;
3041 SET_HARD_REG_BIT (clobbered_regs, reg);
3042 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3043 clobber_rvec.safe_push (x);
3048 /* First pass over inputs and outputs checks validity and sets
3049 mark_addressable if needed. */
3050 /* ??? Diagnose during gimplification? */
3052 for (i = 0; i < noutputs; ++i)
3054 tree val = output_tvec[i];
3055 tree type = TREE_TYPE (val);
3056 const char *constraint;
3057 bool is_inout;
3058 bool allows_reg;
3059 bool allows_mem;
3061 /* Try to parse the output constraint. If that fails, there's
3062 no point in going further. */
3063 constraint = constraints[i];
3064 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3065 &allows_mem, &allows_reg, &is_inout))
3066 return;
3068 /* If the output is a hard register, verify it doesn't conflict with
3069 any other operand's possible hard register use. */
3070 if (DECL_P (val)
3071 && REG_P (DECL_RTL (val))
3072 && HARD_REGISTER_P (DECL_RTL (val)))
3074 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3075 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3076 unsigned long match;
3078 /* Verify the other outputs do not use the same hard register. */
3079 for (j = i + 1; j < noutputs; ++j)
3080 if (DECL_P (output_tvec[j])
3081 && REG_P (DECL_RTL (output_tvec[j]))
3082 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3083 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3084 error ("invalid hard register usage between output operands");
3086 /* Verify matching constraint operands use the same hard register
3087 and that the non-matching constraint operands do not use the same
3088 hard register if the output is an early clobber operand. */
3089 for (j = 0; j < ninputs; ++j)
3090 if (DECL_P (input_tvec[j])
3091 && REG_P (DECL_RTL (input_tvec[j]))
3092 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3094 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3095 switch (*constraints[j + noutputs])
3097 case '0': case '1': case '2': case '3': case '4':
3098 case '5': case '6': case '7': case '8': case '9':
3099 match = strtoul (constraints[j + noutputs], NULL, 10);
3100 break;
3101 default:
3102 match = ULONG_MAX;
3103 break;
3105 if (i == match
3106 && output_hregno != input_hregno)
3107 error ("invalid hard register usage between output operand "
3108 "and matching constraint operand");
3109 else if (early_clobber_p
3110 && i != match
3111 && output_hregno == input_hregno)
3112 error ("invalid hard register usage between earlyclobber "
3113 "operand and input operand");
3117 if (! allows_reg
3118 && (allows_mem
3119 || is_inout
3120 || (DECL_P (val)
3121 && REG_P (DECL_RTL (val))
3122 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3123 mark_addressable (val);
3126 for (i = 0; i < ninputs; ++i)
3128 bool allows_reg, allows_mem;
3129 const char *constraint;
3131 constraint = constraints[i + noutputs];
3132 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3133 constraints.address (),
3134 &allows_mem, &allows_reg))
3135 return;
3137 if (! allows_reg && allows_mem)
3138 mark_addressable (input_tvec[i]);
3141 /* Second pass evaluates arguments. */
3143 /* Make sure stack is consistent for asm goto. */
3144 if (nlabels > 0)
3145 do_pending_stack_adjust ();
3146 int old_generating_concat_p = generating_concat_p;
3148 /* Vector of RTX's of evaluated output operands. */
3149 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3150 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3151 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3153 output_rvec.safe_grow (noutputs);
3155 for (i = 0; i < noutputs; ++i)
3157 tree val = output_tvec[i];
3158 tree type = TREE_TYPE (val);
3159 bool is_inout, allows_reg, allows_mem, ok;
3160 rtx op;
3162 ok = parse_output_constraint (&constraints[i], i, ninputs,
3163 noutputs, &allows_mem, &allows_reg,
3164 &is_inout);
3165 gcc_assert (ok);
3167 /* If an output operand is not a decl or indirect ref and our constraint
3168 allows a register, make a temporary to act as an intermediate.
3169 Make the asm insn write into that, then we will copy it to
3170 the real output operand. Likewise for promoted variables. */
3172 generating_concat_p = 0;
3174 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3175 || (DECL_P (val)
3176 && (allows_mem || REG_P (DECL_RTL (val)))
3177 && ! (REG_P (DECL_RTL (val))
3178 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3179 || ! allows_reg
3180 || is_inout
3181 || TREE_ADDRESSABLE (type))
3183 op = expand_expr (val, NULL_RTX, VOIDmode,
3184 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3185 if (MEM_P (op))
3186 op = validize_mem (op);
3188 if (! allows_reg && !MEM_P (op))
3189 error ("output number %d not directly addressable", i);
3190 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3191 || GET_CODE (op) == CONCAT)
3193 rtx old_op = op;
3194 op = gen_reg_rtx (GET_MODE (op));
3196 generating_concat_p = old_generating_concat_p;
3198 if (is_inout)
3199 emit_move_insn (op, old_op);
3201 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3202 emit_move_insn (old_op, op);
3203 after_rtl_seq = get_insns ();
3204 after_rtl_end = get_last_insn ();
3205 end_sequence ();
3208 else
3210 op = assign_temp (type, 0, 1);
3211 op = validize_mem (op);
3212 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3213 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3215 generating_concat_p = old_generating_concat_p;
3217 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3218 expand_assignment (val, make_tree (type, op), false);
3219 after_rtl_seq = get_insns ();
3220 after_rtl_end = get_last_insn ();
3221 end_sequence ();
3223 output_rvec[i] = op;
3225 if (is_inout)
3226 inout_opnum.safe_push (i);
3229 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3230 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3232 input_rvec.safe_grow (ninputs);
3233 input_mode.safe_grow (ninputs);
3235 generating_concat_p = 0;
3237 for (i = 0; i < ninputs; ++i)
3239 tree val = input_tvec[i];
3240 tree type = TREE_TYPE (val);
3241 bool allows_reg, allows_mem, ok;
3242 const char *constraint;
3243 rtx op;
3245 constraint = constraints[i + noutputs];
3246 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3247 constraints.address (),
3248 &allows_mem, &allows_reg);
3249 gcc_assert (ok);
3251 /* EXPAND_INITIALIZER will not generate code for valid initializer
3252 constants, but will still generate code for other types of operand.
3253 This is the behavior we want for constant constraints. */
3254 op = expand_expr (val, NULL_RTX, VOIDmode,
3255 allows_reg ? EXPAND_NORMAL
3256 : allows_mem ? EXPAND_MEMORY
3257 : EXPAND_INITIALIZER);
3259 /* Never pass a CONCAT to an ASM. */
3260 if (GET_CODE (op) == CONCAT)
3261 op = force_reg (GET_MODE (op), op);
3262 else if (MEM_P (op))
3263 op = validize_mem (op);
3265 if (asm_operand_ok (op, constraint, NULL) <= 0)
3267 if (allows_reg && TYPE_MODE (type) != BLKmode)
3268 op = force_reg (TYPE_MODE (type), op);
3269 else if (!allows_mem)
3270 warning (0, "%<asm%> operand %d probably does not match "
3271 "constraints",
3272 i + noutputs);
3273 else if (MEM_P (op))
3275 /* We won't recognize either volatile memory or memory
3276 with a queued address as an available memory_operand
3277 at this point. Ignore it: clearly this *is* a memory. */
3279 else
3280 gcc_unreachable ();
3282 input_rvec[i] = op;
3283 input_mode[i] = TYPE_MODE (type);
3286 /* For in-out operands, copy output rtx to input rtx. */
3287 unsigned ninout = inout_opnum.length();
3288 for (i = 0; i < ninout; i++)
3290 int j = inout_opnum[i];
3291 rtx o = output_rvec[j];
3293 input_rvec.safe_push (o);
3294 input_mode.safe_push (GET_MODE (o));
3296 char buffer[16];
3297 sprintf (buffer, "%d", j);
3298 constraints.safe_push (ggc_strdup (buffer));
3300 ninputs += ninout;
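/* E.g. (sketch, x86 syntax): for asm ("incl %0" : "+r" (x)) the in-out
   operand "+r" (x) was split into output 0 plus an extra input appended
   just above, whose constraint string is simply "0", tying it back to
   output operand 0 by number.  */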
3302 /* Sometimes we wish to automatically clobber registers across an asm.
3303 Case in point is when the i386 backend moved from cc0 to a hard reg --
3304 maintaining source-level compatibility means automatically clobbering
3305 the flags register. */
3306 rtx_insn *after_md_seq = NULL;
3307 if (targetm.md_asm_adjust)
3308 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3309 constraints, clobber_rvec,
3310 clobbered_regs);
3312 /* Do not allow the hook to change the output and input count,
3313 lest it mess up the operand numbering. */
3314 gcc_assert (output_rvec.length() == noutputs);
3315 gcc_assert (input_rvec.length() == ninputs);
3316 gcc_assert (constraints.length() == noutputs + ninputs);
3318 /* But it certainly can adjust the clobbers. */
3319 unsigned nclobbers = clobber_rvec.length ();
3321 /* Third pass checks for easy conflicts. */
3322 /* ??? Why are we doing this on trees instead of rtx. */
3324 bool clobber_conflict_found = 0;
3325 for (i = 0; i < noutputs; ++i)
3326 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3327 clobber_conflict_found = 1;
3328 for (i = 0; i < ninputs - ninout; ++i)
3329 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3330 clobber_conflict_found = 1;
3332 /* Make vectors for the expression-rtx, constraint strings,
3333 and named operands. */
3335 rtvec argvec = rtvec_alloc (ninputs);
3336 rtvec constraintvec = rtvec_alloc (ninputs);
3337 rtvec labelvec = rtvec_alloc (nlabels);
3339 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3340 : GET_MODE (output_rvec[0])),
3341 ggc_strdup (gimple_asm_string (stmt)),
3342 "", 0, argvec, constraintvec,
3343 labelvec, locus);
3344 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3346 for (i = 0; i < ninputs; ++i)
3348 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3349 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3350 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3351 constraints[i + noutputs],
3352 locus);
3355 /* Copy labels to the vector. */
3356 rtx_code_label *fallthru_label = NULL;
3357 if (nlabels > 0)
3359 basic_block fallthru_bb = NULL;
3360 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3361 if (fallthru)
3362 fallthru_bb = fallthru->dest;
3364 for (i = 0; i < nlabels; ++i)
3366 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3367 rtx_insn *r;
3368 /* If asm goto has any labels in the fallthru basic block, use
3369 a label that we emit immediately after the asm goto. Expansion
3370 may insert further instructions into the same basic block after
3371 asm goto and if we don't do this, insertion of instructions on
3372 the fallthru edge might misbehave. See PR58670. */
3373 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3375 if (fallthru_label == NULL_RTX)
3376 fallthru_label = gen_label_rtx ();
3377 r = fallthru_label;
3379 else
3380 r = label_rtx (label);
3381 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3385 /* Now, for each output, construct an rtx
3386 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3387 ARGVEC CONSTRAINTS OPNAMES))
3388 If there is more than one, put them inside a PARALLEL. */
3390 if (nlabels > 0 && nclobbers == 0)
3392 gcc_assert (noutputs == 0);
3393 emit_jump_insn (body);
3395 else if (noutputs == 0 && nclobbers == 0)
3397 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3398 emit_insn (body);
3400 else if (noutputs == 1 && nclobbers == 0)
3402 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3403 emit_insn (gen_rtx_SET (output_rvec[0], body));
3405 else
3407 rtx obody = body;
3408 int num = noutputs;
3410 if (num == 0)
3411 num = 1;
3413 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3415 /* For each output operand, store a SET. */
3416 for (i = 0; i < noutputs; ++i)
3418 rtx src, o = output_rvec[i];
3419 if (i == 0)
3421 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3422 src = obody;
3424 else
3426 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3427 ASM_OPERANDS_TEMPLATE (obody),
3428 constraints[i], i, argvec,
3429 constraintvec, labelvec, locus);
3430 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3432 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3435 /* If there are no outputs (but there are some clobbers)
3436 store the bare ASM_OPERANDS into the PARALLEL. */
3437 if (i == 0)
3438 XVECEXP (body, 0, i++) = obody;
3440 /* Store (clobber REG) for each clobbered register specified. */
3441 for (unsigned j = 0; j < nclobbers; ++j)
3443 rtx clobbered_reg = clobber_rvec[j];
3445 /* Do a sanity check for overlap between clobbers and, respectively,
3446 inputs and outputs that hasn't been handled. Such overlap
3447 should have been detected and reported above. */
3448 if (!clobber_conflict_found && REG_P (clobbered_reg))
3450 /* We test the old body (obody) contents to avoid
3451 tripping over the under-construction body. */
3452 for (unsigned k = 0; k < noutputs; ++k)
3453 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3454 internal_error ("%<asm%> clobber conflict with "
3455 "output operand");
3457 for (unsigned k = 0; k < ninputs - ninout; ++k)
3458 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3459 internal_error ("%<asm%> clobber conflict with "
3460 "input operand");
3463 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3466 if (nlabels > 0)
3467 emit_jump_insn (body);
3468 else
3469 emit_insn (body);
3472 generating_concat_p = old_generating_concat_p;
3474 if (fallthru_label)
3475 emit_label (fallthru_label);
3477 if (after_md_seq)
3478 emit_insn (after_md_seq);
3479 if (after_rtl_seq)
3480 emit_insn (after_rtl_seq);
3482 free_temp_slots ();
3483 crtl->has_asm_statement = 1;
3486 /* Emit code to jump to the address
3487 specified by the pointer expression EXP. */
3489 static void
3490 expand_computed_goto (tree exp)
3492 rtx x = expand_normal (exp);
3494 do_pending_stack_adjust ();
3495 emit_indirect_jump (x);
3498 /* Generate RTL code for a `goto' statement with target label LABEL.
3499 LABEL should be a LABEL_DECL tree node that was or will later be
3500 defined with `expand_label'. */
3502 static void
3503 expand_goto (tree label)
3505 if (flag_checking)
3507 /* Check for a nonlocal goto to a containing function. Should have
3508 gotten translated to __builtin_nonlocal_goto. */
3509 tree context = decl_function_context (label);
3510 gcc_assert (!context || context == current_function_decl);
3513 emit_jump (jump_target_rtx (label));
3516 /* Output a return with no value. */
3518 static void
3519 expand_null_return_1 (void)
3521 clear_pending_stack_adjust ();
3522 do_pending_stack_adjust ();
3523 emit_jump (return_label);
3526 /* Generate RTL to return from the current function, with no value.
3527 (That is, we do not do anything about returning any value.) */
3529 void
3530 expand_null_return (void)
3532 /* If this function was declared to return a value, but we
3533 didn't, clobber the return registers so that they are not
3534 propagated live to the rest of the function. */
3535 clobber_return_register ();
3537 expand_null_return_1 ();
3540 /* Generate RTL to return from the current function, with value VAL. */
3542 static void
3543 expand_value_return (rtx val)
3545 /* Copy the value to the return location unless it's already there. */
3547 tree decl = DECL_RESULT (current_function_decl);
3548 rtx return_reg = DECL_RTL (decl);
3549 if (return_reg != val)
3551 tree funtype = TREE_TYPE (current_function_decl);
3552 tree type = TREE_TYPE (decl);
3553 int unsignedp = TYPE_UNSIGNED (type);
3554 machine_mode old_mode = DECL_MODE (decl);
3555 machine_mode mode;
3556 if (DECL_BY_REFERENCE (decl))
3557 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3558 else
3559 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3561 if (mode != old_mode)
3562 val = convert_modes (mode, old_mode, val, unsignedp);
3564 if (GET_CODE (return_reg) == PARALLEL)
3565 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3566 else
3567 emit_move_insn (return_reg, val);
3570 expand_null_return_1 ();
3573 /* Generate RTL to evaluate the expression RETVAL and return it
3574 from the current function. */
3576 static void
3577 expand_return (tree retval)
3579 rtx result_rtl;
3580 rtx val = 0;
3581 tree retval_rhs;
3583 /* If function wants no value, give it none. */
3584 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3586 expand_normal (retval);
3587 expand_null_return ();
3588 return;
3591 if (retval == error_mark_node)
3593 /* Treat this like a return of no value from a function that
3594 returns a value. */
3595 expand_null_return ();
3596 return;
3598 else if ((TREE_CODE (retval) == MODIFY_EXPR
3599 || TREE_CODE (retval) == INIT_EXPR)
3600 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3601 retval_rhs = TREE_OPERAND (retval, 1);
3602 else
3603 retval_rhs = retval;
3605 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3607 /* If we are returning the RESULT_DECL, then the value has already
3608 been stored into it, so we don't have to do anything special. */
3609 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3610 expand_value_return (result_rtl);
3612 /* If the result is an aggregate that is being returned in one (or more)
3613 registers, load the registers here. */
3615 else if (retval_rhs != 0
3616 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3617 && REG_P (result_rtl))
3619 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3620 if (val)
3622 /* Use the mode of the result value on the return register. */
3623 PUT_MODE (result_rtl, GET_MODE (val));
3624 expand_value_return (val);
3626 else
3627 expand_null_return ();
3629 else if (retval_rhs != 0
3630 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3631 && (REG_P (result_rtl)
3632 || (GET_CODE (result_rtl) == PARALLEL)))
3634 /* Compute the return value into a temporary (usually a pseudo reg). */
3635 val
3636 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3637 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3638 val = force_not_mem (val);
3639 expand_value_return (val);
3641 else
3643 /* No hard reg used; calculate value into hard return reg. */
3644 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3645 expand_value_return (result_rtl);
3649 /* Expand a clobber of LHS. If LHS is stored in a multi-part
3650 register, tell the rtl optimizers that its value is no longer
3651 needed. */
3653 static void
3654 expand_clobber (tree lhs)
3656 if (DECL_P (lhs))
3658 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3659 if (decl_rtl && REG_P (decl_rtl))
3661 machine_mode decl_mode = GET_MODE (decl_rtl);
3662 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3663 REGMODE_NATURAL_SIZE (decl_mode)))
3664 emit_clobber (decl_rtl);
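/* Illustrative, target-dependent case: on a 64-bit target where
   REGMODE_NATURAL_SIZE is the 8-byte word size, a clobber of an
   __int128 variable living in a TImode register reaches emit_clobber
   above, telling the RTL optimizers both halves are dead, while an
   SImode variable is narrow enough to need no explicit clobber.  */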
3669 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3670 STMT that doesn't require special handling for outgoing edges. That
3671 is, no tailcalls and no GIMPLE_COND. */
3673 static void
3674 expand_gimple_stmt_1 (gimple *stmt)
3676 tree op0;
3678 set_curr_insn_location (gimple_location (stmt));
3680 switch (gimple_code (stmt))
3682 case GIMPLE_GOTO:
3683 op0 = gimple_goto_dest (stmt);
3684 if (TREE_CODE (op0) == LABEL_DECL)
3685 expand_goto (op0);
3686 else
3687 expand_computed_goto (op0);
3688 break;
3689 case GIMPLE_LABEL:
3690 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3691 break;
3692 case GIMPLE_NOP:
3693 case GIMPLE_PREDICT:
3694 break;
3695 case GIMPLE_SWITCH:
3697 gswitch *swtch = as_a <gswitch *> (stmt);
3698 if (gimple_switch_num_labels (swtch) == 1)
3699 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3700 else
3701 expand_case (swtch);
3703 break;
3704 case GIMPLE_ASM:
3705 expand_asm_stmt (as_a <gasm *> (stmt));
3706 break;
3707 case GIMPLE_CALL:
3708 expand_call_stmt (as_a <gcall *> (stmt));
3709 break;
3711 case GIMPLE_RETURN:
3713 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3715 if (op0 && op0 != error_mark_node)
3717 tree result = DECL_RESULT (current_function_decl);
3719 /* If we are not returning the current function's RESULT_DECL,
3720 build an assignment to it. */
3721 if (op0 != result)
3723 /* I believe that a function's RESULT_DECL is unique. */
3724 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3726 /* ??? We'd like to use simply expand_assignment here,
3727 but this fails if the value is of BLKmode but the return
3728 decl is a register. expand_return has special handling
3729 for this combination, which eventually should move
3730 to common code. See comments there. Until then, let's
3731 build a modify expression :-/ */
3732 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3733 result, op0);
3737 if (!op0)
3738 expand_null_return ();
3739 else
3740 expand_return (op0);
3742 break;
3744 case GIMPLE_ASSIGN:
3746 gassign *assign_stmt = as_a <gassign *> (stmt);
3747 tree lhs = gimple_assign_lhs (assign_stmt);
3749 /* Tree expand used to fiddle with |= and &= of two bitfield
3750 COMPONENT_REFs here. This can't happen with gimple, the LHS
3751 of binary assigns must be a gimple reg. */
3753 if (TREE_CODE (lhs) != SSA_NAME
3754 || get_gimple_rhs_class (gimple_expr_code (stmt))
3755 == GIMPLE_SINGLE_RHS)
3757 tree rhs = gimple_assign_rhs1 (assign_stmt);
3758 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3759 == GIMPLE_SINGLE_RHS);
3760 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3761 /* Do not put locations on possibly shared trees. */
3762 && !is_gimple_min_invariant (rhs))
3763 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3764 if (TREE_CLOBBER_P (rhs))
3765 /* This is a clobber marking that this LHS is going out of
3766 scope. */
3767 expand_clobber (lhs);
3768 else
3769 expand_assignment (lhs, rhs,
3770 gimple_assign_nontemporal_move_p (
3771 assign_stmt));
3773 else
3775 rtx target, temp;
3776 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3777 struct separate_ops ops;
3778 bool promoted = false;
3780 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3781 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3782 promoted = true;
3784 ops.code = gimple_assign_rhs_code (assign_stmt);
3785 ops.type = TREE_TYPE (lhs);
3786 switch (get_gimple_rhs_class (ops.code))
3788 case GIMPLE_TERNARY_RHS:
3789 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3790 /* Fallthru */
3791 case GIMPLE_BINARY_RHS:
3792 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3793 /* Fallthru */
3794 case GIMPLE_UNARY_RHS:
3795 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3796 break;
3797 default:
3798 gcc_unreachable ();
3800 ops.location = gimple_location (stmt);
3802 /* If we want to use a nontemporal store, force the value to
3803 register first. If we store into a promoted register,
3804 don't directly expand to target. */
3805 temp = nontemporal || promoted ? NULL_RTX : target;
3806 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3807 EXPAND_NORMAL);
3809 if (temp == target)
3811 else if (promoted)
3813 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3814 /* If TEMP is a VOIDmode constant, use convert_modes to make
3815 sure that we properly convert it. */
3816 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3818 temp = convert_modes (GET_MODE (target),
3819 TYPE_MODE (ops.type),
3820 temp, unsignedp);
3821 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3822 GET_MODE (target), temp, unsignedp);
3825 convert_move (SUBREG_REG (target), temp, unsignedp);
3827 else if (nontemporal && emit_storent_insn (target, temp))
3829 else
3831 temp = force_operand (temp, target);
3832 if (temp != target)
3833 emit_move_insn (target, temp);
3837 break;
3839 default:
3840 gcc_unreachable ();
3844 /* Expand one gimple statement STMT and return the last RTL instruction
3845 before any of the newly generated ones.
3847 In addition to generating the necessary RTL instructions this also
3848 sets REG_EH_REGION notes if necessary and sets the current source
3849 location for diagnostics. */
3851 static rtx_insn *
3852 expand_gimple_stmt (gimple *stmt)
3854 location_t saved_location = input_location;
3855 rtx_insn *last = get_last_insn ();
3856 int lp_nr;
3858 gcc_assert (cfun);
3860 /* We need to save and restore the current source location so that errors
3861 discovered during expansion are emitted with the right location. But
3862 it would be better if the diagnostic routines used the source location
3863 embedded in the tree nodes rather than globals. */
3864 if (gimple_has_location (stmt))
3865 input_location = gimple_location (stmt);
3867 expand_gimple_stmt_1 (stmt);
3869 /* Free any temporaries used to evaluate this statement. */
3870 free_temp_slots ();
3872 input_location = saved_location;
3874 /* Mark all insns that may trap. */
3875 lp_nr = lookup_stmt_eh_lp (stmt);
3876 if (lp_nr)
3878 rtx_insn *insn;
3879 for (insn = next_real_insn (last); insn;
3880 insn = next_real_insn (insn))
3882 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3883 /* If we want exceptions for non-call insns, any
3884 may_trap_p instruction may throw. */
3885 && GET_CODE (PATTERN (insn)) != CLOBBER
3886 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
3887 && GET_CODE (PATTERN (insn)) != USE
3888 && insn_could_throw_p (insn))
3889 make_reg_eh_region_note (insn, 0, lp_nr);
3893 return last;
3896 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3897 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3898 generated a tail call (something that might be denied by the ABI
3899 rules governing the call; see calls.c).
3901 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3902 can still reach the rest of BB. The case here is __builtin_sqrt,
3903 where the NaN result goes through the external function (with a
3904 tailcall) and the normal result happens via a sqrt instruction. */
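/* As a rough illustration of that __builtin_sqrt case, a function such as

     double f (double x) { return __builtin_sqrt (x); }

   may expand, on targets with a hardware square-root instruction, into an
   inline sqrt guarded by a check, with a sibling call to the library sqrt
   on the slow path; the label emitted after the sibcall is what lets the
   loop below notice that the block can still fall through.  */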
3906 static basic_block
3907 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3909 rtx_insn *last2, *last;
3910 edge e;
3911 edge_iterator ei;
3912 profile_probability probability;
3914 last2 = last = expand_gimple_stmt (stmt);
3916 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3917 if (CALL_P (last) && SIBLING_CALL_P (last))
3918 goto found;
3920 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3922 *can_fallthru = true;
3923 return NULL;
3925 found:
3926 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3927 Any instructions emitted here are about to be deleted. */
3928 do_pending_stack_adjust ();
3930 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3931 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3932 EH or abnormal edges, we shouldn't have created a tail call in
3933 the first place. So it seems to me we should just be removing
3934 all edges here, or redirecting the existing fallthru edge to
3935 the exit block. */
3937 probability = profile_probability::never ();
3939 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3941 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3943 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3944 e->dest->count -= e->count ();
3945 probability += e->probability;
3946 remove_edge (e);
3948 else
3949 ei_next (&ei);
3952 /* This is somewhat ugly: the call_expr expander often emits instructions
3953 after the sibcall (to perform the function return). These confuse the
3954 find_many_sub_basic_blocks code, so we need to get rid of these. */
3955 last = NEXT_INSN (last);
3956 gcc_assert (BARRIER_P (last));
3958 *can_fallthru = false;
3959 while (NEXT_INSN (last))
3961 /* For instance, the sqrt builtin expander expands an if with a
3962 sibcall in the `then` arm and a label for the `else` arm.  */
3963 if (LABEL_P (NEXT_INSN (last)))
3965 *can_fallthru = true;
3966 break;
3968 delete_insn (NEXT_INSN (last));
3971 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3972 | EDGE_SIBCALL);
3973 e->probability = probability;
3974 BB_END (bb) = last;
3975 update_bb_for_insn (bb);
3977 if (NEXT_INSN (last))
3979 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3981 last = BB_END (bb);
3982 if (BARRIER_P (last))
3983 BB_END (bb) = PREV_INSN (last);
3986 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3988 return bb;
3991 /* Return the difference between the floor and the truncated result of
3992 a signed division by OP1 with remainder MOD. */
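/* A rough worked example, assuming C-style truncating division: for
   -7 / 2 the truncated quotient is -3 with MOD == -1, while the floor
   is -4, so the adjustment is -1; indeed MOD != 0 and
   OP1 / MOD == 2 / -1 < 0.  For an exact division MOD == 0 and the
   adjustment is 0.  */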
3993 static rtx
3994 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3996 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3997 return gen_rtx_IF_THEN_ELSE
3998 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3999 gen_rtx_IF_THEN_ELSE
4000 (mode, gen_rtx_LT (BImode,
4001 gen_rtx_DIV (mode, op1, mod),
4002 const0_rtx),
4003 constm1_rtx, const0_rtx),
4004 const0_rtx);
4007 /* Return the difference between the ceil and the truncated result of
4008 a signed division by OP1 with remainder MOD. */
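/* A rough worked example: for 7 / 2 the truncated quotient is 3 with
   MOD == 1, while the ceiling is 4, so the adjustment is 1 (MOD != 0
   and OP1 / MOD == 2 / 1 > 0); for -7 / 2 both the truncated quotient
   and the ceiling are -3, and the formula below yields 0.  */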
4009 static rtx
4010 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4012 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4013 return gen_rtx_IF_THEN_ELSE
4014 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4015 gen_rtx_IF_THEN_ELSE
4016 (mode, gen_rtx_GT (BImode,
4017 gen_rtx_DIV (mode, op1, mod),
4018 const0_rtx),
4019 const1_rtx, const0_rtx),
4020 const0_rtx);
4023 /* Return the difference between the ceil and the truncated result of
4024 an unsigned division by OP1 with remainder MOD. */
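/* A rough worked example: for 7 / 2 done unsigned, the truncated
   quotient is 3 and the ceiling is 4, and MOD == 1 != 0 gives an
   adjustment of 1; an exact division has MOD == 0 and adjustment 0.  */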
4025 static rtx
4026 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4028 /* (mod != 0 ? 1 : 0) */
4029 return gen_rtx_IF_THEN_ELSE
4030 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4031 const1_rtx, const0_rtx);
4034 /* Return the difference between the rounded and the truncated result
4035 of a signed division by OP1 with remainder MOD. Halfway cases are
4036 rounded away from zero, rather than to the nearest even number. */
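/* A rough worked example: 7 / 2 rounds to 4 against a truncated 3;
   with MOD == 1 we have abs (MOD) >= abs (OP1) - abs (MOD) (1 >= 1)
   and OP1 / MOD > 0, so the adjustment is 1.  Likewise -7 / 2 rounds
   to -4 against -3 and yields -1, while 7 / 3 fails the test
   (1 >= 2 does not hold) and yields 0.  */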
4037 static rtx
4038 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4040 /* (abs (mod) >= abs (op1) - abs (mod)
4041 ? (op1 / mod > 0 ? 1 : -1)
4042 : 0) */
4043 return gen_rtx_IF_THEN_ELSE
4044 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4045 gen_rtx_MINUS (mode,
4046 gen_rtx_ABS (mode, op1),
4047 gen_rtx_ABS (mode, mod))),
4048 gen_rtx_IF_THEN_ELSE
4049 (mode, gen_rtx_GT (BImode,
4050 gen_rtx_DIV (mode, op1, mod),
4051 const0_rtx),
4052 const1_rtx, constm1_rtx),
4053 const0_rtx);
4056 /* Return the difference between the rounded and the truncated result
4057 of an unsigned division by OP1 with remainder MOD. Halfway cases
4058 are rounded away from zero, rather than to the nearest even
4059 number. */
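/* A rough worked example: 7 / 2 done unsigned rounds to 4 against a
   truncated 3, and MOD >= OP1 - MOD (1 >= 1) gives an adjustment of 1,
   whereas 7 / 3 fails the test (1 >= 2 does not hold) and yields 0.  */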
4060 static rtx
4061 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4063 /* (mod >= op1 - mod ? 1 : 0) */
4064 return gen_rtx_IF_THEN_ELSE
4065 (mode, gen_rtx_GE (BImode, mod,
4066 gen_rtx_MINUS (mode, op1, mod)),
4067 const1_rtx, const0_rtx);
4070 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4071 any rtl. */
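/* For instance, on a hypothetical target whose ptr_mode is SImode but
   whose Pmode is DImode, an SImode X is merely wrapped in a ZERO_EXTEND
   or SIGN_EXTEND rtx, as POINTERS_EXTEND_UNSIGNED dictates, instead of
   being converted through emitted instructions.  */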
4073 static rtx
4074 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4075 addr_space_t as)
4077 #ifndef POINTERS_EXTEND_UNSIGNED
4078 gcc_assert (mode == Pmode
4079 || mode == targetm.addr_space.address_mode (as));
4080 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4081 #else
4082 rtx temp;
4084 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4086 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4087 return x;
4089 /* X must have some form of address mode already. */
4090 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4091 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4092 x = lowpart_subreg (mode, x, xmode);
4093 else if (POINTERS_EXTEND_UNSIGNED > 0)
4094 x = gen_rtx_ZERO_EXTEND (mode, x);
4095 else if (!POINTERS_EXTEND_UNSIGNED)
4096 x = gen_rtx_SIGN_EXTEND (mode, x);
4097 else
4099 switch (GET_CODE (x))
4101 case SUBREG:
4102 if ((SUBREG_PROMOTED_VAR_P (x)
4103 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4104 || (GET_CODE (SUBREG_REG (x)) == PLUS
4105 && REG_P (XEXP (SUBREG_REG (x), 0))
4106 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4107 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4108 && GET_MODE (SUBREG_REG (x)) == mode)
4109 return SUBREG_REG (x);
4110 break;
4111 case LABEL_REF:
4112 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4113 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4114 return temp;
4115 case SYMBOL_REF:
4116 temp = shallow_copy_rtx (x);
4117 PUT_MODE (temp, mode);
4118 return temp;
4119 case CONST:
4120 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4121 if (temp)
4122 temp = gen_rtx_CONST (mode, temp);
4123 return temp;
4124 case PLUS:
4125 case MINUS:
4126 if (CONST_INT_P (XEXP (x, 1)))
4128 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4129 if (temp)
4130 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4132 break;
4133 default:
4134 break;
4136 /* Don't know how to express ptr_extend as an operation in debug info. */
4137 return NULL;
4139 #endif /* POINTERS_EXTEND_UNSIGNED */
4141 return x;
4144 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4145 by avoid_deep_ter_for_debug. */
4147 static hash_map<tree, tree> *deep_ter_debug_map;
4149 /* Split too deep TER chains for debug stmts using debug temporaries. */
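/* A rough sketch of the problem this avoids: if x_2 = x_1 + 1 is the
   only real use of x_1, x_3 = x_2 + 1 the only real use of x_2, and so
   on, TER would substitute the whole chain into one huge expression
   when a debug use is expanded.  Once such a chain is more than six
   single-use definitions deep, the intermediate SSA name is instead
   bound to an artificial DEBUG_EXPR_DECL and the chain is cut there.  */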
4151 static void
4152 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4154 use_operand_p use_p;
4155 ssa_op_iter iter;
4156 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4158 tree use = USE_FROM_PTR (use_p);
4159 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4160 continue;
4161 gimple *g = get_gimple_for_ssa_name (use);
4162 if (g == NULL)
4163 continue;
4164 if (depth > 6 && !stmt_ends_bb_p (g))
4166 if (deep_ter_debug_map == NULL)
4167 deep_ter_debug_map = new hash_map<tree, tree>;
4169 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4170 if (vexpr != NULL)
4171 continue;
4172 vexpr = make_node (DEBUG_EXPR_DECL);
4173 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4174 DECL_ARTIFICIAL (vexpr) = 1;
4175 TREE_TYPE (vexpr) = TREE_TYPE (use);
4176 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4177 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4178 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4179 avoid_deep_ter_for_debug (def_temp, 0);
4181 else
4182 avoid_deep_ter_for_debug (g, depth + 1);
4186 /* Return an RTX equivalent to the value of the parameter DECL. */
4188 static rtx
4189 expand_debug_parm_decl (tree decl)
4191 rtx incoming = DECL_INCOMING_RTL (decl);
4193 if (incoming
4194 && GET_MODE (incoming) != BLKmode
4195 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4196 || (MEM_P (incoming)
4197 && REG_P (XEXP (incoming, 0))
4198 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4200 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4202 #ifdef HAVE_window_save
4203 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4204 If the target machine has an explicit window save instruction, the
4205 actual entry value is the corresponding OUTGOING_REGNO instead. */
4206 if (REG_P (incoming)
4207 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4208 incoming
4209 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4210 OUTGOING_REGNO (REGNO (incoming)), 0);
4211 else if (MEM_P (incoming))
4213 rtx reg = XEXP (incoming, 0);
4214 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4216 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4217 incoming = replace_equiv_address_nv (incoming, reg);
4219 else
4220 incoming = copy_rtx (incoming);
4222 #endif
4224 ENTRY_VALUE_EXP (rtl) = incoming;
4225 return rtl;
4228 if (incoming
4229 && GET_MODE (incoming) != BLKmode
4230 && !TREE_ADDRESSABLE (decl)
4231 && MEM_P (incoming)
4232 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4233 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4234 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4235 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4236 return copy_rtx (incoming);
4238 return NULL_RTX;
4241 /* Return an RTX equivalent to the value of the tree expression EXP. */
4243 static rtx
4244 expand_debug_expr (tree exp)
4246 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4247 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4248 machine_mode inner_mode = VOIDmode;
4249 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4250 addr_space_t as;
4251 scalar_int_mode op0_mode, op1_mode, addr_mode;
4253 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4255 case tcc_expression:
4256 switch (TREE_CODE (exp))
4258 case COND_EXPR:
4259 case DOT_PROD_EXPR:
4260 case SAD_EXPR:
4261 case WIDEN_MULT_PLUS_EXPR:
4262 case WIDEN_MULT_MINUS_EXPR:
4263 goto ternary;
4265 case TRUTH_ANDIF_EXPR:
4266 case TRUTH_ORIF_EXPR:
4267 case TRUTH_AND_EXPR:
4268 case TRUTH_OR_EXPR:
4269 case TRUTH_XOR_EXPR:
4270 goto binary;
4272 case TRUTH_NOT_EXPR:
4273 goto unary;
4275 default:
4276 break;
4278 break;
4280 ternary:
4281 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4282 if (!op2)
4283 return NULL_RTX;
4284 /* Fall through. */
4286 binary:
4287 case tcc_binary:
4288 if (mode == BLKmode)
4289 return NULL_RTX;
4290 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4291 if (!op1)
4292 return NULL_RTX;
4293 switch (TREE_CODE (exp))
4295 case LSHIFT_EXPR:
4296 case RSHIFT_EXPR:
4297 case LROTATE_EXPR:
4298 case RROTATE_EXPR:
4299 case WIDEN_LSHIFT_EXPR:
4300 /* Ensure second operand isn't wider than the first one. */
4301 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4302 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4303 && (GET_MODE_UNIT_PRECISION (mode)
4304 < GET_MODE_PRECISION (op1_mode)))
4305 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4306 break;
4307 default:
4308 break;
4310 /* Fall through. */
4312 unary:
4313 case tcc_unary:
4314 if (mode == BLKmode)
4315 return NULL_RTX;
4316 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4317 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4318 if (!op0)
4319 return NULL_RTX;
4320 break;
4322 case tcc_comparison:
4323 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4324 goto binary;
4326 case tcc_type:
4327 case tcc_statement:
4328 gcc_unreachable ();
4330 case tcc_constant:
4331 case tcc_exceptional:
4332 case tcc_declaration:
4333 case tcc_reference:
4334 case tcc_vl_exp:
4335 break;
4338 switch (TREE_CODE (exp))
4340 case STRING_CST:
4341 if (!lookup_constant_def (exp))
4343 if (strlen (TREE_STRING_POINTER (exp)) + 1
4344 != (size_t) TREE_STRING_LENGTH (exp))
4345 return NULL_RTX;
4346 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4347 op0 = gen_rtx_MEM (BLKmode, op0);
4348 set_mem_attributes (op0, exp, 0);
4349 return op0;
4351 /* Fall through. */
4353 case INTEGER_CST:
4354 case REAL_CST:
4355 case FIXED_CST:
4356 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4357 return op0;
4359 case POLY_INT_CST:
4360 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4362 case COMPLEX_CST:
4363 gcc_assert (COMPLEX_MODE_P (mode));
4364 op0 = expand_debug_expr (TREE_REALPART (exp));
4365 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4366 return gen_rtx_CONCAT (mode, op0, op1);
4368 case DEBUG_EXPR_DECL:
4369 op0 = DECL_RTL_IF_SET (exp);
4371 if (op0)
4372 return op0;
4374 op0 = gen_rtx_DEBUG_EXPR (mode);
4375 DEBUG_EXPR_TREE_DECL (op0) = exp;
4376 SET_DECL_RTL (exp, op0);
4378 return op0;
4380 case VAR_DECL:
4381 case PARM_DECL:
4382 case FUNCTION_DECL:
4383 case LABEL_DECL:
4384 case CONST_DECL:
4385 case RESULT_DECL:
4386 op0 = DECL_RTL_IF_SET (exp);
4388 /* This decl was probably optimized away. */
4389 if (!op0
4390 /* At least label RTXen are sometimes replaced by
4391 NOTE_INSN_DELETED_LABEL. Any notes here are not
4392 handled by copy_rtx. */
4393 || NOTE_P (op0))
4395 if (!VAR_P (exp)
4396 || DECL_EXTERNAL (exp)
4397 || !TREE_STATIC (exp)
4398 || !DECL_NAME (exp)
4399 || DECL_HARD_REGISTER (exp)
4400 || DECL_IN_CONSTANT_POOL (exp)
4401 || mode == VOIDmode)
4402 return NULL;
4404 op0 = make_decl_rtl_for_debug (exp);
4405 if (!MEM_P (op0)
4406 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4407 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4408 return NULL;
4410 else
4411 op0 = copy_rtx (op0);
4413 if (GET_MODE (op0) == BLKmode
4414 /* If op0 is not BLKmode, but mode is, adjust_mode
4415 below would ICE. While it is likely a FE bug,
4416 try to be robust here. See PR43166. */
4417 || mode == BLKmode
4418 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4420 gcc_assert (MEM_P (op0));
4421 op0 = adjust_address_nv (op0, mode, 0);
4422 return op0;
4425 /* Fall through. */
4427 adjust_mode:
4428 case PAREN_EXPR:
4429 CASE_CONVERT:
4431 inner_mode = GET_MODE (op0);
4433 if (mode == inner_mode)
4434 return op0;
4436 if (inner_mode == VOIDmode)
4438 if (TREE_CODE (exp) == SSA_NAME)
4439 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4440 else
4441 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4442 if (mode == inner_mode)
4443 return op0;
4446 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4448 if (GET_MODE_UNIT_BITSIZE (mode)
4449 == GET_MODE_UNIT_BITSIZE (inner_mode))
4450 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4451 else if (GET_MODE_UNIT_BITSIZE (mode)
4452 < GET_MODE_UNIT_BITSIZE (inner_mode))
4453 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4454 else
4455 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4457 else if (FLOAT_MODE_P (mode))
4459 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4460 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4461 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4462 else
4463 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4465 else if (FLOAT_MODE_P (inner_mode))
4467 if (unsignedp)
4468 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4469 else
4470 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4472 else if (GET_MODE_UNIT_PRECISION (mode)
4473 == GET_MODE_UNIT_PRECISION (inner_mode))
4474 op0 = lowpart_subreg (mode, op0, inner_mode);
4475 else if (GET_MODE_UNIT_PRECISION (mode)
4476 < GET_MODE_UNIT_PRECISION (inner_mode))
4477 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4478 else if (UNARY_CLASS_P (exp)
4479 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4480 : unsignedp)
4481 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4482 else
4483 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4485 return op0;
4488 case MEM_REF:
4489 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4491 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4492 TREE_OPERAND (exp, 0),
4493 TREE_OPERAND (exp, 1));
4494 if (newexp)
4495 return expand_debug_expr (newexp);
4497 /* FALLTHROUGH */
4498 case INDIRECT_REF:
4499 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4500 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4501 if (!op0)
4502 return NULL;
4504 if (TREE_CODE (exp) == MEM_REF)
4506 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4507 || (GET_CODE (op0) == PLUS
4508 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4509 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4510 Instead just use get_inner_reference. */
4511 goto component_ref;
4513 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4514 poly_int64 offset;
4515 if (!op1 || !poly_int_rtx_p (op1, &offset))
4516 return NULL;
4518 op0 = plus_constant (inner_mode, op0, offset);
4521 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4523 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4524 op0, as);
4525 if (op0 == NULL_RTX)
4526 return NULL;
4528 op0 = gen_rtx_MEM (mode, op0);
4529 set_mem_attributes (op0, exp, 0);
4530 if (TREE_CODE (exp) == MEM_REF
4531 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4532 set_mem_expr (op0, NULL_TREE);
4533 set_mem_addr_space (op0, as);
4535 return op0;
4537 case TARGET_MEM_REF:
4538 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4539 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4540 return NULL;
4542 op0 = expand_debug_expr
4543 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4544 if (!op0)
4545 return NULL;
4547 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4548 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4549 op0, as);
4550 if (op0 == NULL_RTX)
4551 return NULL;
4553 op0 = gen_rtx_MEM (mode, op0);
4555 set_mem_attributes (op0, exp, 0);
4556 set_mem_addr_space (op0, as);
4558 return op0;
4560 component_ref:
4561 case ARRAY_REF:
4562 case ARRAY_RANGE_REF:
4563 case COMPONENT_REF:
4564 case BIT_FIELD_REF:
4565 case REALPART_EXPR:
4566 case IMAGPART_EXPR:
4567 case VIEW_CONVERT_EXPR:
4569 machine_mode mode1;
4570 poly_int64 bitsize, bitpos;
4571 tree offset;
4572 int reversep, volatilep = 0;
4573 tree tem
4574 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4575 &unsignedp, &reversep, &volatilep);
4576 rtx orig_op0;
4578 if (known_eq (bitsize, 0))
4579 return NULL;
4581 orig_op0 = op0 = expand_debug_expr (tem);
4583 if (!op0)
4584 return NULL;
4586 if (offset)
4588 machine_mode addrmode, offmode;
4590 if (!MEM_P (op0))
4591 return NULL;
4593 op0 = XEXP (op0, 0);
4594 addrmode = GET_MODE (op0);
4595 if (addrmode == VOIDmode)
4596 addrmode = Pmode;
4598 op1 = expand_debug_expr (offset);
4599 if (!op1)
4600 return NULL;
4602 offmode = GET_MODE (op1);
4603 if (offmode == VOIDmode)
4604 offmode = TYPE_MODE (TREE_TYPE (offset));
4606 if (addrmode != offmode)
4607 op1 = lowpart_subreg (addrmode, op1, offmode);
4609 /* Don't use offset_address here; we don't need a
4610 recognizable address, and we don't want to generate
4611 code. */
4612 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4613 op0, op1));
4616 if (MEM_P (op0))
4618 if (mode1 == VOIDmode)
4620 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4621 return NULL;
4622 /* Bitfield. */
4623 mode1 = smallest_int_mode_for_size (bitsize);
4625 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4626 if (maybe_ne (bytepos, 0))
4628 op0 = adjust_address_nv (op0, mode1, bytepos);
4629 bitpos = num_trailing_bits (bitpos);
4631 else if (known_eq (bitpos, 0)
4632 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4633 op0 = adjust_address_nv (op0, mode, 0);
4634 else if (GET_MODE (op0) != mode1)
4635 op0 = adjust_address_nv (op0, mode1, 0);
4636 else
4637 op0 = copy_rtx (op0);
4638 if (op0 == orig_op0)
4639 op0 = shallow_copy_rtx (op0);
4640 set_mem_attributes (op0, exp, 0);
4643 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4644 return op0;
4646 if (maybe_lt (bitpos, 0))
4647 return NULL;
4649 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4650 return NULL;
4652 poly_int64 bytepos;
4653 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4654 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4656 machine_mode opmode = GET_MODE (op0);
4658 if (opmode == VOIDmode)
4659 opmode = TYPE_MODE (TREE_TYPE (tem));
4661 /* This condition may hold if we're expanding the address
4662 right past the end of an array that turned out not to
4663 be addressable (i.e., the address was only computed in
4664 debug stmts). The gen_subreg below would rightfully
4665 crash, and the address doesn't really exist, so just
4666 drop it. */
4667 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4668 return NULL;
4670 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4671 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4674 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4675 && TYPE_UNSIGNED (TREE_TYPE (exp))
4676 ? SIGN_EXTRACT
4677 : ZERO_EXTRACT, mode,
4678 GET_MODE (op0) != VOIDmode
4679 ? GET_MODE (op0)
4680 : TYPE_MODE (TREE_TYPE (tem)),
4681 op0, gen_int_mode (bitsize, word_mode),
4682 gen_int_mode (bitpos, word_mode));
4685 case ABS_EXPR:
4686 case ABSU_EXPR:
4687 return simplify_gen_unary (ABS, mode, op0, mode);
4689 case NEGATE_EXPR:
4690 return simplify_gen_unary (NEG, mode, op0, mode);
4692 case BIT_NOT_EXPR:
4693 return simplify_gen_unary (NOT, mode, op0, mode);
4695 case FLOAT_EXPR:
4696 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4697 0)))
4698 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4699 inner_mode);
4701 case FIX_TRUNC_EXPR:
4702 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4703 inner_mode);
4705 case POINTER_PLUS_EXPR:
4706 /* For the rare target where pointers are not the same size as
4707 size_t, we need to check for mis-matched modes and correct
4708 the addend. */
4709 if (op0 && op1
4710 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4711 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4712 && op0_mode != op1_mode)
4714 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4715 /* If OP0 is a partial mode, then we must truncate, even
4716 if it has the same bitsize as OP1 as GCC's
4717 representation of partial modes is opaque. */
4718 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4719 && (GET_MODE_BITSIZE (op0_mode)
4720 == GET_MODE_BITSIZE (op1_mode))))
4721 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4722 else
4723 /* We always sign-extend, regardless of the signedness of
4724 the operand, because the operand is always unsigned
4725 here even if the original C expression is signed. */
4726 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4728 /* Fall through. */
4729 case PLUS_EXPR:
4730 return simplify_gen_binary (PLUS, mode, op0, op1);
4732 case MINUS_EXPR:
4733 case POINTER_DIFF_EXPR:
4734 return simplify_gen_binary (MINUS, mode, op0, op1);
4736 case MULT_EXPR:
4737 return simplify_gen_binary (MULT, mode, op0, op1);
4739 case RDIV_EXPR:
4740 case TRUNC_DIV_EXPR:
4741 case EXACT_DIV_EXPR:
4742 if (unsignedp)
4743 return simplify_gen_binary (UDIV, mode, op0, op1);
4744 else
4745 return simplify_gen_binary (DIV, mode, op0, op1);
4747 case TRUNC_MOD_EXPR:
4748 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4750 case FLOOR_DIV_EXPR:
4751 if (unsignedp)
4752 return simplify_gen_binary (UDIV, mode, op0, op1);
4753 else
4755 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4756 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4757 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4758 return simplify_gen_binary (PLUS, mode, div, adj);
4761 case FLOOR_MOD_EXPR:
4762 if (unsignedp)
4763 return simplify_gen_binary (UMOD, mode, op0, op1);
4764 else
4766 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4767 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4768 adj = simplify_gen_unary (NEG, mode,
4769 simplify_gen_binary (MULT, mode, adj, op1),
4770 mode);
4771 return simplify_gen_binary (PLUS, mode, mod, adj);
4774 case CEIL_DIV_EXPR:
4775 if (unsignedp)
4777 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4778 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4779 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4780 return simplify_gen_binary (PLUS, mode, div, adj);
4782 else
4784 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4785 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4786 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4787 return simplify_gen_binary (PLUS, mode, div, adj);
4790 case CEIL_MOD_EXPR:
4791 if (unsignedp)
4793 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4794 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4795 adj = simplify_gen_unary (NEG, mode,
4796 simplify_gen_binary (MULT, mode, adj, op1),
4797 mode);
4798 return simplify_gen_binary (PLUS, mode, mod, adj);
4800 else
4802 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4803 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4804 adj = simplify_gen_unary (NEG, mode,
4805 simplify_gen_binary (MULT, mode, adj, op1),
4806 mode);
4807 return simplify_gen_binary (PLUS, mode, mod, adj);
4810 case ROUND_DIV_EXPR:
4811 if (unsignedp)
4813 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4814 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4815 rtx adj = round_udiv_adjust (mode, mod, op1);
4816 return simplify_gen_binary (PLUS, mode, div, adj);
4818 else
4820 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4821 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4822 rtx adj = round_sdiv_adjust (mode, mod, op1);
4823 return simplify_gen_binary (PLUS, mode, div, adj);
4826 case ROUND_MOD_EXPR:
4827 if (unsignedp)
4829 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4830 rtx adj = round_udiv_adjust (mode, mod, op1);
4831 adj = simplify_gen_unary (NEG, mode,
4832 simplify_gen_binary (MULT, mode, adj, op1),
4833 mode);
4834 return simplify_gen_binary (PLUS, mode, mod, adj);
4836 else
4838 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4839 rtx adj = round_sdiv_adjust (mode, mod, op1);
4840 adj = simplify_gen_unary (NEG, mode,
4841 simplify_gen_binary (MULT, mode, adj, op1),
4842 mode);
4843 return simplify_gen_binary (PLUS, mode, mod, adj);
4846 case LSHIFT_EXPR:
4847 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4849 case RSHIFT_EXPR:
4850 if (unsignedp)
4851 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4852 else
4853 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4855 case LROTATE_EXPR:
4856 return simplify_gen_binary (ROTATE, mode, op0, op1);
4858 case RROTATE_EXPR:
4859 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4861 case MIN_EXPR:
4862 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4864 case MAX_EXPR:
4865 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4867 case BIT_AND_EXPR:
4868 case TRUTH_AND_EXPR:
4869 return simplify_gen_binary (AND, mode, op0, op1);
4871 case BIT_IOR_EXPR:
4872 case TRUTH_OR_EXPR:
4873 return simplify_gen_binary (IOR, mode, op0, op1);
4875 case BIT_XOR_EXPR:
4876 case TRUTH_XOR_EXPR:
4877 return simplify_gen_binary (XOR, mode, op0, op1);
4879 case TRUTH_ANDIF_EXPR:
4880 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4882 case TRUTH_ORIF_EXPR:
4883 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4885 case TRUTH_NOT_EXPR:
4886 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4888 case LT_EXPR:
4889 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4890 op0, op1);
4892 case LE_EXPR:
4893 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4894 op0, op1);
4896 case GT_EXPR:
4897 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4898 op0, op1);
4900 case GE_EXPR:
4901 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4902 op0, op1);
4904 case EQ_EXPR:
4905 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4907 case NE_EXPR:
4908 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4910 case UNORDERED_EXPR:
4911 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4913 case ORDERED_EXPR:
4914 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4916 case UNLT_EXPR:
4917 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4919 case UNLE_EXPR:
4920 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4922 case UNGT_EXPR:
4923 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4925 case UNGE_EXPR:
4926 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4928 case UNEQ_EXPR:
4929 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4931 case LTGT_EXPR:
4932 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4934 case COND_EXPR:
4935 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4937 case COMPLEX_EXPR:
4938 gcc_assert (COMPLEX_MODE_P (mode));
4939 if (GET_MODE (op0) == VOIDmode)
4940 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4941 if (GET_MODE (op1) == VOIDmode)
4942 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4943 return gen_rtx_CONCAT (mode, op0, op1);
4945 case CONJ_EXPR:
4946 if (GET_CODE (op0) == CONCAT)
4947 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4948 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4949 XEXP (op0, 1),
4950 GET_MODE_INNER (mode)));
4951 else
4953 scalar_mode imode = GET_MODE_INNER (mode);
4954 rtx re, im;
4956 if (MEM_P (op0))
4958 re = adjust_address_nv (op0, imode, 0);
4959 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4961 else
4963 scalar_int_mode ifmode;
4964 scalar_int_mode ihmode;
4965 rtx halfsize;
4966 if (!int_mode_for_mode (mode).exists (&ifmode)
4967 || !int_mode_for_mode (imode).exists (&ihmode))
4968 return NULL;
4969 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4970 re = op0;
4971 if (mode != ifmode)
4972 re = gen_rtx_SUBREG (ifmode, re, 0);
4973 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4974 if (imode != ihmode)
4975 re = gen_rtx_SUBREG (imode, re, 0);
4976 im = copy_rtx (op0);
4977 if (mode != ifmode)
4978 im = gen_rtx_SUBREG (ifmode, im, 0);
4979 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4980 if (imode != ihmode)
4981 im = gen_rtx_SUBREG (imode, im, 0);
4983 im = gen_rtx_NEG (imode, im);
4984 return gen_rtx_CONCAT (mode, re, im);
4987 case ADDR_EXPR:
4988 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4989 if (!op0 || !MEM_P (op0))
4991 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4992 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4993 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4994 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4995 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4996 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4998 if (handled_component_p (TREE_OPERAND (exp, 0)))
5000 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
5001 bool reverse;
5002 tree decl
5003 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
5004 &bitsize, &maxsize, &reverse);
5005 if ((VAR_P (decl)
5006 || TREE_CODE (decl) == PARM_DECL
5007 || TREE_CODE (decl) == RESULT_DECL)
5008 && (!TREE_ADDRESSABLE (decl)
5009 || target_for_debug_bind (decl))
5010 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5011 && known_gt (bitsize, 0)
5012 && known_eq (bitsize, maxsize))
5014 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5015 return plus_constant (mode, base, byteoffset);
5019 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5020 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5021 == ADDR_EXPR)
5023 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5024 0));
5025 if (op0 != NULL
5026 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5027 || (GET_CODE (op0) == PLUS
5028 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5029 && CONST_INT_P (XEXP (op0, 1)))))
5031 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5032 1));
5033 poly_int64 offset;
5034 if (!op1 || !poly_int_rtx_p (op1, &offset))
5035 return NULL;
5037 return plus_constant (mode, op0, offset);
5041 return NULL;
5044 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5045 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5046 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5048 return op0;
5050 case VECTOR_CST:
5052 unsigned HOST_WIDE_INT i, nelts;
5054 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5055 return NULL;
5057 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5059 for (i = 0; i < nelts; ++i)
5061 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5062 if (!op1)
5063 return NULL;
5064 XVECEXP (op0, 0, i) = op1;
5067 return op0;
5070 case CONSTRUCTOR:
5071 if (TREE_CLOBBER_P (exp))
5072 return NULL;
5073 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5075 unsigned i;
5076 unsigned HOST_WIDE_INT nelts;
5077 tree val;
5079 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5080 goto flag_unsupported;
5082 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5084 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5086 op1 = expand_debug_expr (val);
5087 if (!op1)
5088 return NULL;
5089 XVECEXP (op0, 0, i) = op1;
5092 if (i < nelts)
5094 op1 = expand_debug_expr
5095 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5097 if (!op1)
5098 return NULL;
5100 for (; i < nelts; i++)
5101 XVECEXP (op0, 0, i) = op1;
5104 return op0;
5106 else
5107 goto flag_unsupported;
5109 case CALL_EXPR:
5110 /* ??? Maybe handle some builtins? */
5111 return NULL;
5113 case SSA_NAME:
5115 gimple *g = get_gimple_for_ssa_name (exp);
5116 if (g)
5118 tree t = NULL_TREE;
5119 if (deep_ter_debug_map)
5121 tree *slot = deep_ter_debug_map->get (exp);
5122 if (slot)
5123 t = *slot;
5125 if (t == NULL_TREE)
5126 t = gimple_assign_rhs_to_tree (g);
5127 op0 = expand_debug_expr (t);
5128 if (!op0)
5129 return NULL;
5131 else
5133 /* If this is a reference to the incoming value of a
5134 parameter that is never used in the code, or whose
5135 incoming value is never used in the code, use the
5136 PARM_DECL's DECL_RTL if set.  */
5137 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5138 && SSA_NAME_VAR (exp)
5139 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5140 && has_zero_uses (exp))
5142 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5143 if (op0)
5144 goto adjust_mode;
5145 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5146 if (op0)
5147 goto adjust_mode;
5150 int part = var_to_partition (SA.map, exp);
5152 if (part == NO_PARTITION)
5153 return NULL;
5155 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5157 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5159 goto adjust_mode;
5162 case ERROR_MARK:
5163 return NULL;
5165 /* Vector codes. For most of these we don't have corresponding rtl codes. */
5166 case REALIGN_LOAD_EXPR:
5167 case VEC_COND_EXPR:
5168 case VEC_PACK_FIX_TRUNC_EXPR:
5169 case VEC_PACK_FLOAT_EXPR:
5170 case VEC_PACK_SAT_EXPR:
5171 case VEC_PACK_TRUNC_EXPR:
5172 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5173 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5174 case VEC_UNPACK_FLOAT_HI_EXPR:
5175 case VEC_UNPACK_FLOAT_LO_EXPR:
5176 case VEC_UNPACK_HI_EXPR:
5177 case VEC_UNPACK_LO_EXPR:
5178 case VEC_WIDEN_MULT_HI_EXPR:
5179 case VEC_WIDEN_MULT_LO_EXPR:
5180 case VEC_WIDEN_MULT_EVEN_EXPR:
5181 case VEC_WIDEN_MULT_ODD_EXPR:
5182 case VEC_WIDEN_LSHIFT_HI_EXPR:
5183 case VEC_WIDEN_LSHIFT_LO_EXPR:
5184 case VEC_PERM_EXPR:
5185 case VEC_DUPLICATE_EXPR:
5186 case VEC_SERIES_EXPR:
5187 return NULL;
5189 /* Misc codes. */
5190 case ADDR_SPACE_CONVERT_EXPR:
5191 case FIXED_CONVERT_EXPR:
5192 case OBJ_TYPE_REF:
5193 case WITH_SIZE_EXPR:
5194 case BIT_INSERT_EXPR:
5195 return NULL;
5197 case DOT_PROD_EXPR:
5198 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5199 && SCALAR_INT_MODE_P (mode))
5202 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5203 0)))
5204 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5205 inner_mode);
5207 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5208 1)))
5209 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5210 inner_mode);
5211 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5212 return simplify_gen_binary (PLUS, mode, op0, op2);
5214 return NULL;
5216 case WIDEN_MULT_EXPR:
5217 case WIDEN_MULT_PLUS_EXPR:
5218 case WIDEN_MULT_MINUS_EXPR:
5219 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5220 && SCALAR_INT_MODE_P (mode))
5222 inner_mode = GET_MODE (op0);
5223 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5224 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5225 else
5226 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5227 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5228 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5229 else
5230 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5231 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5232 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5233 return op0;
5234 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5235 return simplify_gen_binary (PLUS, mode, op0, op2);
5236 else
5237 return simplify_gen_binary (MINUS, mode, op2, op0);
5239 return NULL;
5241 case MULT_HIGHPART_EXPR:
5242 /* ??? Similar to the above. */
5243 return NULL;
5245 case WIDEN_SUM_EXPR:
5246 case WIDEN_LSHIFT_EXPR:
5247 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5248 && SCALAR_INT_MODE_P (mode))
5251 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5252 0)))
5253 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5254 inner_mode);
5255 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5256 ? ASHIFT : PLUS, mode, op0, op1);
5258 return NULL;
5260 default:
5261 flag_unsupported:
5262 if (flag_checking)
5264 debug_tree (exp);
5265 gcc_unreachable ();
5267 return NULL;
5271 /* Return an RTX equivalent to the source bind value of the tree expression
5272 EXP. */
5274 static rtx
5275 expand_debug_source_expr (tree exp)
5277 rtx op0 = NULL_RTX;
5278 machine_mode mode = VOIDmode, inner_mode;
5280 switch (TREE_CODE (exp))
5282 case VAR_DECL:
5283 if (DECL_ABSTRACT_ORIGIN (exp))
5284 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5285 break;
5286 case PARM_DECL:
5288 mode = DECL_MODE (exp);
5289 op0 = expand_debug_parm_decl (exp);
5290 if (op0)
5291 break;
5292 /* See whether this is an argument that has been completely
5293 optimized out.  */
5294 if (!DECL_RTL_SET_P (exp)
5295 && !DECL_INCOMING_RTL (exp)
5296 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5298 tree aexp = DECL_ORIGIN (exp);
5299 if (DECL_CONTEXT (aexp)
5300 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5302 vec<tree, va_gc> **debug_args;
5303 unsigned int ix;
5304 tree ddecl;
5305 debug_args = decl_debug_args_lookup (current_function_decl);
5306 if (debug_args != NULL)
5308 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5309 ix += 2)
5310 if (ddecl == aexp)
5311 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5315 break;
5317 default:
5318 break;
5321 if (op0 == NULL_RTX)
5322 return NULL_RTX;
5324 inner_mode = GET_MODE (op0);
5325 if (mode == inner_mode)
5326 return op0;
5328 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5330 if (GET_MODE_UNIT_BITSIZE (mode)
5331 == GET_MODE_UNIT_BITSIZE (inner_mode))
5332 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5333 else if (GET_MODE_UNIT_BITSIZE (mode)
5334 < GET_MODE_UNIT_BITSIZE (inner_mode))
5335 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5336 else
5337 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5339 else if (FLOAT_MODE_P (mode))
5340 gcc_unreachable ();
5341 else if (FLOAT_MODE_P (inner_mode))
5343 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5344 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5345 else
5346 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5348 else if (GET_MODE_UNIT_PRECISION (mode)
5349 == GET_MODE_UNIT_PRECISION (inner_mode))
5350 op0 = lowpart_subreg (mode, op0, inner_mode);
5351 else if (GET_MODE_UNIT_PRECISION (mode)
5352 < GET_MODE_UNIT_PRECISION (inner_mode))
5353 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5354 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5355 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5356 else
5357 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5359 return op0;
5362 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5363 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5364 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
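/* As a rough illustration under that depth accounting, in a location
   such as (plus (mult (plus (mult (plus (reg) (reg)) ...) ...) ...) ...)
   the innermost (plus ...) sits below the four allowed nesting levels,
   so it is bound to a fresh DEBUG_EXPR by a debug bind insn emitted
   before INSN, and the location refers to that DEBUG_EXPR instead.  */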
5366 static void
5367 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5369 rtx exp = *exp_p;
5371 if (exp == NULL_RTX)
5372 return;
5374 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5375 return;
5377 if (depth == 4)
5379 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5380 rtx dval = make_debug_expr_from_rtl (exp);
5382 /* Emit a debug bind insn before INSN. */
5383 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5384 DEBUG_EXPR_TREE_DECL (dval), exp,
5385 VAR_INIT_STATUS_INITIALIZED);
5387 emit_debug_insn_before (bind, insn);
5388 *exp_p = dval;
5389 return;
5392 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5393 int i, j;
5394 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5395 switch (*format_ptr++)
5397 case 'e':
5398 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5399 break;
5401 case 'E':
5402 case 'V':
5403 for (j = 0; j < XVECLEN (exp, i); j++)
5404 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5405 break;
5407 default:
5408 break;
5412 /* Expand the _LOCs in debug insns. We run this after expanding all
5413 regular insns, so that any variables referenced in the function
5414 will have their DECL_RTLs set. */
5416 static void
5417 expand_debug_locations (void)
5419 rtx_insn *insn;
5420 rtx_insn *last = get_last_insn ();
5421 int save_strict_alias = flag_strict_aliasing;
5423 /* Creating new alias sets while setting up memory attributes causes
5424 -fcompare-debug failures, even though it doesn't bring about any
5425 codegen changes. */
5426 flag_strict_aliasing = 0;
5428 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5429 if (DEBUG_BIND_INSN_P (insn))
5431 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5432 rtx val;
5433 rtx_insn *prev_insn, *insn2;
5434 machine_mode mode;
5436 if (value == NULL_TREE)
5437 val = NULL_RTX;
5438 else
5440 if (INSN_VAR_LOCATION_STATUS (insn)
5441 == VAR_INIT_STATUS_UNINITIALIZED)
5442 val = expand_debug_source_expr (value);
5443 /* The avoid_deep_ter_for_debug function inserts
5444 debug bind stmts after SSA_NAME definition, with the
5445 SSA_NAME as the whole bind location. Temporarily disable
5446 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5447 being defined in this DEBUG_INSN. */
5448 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5450 tree *slot = deep_ter_debug_map->get (value);
5451 if (slot)
5453 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5454 *slot = NULL_TREE;
5455 else
5456 slot = NULL;
5458 val = expand_debug_expr (value);
5459 if (slot)
5460 *slot = INSN_VAR_LOCATION_DECL (insn);
5462 else
5463 val = expand_debug_expr (value);
5464 gcc_assert (last == get_last_insn ());
5467 if (!val)
5468 val = gen_rtx_UNKNOWN_VAR_LOC ();
5469 else
5471 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5473 gcc_assert (mode == GET_MODE (val)
5474 || (GET_MODE (val) == VOIDmode
5475 && (CONST_SCALAR_INT_P (val)
5476 || GET_CODE (val) == CONST_FIXED
5477 || GET_CODE (val) == LABEL_REF)));
5480 INSN_VAR_LOCATION_LOC (insn) = val;
5481 prev_insn = PREV_INSN (insn);
5482 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5483 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5486 flag_strict_aliasing = save_strict_alias;
5489 /* Swap the operands of commutative operations so that the more
5490 expensive one is expanded first. */
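/* A rough example of the intent: in

     b_4 = x_1 * y_2;   <-- TERed, relatively expensive
     t_5 = a_9 + b_4;   <-- a_9 has no replaceable definition

   the cost lattice makes b_4 the costlier operand of the commutative
   PLUS, so the two operands of t_5 are swapped and the b_4 expression
   is expanded first.  */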
5492 static void
5493 reorder_operands (basic_block bb)
5495 unsigned int *lattice; /* Hold cost of each statement. */
5496 unsigned int i = 0, n = 0;
5497 gimple_stmt_iterator gsi;
5498 gimple_seq stmts;
5499 gimple *stmt;
5500 bool swap;
5501 tree op0, op1;
5502 ssa_op_iter iter;
5503 use_operand_p use_p;
5504 gimple *def0, *def1;
5506 /* Compute cost of each statement using estimate_num_insns. */
5507 stmts = bb_seq (bb);
5508 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5510 stmt = gsi_stmt (gsi);
5511 if (!is_gimple_debug (stmt))
5512 gimple_set_uid (stmt, n++);
5514 lattice = XNEWVEC (unsigned int, n);
5515 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5517 unsigned cost;
5518 stmt = gsi_stmt (gsi);
5519 if (is_gimple_debug (stmt))
5520 continue;
5521 cost = estimate_num_insns (stmt, &eni_size_weights);
5522 lattice[i] = cost;
5523 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5525 tree use = USE_FROM_PTR (use_p);
5526 gimple *def_stmt;
5527 if (TREE_CODE (use) != SSA_NAME)
5528 continue;
5529 def_stmt = get_gimple_for_ssa_name (use);
5530 if (!def_stmt)
5531 continue;
5532 lattice[i] += lattice[gimple_uid (def_stmt)];
5534 i++;
5535 if (!is_gimple_assign (stmt)
5536 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5537 continue;
5538 op0 = gimple_op (stmt, 1);
5539 op1 = gimple_op (stmt, 2);
5540 if (TREE_CODE (op0) != SSA_NAME
5541 || TREE_CODE (op1) != SSA_NAME)
5542 continue;
5543 /* Swap operands if the second one is more expensive. */
5544 def0 = get_gimple_for_ssa_name (op0);
5545 def1 = get_gimple_for_ssa_name (op1);
5546 if (!def1)
5547 continue;
5548 swap = false;
5549 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5550 swap = true;
5551 if (swap)
5553 if (dump_file && (dump_flags & TDF_DETAILS))
5555 fprintf (dump_file, "Swap operands in stmt:\n");
5556 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5557 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5558 def0 ? lattice[gimple_uid (def0)] : 0,
5559 lattice[gimple_uid (def1)]);
5561 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5562 gimple_assign_rhs2_ptr (stmt));
5565 XDELETE (lattice);
5568 /* Expand basic block BB from GIMPLE trees to RTL. */
5570 static basic_block
5571 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5573 gimple_stmt_iterator gsi;
5574 gimple_seq stmts;
5575 gimple *stmt = NULL;
5576 rtx_note *note = NULL;
5577 rtx_insn *last;
5578 edge e;
5579 edge_iterator ei;
5581 if (dump_file)
5582 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5583 bb->index);
5585 /* Note that since we are now transitioning from GIMPLE to RTL, we
5586 cannot use the gsi_*_bb() routines because they expect the basic
5587 block to be in GIMPLE, instead of RTL. Therefore, we need to
5588 access the BB sequence directly. */
5589 if (optimize)
5590 reorder_operands (bb);
5591 stmts = bb_seq (bb);
5592 bb->il.gimple.seq = NULL;
5593 bb->il.gimple.phi_nodes = NULL;
5594 rtl_profile_for_bb (bb);
5595 init_rtl_bb_info (bb);
5596 bb->flags |= BB_RTL;
5598 /* Remove the RETURN_EXPR if we may fall through to the exit
5599 instead. */
5600 gsi = gsi_last (stmts);
5601 if (!gsi_end_p (gsi)
5602 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5604 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5606 gcc_assert (single_succ_p (bb));
5607 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5609 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5610 && !gimple_return_retval (ret_stmt))
5612 gsi_remove (&gsi, false);
5613 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5617 gsi = gsi_start (stmts);
5618 if (!gsi_end_p (gsi))
5620 stmt = gsi_stmt (gsi);
5621 if (gimple_code (stmt) != GIMPLE_LABEL)
5622 stmt = NULL;
5625 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5627 if (stmt || elt)
5629 gcc_checking_assert (!note);
5630 last = get_last_insn ();
5632 if (stmt)
5634 expand_gimple_stmt (stmt);
5635 gsi_next (&gsi);
5638 if (elt)
5639 emit_label (*elt);
5641 BB_HEAD (bb) = NEXT_INSN (last);
5642 if (NOTE_P (BB_HEAD (bb)))
5643 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5644 gcc_assert (LABEL_P (BB_HEAD (bb)));
5645 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5647 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5649 else
5650 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5652 if (note)
5653 NOTE_BASIC_BLOCK (note) = bb;
5655 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5657 basic_block new_bb;
5659 stmt = gsi_stmt (gsi);
5661 /* If this statement is a non-debug one, and we generate debug
5662 insns, then this one might be the last real use of a TERed
5663 SSA_NAME, but where there are still some debug uses further
5664 down. Expanding the current SSA name in such further debug
5665 uses by their RHS might lead to wrong debug info, as coalescing
5666 might make the operands of such RHS be placed into the same
5667 pseudo as something else. Like so:
5668 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5669 use(a_1);
5670 a_2 = ...
5671 #DEBUG ... => a_1
5672 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5673 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5674 the write to a_2 would actually have clobbered the place which
5675 formerly held a_0.
5677 So, instead of that, we recognize the situation, and generate
5678 debug temporaries at the last real use of TERed SSA names:
5679 a_1 = a_0 + 1;
5680 #DEBUG #D1 => a_1
5681 use(a_1);
5682 a_2 = ...
5683 #DEBUG ... => #D1  */
5685 if (MAY_HAVE_DEBUG_BIND_INSNS
5686 && SA.values
5687 && !is_gimple_debug (stmt))
5689 ssa_op_iter iter;
5690 tree op;
5691 gimple *def;
5693 location_t sloc = curr_insn_location ();
5695 /* Look for SSA names that have their last use here (TERed
5696 names always have only one real use). */
5697 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5698 if ((def = get_gimple_for_ssa_name (op)))
5700 imm_use_iterator imm_iter;
5701 use_operand_p use_p;
5702 bool have_debug_uses = false;
5704 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5706 if (gimple_debug_bind_p (USE_STMT (use_p)))
5708 have_debug_uses = true;
5709 break;
5713 if (have_debug_uses)
5715 /* OP is a TERed SSA name, with DEF its defining
5716 statement, and where OP is used in further debug
5717 instructions. Generate a debug temporary, and
5718 replace all uses of OP in debug insns with that
5719 temporary. */
5720 gimple *debugstmt;
5721 tree value = gimple_assign_rhs_to_tree (def);
5722 tree vexpr = make_node (DEBUG_EXPR_DECL);
5723 rtx val;
5724 machine_mode mode;
5726 set_curr_insn_location (gimple_location (def));
5728 DECL_ARTIFICIAL (vexpr) = 1;
5729 TREE_TYPE (vexpr) = TREE_TYPE (value);
5730 if (DECL_P (value))
5731 mode = DECL_MODE (value);
5732 else
5733 mode = TYPE_MODE (TREE_TYPE (value));
5734 SET_DECL_MODE (vexpr, mode);
5736 val = gen_rtx_VAR_LOCATION
5737 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5739 emit_debug_insn (val);
5741 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5743 if (!gimple_debug_bind_p (debugstmt))
5744 continue;
5746 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5747 SET_USE (use_p, vexpr);
5749 update_stmt (debugstmt);
5753 set_curr_insn_location (sloc);
5756 currently_expanding_gimple_stmt = stmt;
5758 /* Expand this statement, then evaluate the resulting RTL and
5759 fixup the CFG accordingly. */
5760 if (gimple_code (stmt) == GIMPLE_COND)
5762 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5763 if (new_bb)
5764 return new_bb;
5766 else if (is_gimple_debug (stmt))
5768 location_t sloc = curr_insn_location ();
5769 gimple_stmt_iterator nsi = gsi;
5771 for (;;)
5773 tree var;
5774 tree value = NULL_TREE;
5775 rtx val = NULL_RTX;
5776 machine_mode mode;
5778 if (!gimple_debug_nonbind_marker_p (stmt))
5780 if (gimple_debug_bind_p (stmt))
5782 var = gimple_debug_bind_get_var (stmt);
5784 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5785 && TREE_CODE (var) != LABEL_DECL
5786 && !target_for_debug_bind (var))
5787 goto delink_debug_stmt;
5789 if (DECL_P (var))
5790 mode = DECL_MODE (var);
5791 else
5792 mode = TYPE_MODE (TREE_TYPE (var));
5794 if (gimple_debug_bind_has_value_p (stmt))
5795 value = gimple_debug_bind_get_value (stmt);
5797 val = gen_rtx_VAR_LOCATION
5798 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5800 else if (gimple_debug_source_bind_p (stmt))
5802 var = gimple_debug_source_bind_get_var (stmt);
5804 value = gimple_debug_source_bind_get_value (stmt);
5806 mode = DECL_MODE (var);
5808 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5809 VAR_INIT_STATUS_UNINITIALIZED);
5811 else
5812 gcc_unreachable ();
5814 /* If this function was first compiled with markers
5815 enabled, but they're now disabled (e.g. LTO), drop
5816 them on the floor. */
5817 else if (gimple_debug_nonbind_marker_p (stmt)
5818 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5819 goto delink_debug_stmt;
5820 else if (gimple_debug_begin_stmt_p (stmt))
5821 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5822 else if (gimple_debug_inline_entry_p (stmt))
5824 tree block = gimple_block (stmt);
5826 if (block)
5827 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5828 else
5829 goto delink_debug_stmt;
5831 else
5832 gcc_unreachable ();
5834 last = get_last_insn ();
5836 set_curr_insn_location (gimple_location (stmt));
5838 emit_debug_insn (val);
5840 if (dump_file && (dump_flags & TDF_DETAILS))
5842 /* We can't dump the insn with a TREE where an RTX
5843 is expected. */
5844 if (GET_CODE (val) == VAR_LOCATION)
5846 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5847 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5849 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5850 if (GET_CODE (val) == VAR_LOCATION)
5851 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5854 delink_debug_stmt:
5855 /* In order not to generate too many debug temporaries,
5856 we delink all uses of debug statements we already expanded.
5857 Therefore debug statements between definition and real
5858 use of TERed SSA names will continue to use the SSA name,
5859 and not be replaced with debug temps. */
5860 delink_stmt_imm_use (stmt);
5862 gsi = nsi;
5863 gsi_next (&nsi);
5864 if (gsi_end_p (nsi))
5865 break;
5866 stmt = gsi_stmt (nsi);
5867 if (!is_gimple_debug (stmt))
5868 break;
5871 set_curr_insn_location (sloc);
5873 else
5875 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5876 if (call_stmt
5877 && gimple_call_tail_p (call_stmt)
5878 && disable_tail_calls)
5879 gimple_call_set_tail (call_stmt, false);
5881 if (call_stmt && gimple_call_tail_p (call_stmt))
5883 bool can_fallthru;
5884 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5885 if (new_bb)
5887 if (can_fallthru)
5888 bb = new_bb;
5889 else
5890 return new_bb;
5893 else
5895 def_operand_p def_p;
5896 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5898 if (def_p != NULL)
5900 /* Ignore this stmt if it is in the list of
5901 replaceable expressions. */
5902 if (SA.values
5903 && bitmap_bit_p (SA.values,
5904 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5905 continue;
5907 last = expand_gimple_stmt (stmt);
5908 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5913 currently_expanding_gimple_stmt = NULL;
5915 /* Expand implicit goto and convert goto_locus. */
5916 FOR_EACH_EDGE (e, ei, bb->succs)
5918 if (e->goto_locus != UNKNOWN_LOCATION)
5919 set_curr_insn_location (e->goto_locus);
5920 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5922 emit_jump (label_rtx_for_bb (e->dest));
5923 e->flags &= ~EDGE_FALLTHRU;
5927 /* Expanded RTL can create a jump in the last instruction of the block.
5928 This might later be assumed to be a jump to the successor and break edge insertion.
5929 We need to insert a dummy move to prevent this. PR41440. */
5930 if (single_succ_p (bb)
5931 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5932 && (last = get_last_insn ())
5933 && (JUMP_P (last)
5934 || (DEBUG_INSN_P (last)
5935 && JUMP_P (prev_nondebug_insn (last)))))
5937 rtx dummy = gen_reg_rtx (SImode);
5938 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
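/* The emitted insn is just a self-move of a fresh pseudo, e.g.
     (set (reg:SI N) (reg:SI N))
   which later passes can delete as a no-op; its only purpose is to
   ensure the block no longer ends in a jump, so that edge insertion
   has a safe place to add instructions.  */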
5941 do_pending_stack_adjust ();
5943 /* Find the block tail. The last insn in the block is the insn
5944 before a barrier and/or table jump insn. */
5945 last = get_last_insn ();
5946 if (BARRIER_P (last))
5947 last = PREV_INSN (last);
5948 if (JUMP_TABLE_DATA_P (last))
5949 last = PREV_INSN (PREV_INSN (last));
5950 if (BARRIER_P (last))
5951 last = PREV_INSN (last);
5952 BB_END (bb) = last;
5954 update_bb_for_insn (bb);
5956 return bb;
5960 /* Create a basic block for initialization code. */
5962 static basic_block
5963 construct_init_block (void)
5965 basic_block init_block, first_block;
5966 edge e = NULL;
5967 int flags;
5969 /* Multiple entry points not supported yet. */
5970 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5971 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5972 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5973 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5974 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5976 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5978 /* When the entry edge points to the first basic block, we don't need a jump;
5979 otherwise we have to jump to the proper target. */
5980 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5982 tree label = gimple_block_label (e->dest);
5984 emit_jump (jump_target_rtx (label));
5985 flags = 0;
5987 else
5988 flags = EDGE_FALLTHRU;
5990 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5991 get_last_insn (),
5992 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5993 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5994 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5995 if (e)
5997 first_block = e->dest;
5998 redirect_edge_succ (e, init_block);
5999 make_single_succ_edge (init_block, first_block, flags);
6001 else
6002 make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6003 EDGE_FALLTHRU);
6005 update_bb_for_insn (init_block);
6006 return init_block;
6009 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6010 found in the block tree. */
6012 static void
6013 set_block_levels (tree block, int level)
6015 while (block)
6017 BLOCK_NUMBER (block) = level;
6018 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6019 block = BLOCK_CHAIN (block);
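/* For example, the outermost block gets level 0, its sub-blocks level 1,
   their sub-blocks level 2, and so on; sibling blocks reached through
   BLOCK_CHAIN share the same level.  */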
6023 /* Create a block containing landing pads and similar stuff. */
6025 static void
6026 construct_exit_block (void)
6028 rtx_insn *head = get_last_insn ();
6029 rtx_insn *end;
6030 basic_block exit_block;
6031 edge e, e2;
6032 unsigned ix;
6033 edge_iterator ei;
6034 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6035 rtx_insn *orig_end = BB_END (prev_bb);
6037 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6039 /* Make sure the locus is set to the end of the function, so that
6040 epilogue line numbers and warnings are set properly. */
6041 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6042 input_location = cfun->function_end_locus;
6044 /* Generate rtl for function exit. */
6045 expand_function_end ();
6047 end = get_last_insn ();
6048 if (head == end)
6049 return;
6050 /* While emitting the function end we could have moved the end of the last
6051 basic block. */
6052 BB_END (prev_bb) = orig_end;
6053 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6054 head = NEXT_INSN (head);
6055 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6056 bb count accounting will be confused. Any instructions before that
6057 label are emitted for the case where PREV_BB falls through into the
6058 exit block, so append those instructions to prev_bb in that case. */
6059 if (NEXT_INSN (head) != return_label)
6061 while (NEXT_INSN (head) != return_label)
6063 if (!NOTE_P (NEXT_INSN (head)))
6064 BB_END (prev_bb) = NEXT_INSN (head);
6065 head = NEXT_INSN (head);
6068 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6069 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6070 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6072 ix = 0;
6073 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6075 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6076 if (!(e->flags & EDGE_ABNORMAL))
6077 redirect_edge_succ (e, exit_block);
6078 else
6079 ix++;
6082 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6083 EDGE_FALLTHRU);
6084 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6085 if (e2 != e)
6087 exit_block->count -= e2->count ();
6089 update_bb_for_insn (exit_block);
6092 /* Helper function for discover_nonconstant_array_refs.
6093 Look for ARRAY_REF nodes with non-constant indices and mark them
6094 addressable. */
6096 static tree
6097 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6098 void *data ATTRIBUTE_UNUSED)
6100 tree t = *tp;
6102 if (IS_TYPE_OR_DECL_P (t))
6103 *walk_subtrees = 0;
6104 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6106 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6107 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6108 && (!TREE_OPERAND (t, 2)
6109 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6110 || (TREE_CODE (t) == COMPONENT_REF
6111 && (!TREE_OPERAND (t,2)
6112 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6113 || TREE_CODE (t) == BIT_FIELD_REF
6114 || TREE_CODE (t) == REALPART_EXPR
6115 || TREE_CODE (t) == IMAGPART_EXPR
6116 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6117 || CONVERT_EXPR_P (t))
6118 t = TREE_OPERAND (t, 0);
6120 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6122 t = get_base_address (t);
6123 if (t && DECL_P (t)
6124 && DECL_MODE (t) != BLKmode)
6125 TREE_ADDRESSABLE (t) = 1;
6128 *walk_subtrees = 0;
6131 return NULL_TREE;
6134 /* RTL expansion is not able to compile array references with variable
6135 offsets for arrays stored in a single register. Discover such
6136 expressions and mark variables as addressable to avoid this
6137 scenario. */
6139 static void
6140 discover_nonconstant_array_refs (void)
6142 basic_block bb;
6143 gimple_stmt_iterator gsi;
6145 FOR_EACH_BB_FN (bb, cfun)
6146 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6148 gimple *stmt = gsi_stmt (gsi);
6149 if (!is_gimple_debug (stmt))
6150 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
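/* For example, a reference like v[i] with a non-constant index i and a
   vector variable v would reach the ARRAY_REF case above with a
   non-BLKmode DECL as its base; v is then marked TREE_ADDRESSABLE so it
   lives in memory instead of a single register.  (Illustrative sketch;
   the exact trees depend on the front end.)  */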
6154 /* This function sets crtl->args.internal_arg_pointer to a virtual
6155 register if DRAP is needed. The local register allocator will replace
6156 virtual_incoming_args_rtx with the virtual register. */
6158 static void
6159 expand_stack_alignment (void)
6161 rtx drap_rtx;
6162 unsigned int preferred_stack_boundary;
6164 if (! SUPPORTS_STACK_ALIGNMENT)
6165 return;
6167 if (cfun->calls_alloca
6168 || cfun->has_nonlocal_label
6169 || crtl->has_nonlocal_goto)
6170 crtl->need_drap = true;
6172 /* Call update_stack_boundary here again to update incoming stack
6173 boundary. It may set incoming stack alignment to a different
6174 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6175 use the minimum incoming stack alignment to check if it is OK
6176 to perform sibcall optimization since sibcall optimization will
6177 only align the outgoing stack to incoming stack boundary. */
6178 if (targetm.calls.update_stack_boundary)
6179 targetm.calls.update_stack_boundary ();
6181 /* The incoming stack frame has to be aligned at least at
6182 parm_stack_boundary. */
6183 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6185 /* Update crtl->stack_alignment_estimated and use it later to align
6186 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6187 exceptions since callgraph doesn't collect incoming stack alignment
6188 in this case. */
6189 if (cfun->can_throw_non_call_exceptions
6190 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6191 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6192 else
6193 preferred_stack_boundary = crtl->preferred_stack_boundary;
6194 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6195 crtl->stack_alignment_estimated = preferred_stack_boundary;
6196 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6197 crtl->stack_alignment_needed = preferred_stack_boundary;
6199 gcc_assert (crtl->stack_alignment_needed
6200 <= crtl->stack_alignment_estimated);
6202 crtl->stack_realign_needed
6203 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6204 crtl->stack_realign_tried = crtl->stack_realign_needed;
6206 crtl->stack_realign_processed = true;
6208 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6209 alignment. */
6210 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6211 drap_rtx = targetm.calls.get_drap_rtx ();
6213 /* stack_realign_drap and drap_rtx must match. */
6214 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6216 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6217 if (drap_rtx != NULL)
6219 crtl->args.internal_arg_pointer = drap_rtx;
6221 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6222 needed. */
6223 fixup_tail_calls ();
6228 static void
6229 expand_main_function (void)
6231 #if (defined(INVOKE__main) \
6232 || (!defined(HAS_INIT_SECTION) \
6233 && !defined(INIT_SECTION_ASM_OP) \
6234 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6235 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6236 #endif
6240 /* Expand code to initialize the stack_protect_guard. This is invoked at
6241 the beginning of a function to be protected. */
6243 static void
6244 stack_protect_prologue (void)
6246 tree guard_decl = targetm.stack_protect_guard ();
6247 rtx x, y;
6249 crtl->stack_protect_guard_decl = guard_decl;
6250 x = expand_normal (crtl->stack_protect_guard);
6252 if (targetm.have_stack_protect_combined_set () && guard_decl)
6254 gcc_assert (DECL_P (guard_decl));
6255 y = DECL_RTL (guard_decl);
6257 /* Allow the target to compute the address of Y and copy it to X without
6258 leaking Y into a register. This combined address + copy pattern
6259 allows the target to prevent spilling of any intermediate results by
6260 splitting it after register allocation. */
6261 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6263 emit_insn (insn);
6264 return;
6268 if (guard_decl)
6269 y = expand_normal (guard_decl);
6270 else
6271 y = const0_rtx;
6273 /* Allow the target to copy from Y to X without leaking Y into a
6274 register. */
6275 if (targetm.have_stack_protect_set ())
6276 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6278 emit_insn (insn);
6279 return;
6282 /* Otherwise do a straight move. */
6283 emit_move_insn (x, y);
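/* So the guard value ends up in the canary slot X through one of three
   routes: the target's combined address+copy pattern, its plain
   stack_protect_set pattern, or an ordinary move of Y (the guard value,
   or const0_rtx when the target supplies no guard decl).  */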
6286 /* Translate the intermediate representation contained in the CFG
6287 from GIMPLE trees to RTL.
6289 We do conversion per basic block and preserve/update the tree CFG.
6290 This implies we have to do some magic as the CFG can simultaneously
6291 consist of basic blocks containing RTL and GIMPLE trees. This can
6292 confuse the CFG hooks, so be careful not to manipulate the CFG during
6293 expansion. */
6295 namespace {
6297 const pass_data pass_data_expand =
6299 RTL_PASS, /* type */
6300 "expand", /* name */
6301 OPTGROUP_NONE, /* optinfo_flags */
6302 TV_EXPAND, /* tv_id */
6303 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6304 | PROP_gimple_lcx
6305 | PROP_gimple_lvec
6306 | PROP_gimple_lva), /* properties_required */
6307 PROP_rtl, /* properties_provided */
6308 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6309 0, /* todo_flags_start */
6310 0, /* todo_flags_finish */
6313 class pass_expand : public rtl_opt_pass
6315 public:
6316 pass_expand (gcc::context *ctxt)
6317 : rtl_opt_pass (pass_data_expand, ctxt)
6320 /* opt_pass methods: */
6321 virtual unsigned int execute (function *);
6323 }; // class pass_expand
6325 unsigned int
6326 pass_expand::execute (function *fun)
6328 basic_block bb, init_block;
6329 edge_iterator ei;
6330 edge e;
6331 rtx_insn *var_seq, *var_ret_seq;
6332 unsigned i;
6334 timevar_push (TV_OUT_OF_SSA);
6335 rewrite_out_of_ssa (&SA);
6336 timevar_pop (TV_OUT_OF_SSA);
6337 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6339 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6341 gimple_stmt_iterator gsi;
6342 FOR_EACH_BB_FN (bb, cfun)
6343 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6344 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6345 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
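/* Presumably to keep the values referenced from debug binds from growing
   without bound, avoid_deep_ter_for_debug introduces debug temporaries
   for TER chains that nest too deeply before expansion starts.  */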
6348 /* Make sure all values used by the optimization passes have sane
6349 defaults. */
6350 reg_renumber = 0;
6352 /* Some backends want to know that we are expanding to RTL. */
6353 currently_expanding_to_rtl = 1;
6354 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6355 free_dominance_info (CDI_DOMINATORS);
6357 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6359 insn_locations_init ();
6360 if (!DECL_IS_BUILTIN (current_function_decl))
6362 /* Eventually, all FEs should explicitly set function_start_locus. */
6363 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6364 set_curr_insn_location
6365 (DECL_SOURCE_LOCATION (current_function_decl));
6366 else
6367 set_curr_insn_location (fun->function_start_locus);
6369 else
6370 set_curr_insn_location (UNKNOWN_LOCATION);
6371 prologue_location = curr_insn_location ();
6373 #ifdef INSN_SCHEDULING
6374 init_sched_attrs ();
6375 #endif
6377 /* Make sure the first insn is a note even if we don't want line numbers.
6378 This makes sure the first insn will never be deleted.
6379 Also, final expects a note to appear there. */
6380 emit_note (NOTE_INSN_DELETED);
6382 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6383 discover_nonconstant_array_refs ();
6385 targetm.expand_to_rtl_hook ();
6386 crtl->init_stack_alignment ();
6387 fun->cfg->max_jumptable_ents = 0;
6389 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6390 of the function section at expansion time to predict the distance of calls. */
6391 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6393 /* Expand the variables recorded during gimple lowering. */
6394 timevar_push (TV_VAR_EXPAND);
6395 start_sequence ();
6397 var_ret_seq = expand_used_vars ();
6399 var_seq = get_insns ();
6400 end_sequence ();
6401 timevar_pop (TV_VAR_EXPAND);
6403 /* Honor stack protection warnings. */
6404 if (warn_stack_protect)
6406 if (fun->calls_alloca)
6407 warning (OPT_Wstack_protector,
6408 "stack protector not protecting local variables: "
6409 "variable length buffer");
6410 if (has_short_buffer && !crtl->stack_protect_guard)
6411 warning (OPT_Wstack_protector,
6412 "stack protector not protecting function: "
6413 "all local arrays are less than %d bytes long",
6414 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6417 /* Set up parameters and prepare for return, for the function. */
6418 expand_function_start (current_function_decl);
6420 /* If we emitted any instructions for setting up the variables,
6421 emit them before the FUNCTION_START note. */
6422 if (var_seq)
6424 emit_insn_before (var_seq, parm_birth_insn);
6426 /* In expand_function_end we'll insert the alloca save/restore
6427 before parm_birth_insn. We've just inserted an alloca call.
6428 Adjust the pointer to match. */
6429 parm_birth_insn = var_seq;
6432 /* Now propagate the RTL assignment of each partition to the
6433 underlying var of each SSA_NAME. */
6434 tree name;
6436 FOR_EACH_SSA_NAME (i, name, cfun)
6438 /* We might have generated new SSA names in
6439 update_alias_info_with_stack_vars. They will have a NULL
6440 defining statement, and won't be part of the partitioning,
6441 so ignore those. */
6442 if (!SSA_NAME_DEF_STMT (name))
6443 continue;
6445 adjust_one_expanded_partition_var (name);
6448 /* Clean up the RTL of variables that straddle multiple
6449 partitions, and check that the rtl of any PARM_DECLs that are not
6450 cleaned up is that of their default defs. */
6451 FOR_EACH_SSA_NAME (i, name, cfun)
6453 int part;
6455 /* We might have generated new SSA names in
6456 update_alias_info_with_stack_vars. They will have a NULL
6457 defining statement, and won't be part of the partitioning,
6458 so ignore those. */
6459 if (!SSA_NAME_DEF_STMT (name))
6460 continue;
6461 part = var_to_partition (SA.map, name);
6462 if (part == NO_PARTITION)
6463 continue;
6465 /* If this decl was marked as living in multiple places, reset
6466 this now to NULL. */
6467 tree var = SSA_NAME_VAR (name);
6468 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6469 SET_DECL_RTL (var, NULL);
6470 /* Check that the pseudos chosen by assign_parms are those of
6471 the corresponding default defs. */
6472 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6473 && (TREE_CODE (var) == PARM_DECL
6474 || TREE_CODE (var) == RESULT_DECL))
6476 rtx in = DECL_RTL_IF_SET (var);
6477 gcc_assert (in);
6478 rtx out = SA.partition_to_pseudo[part];
6479 gcc_assert (in == out);
6481 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6482 those expected by debug backends for each parm and for
6483 the result. This is particularly important for stabs,
6484 whose register elimination from parm's DECL_RTL may cause
6485 -fcompare-debug differences as SET_DECL_RTL changes reg's
6486 attrs. So, make sure the RTL already has the parm as the
6487 EXPR, so that it won't change. */
6488 SET_DECL_RTL (var, NULL_RTX);
6489 if (MEM_P (in))
6490 set_mem_attributes (in, var, true);
6491 SET_DECL_RTL (var, in);
6495 /* If this function is `main', emit a call to `__main'
6496 to run global initializers, etc. */
6497 if (DECL_NAME (current_function_decl)
6498 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6499 && DECL_FILE_SCOPE_P (current_function_decl))
6500 expand_main_function ();
6502 /* Initialize the stack_protect_guard field. This must happen after the
6503 call to __main (if any) so that the external decl is initialized. */
6504 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6505 stack_protect_prologue ();
6507 expand_phi_nodes (&SA);
6509 /* Release any stale SSA redirection data. */
6510 redirect_edge_var_map_empty ();
6512 /* Register rtl specific functions for cfg. */
6513 rtl_register_cfg_hooks ();
6515 init_block = construct_init_block ();
6517 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6518 remaining edges later. */
6519 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6520 e->flags &= ~EDGE_EXECUTABLE;
6522 /* If the function has too many markers, drop them while expanding. */
6523 if (cfun->debug_marker_count
6524 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6525 cfun->debug_nonbind_markers = false;
6527 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6528 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6529 next_bb)
6530 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6532 if (MAY_HAVE_DEBUG_BIND_INSNS)
6533 expand_debug_locations ();
6535 if (deep_ter_debug_map)
6537 delete deep_ter_debug_map;
6538 deep_ter_debug_map = NULL;
6541 /* Free stuff we no longer need after GIMPLE optimizations. */
6542 free_dominance_info (CDI_DOMINATORS);
6543 free_dominance_info (CDI_POST_DOMINATORS);
6544 delete_tree_cfg_annotations (fun);
6546 timevar_push (TV_OUT_OF_SSA);
6547 finish_out_of_ssa (&SA);
6548 timevar_pop (TV_OUT_OF_SSA);
6550 timevar_push (TV_POST_EXPAND);
6551 /* We are no longer in SSA form. */
6552 fun->gimple_df->in_ssa_p = false;
6553 loops_state_clear (LOOP_CLOSED_SSA);
6555 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6556 conservatively to true until they are all profile aware. */
6557 delete lab_rtx_for_bb;
6558 free_histograms (fun);
6560 construct_exit_block ();
6561 insn_locations_finalize ();
6563 if (var_ret_seq)
6565 rtx_insn *after = return_label;
6566 rtx_insn *next = NEXT_INSN (after);
6567 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6568 after = next;
6569 emit_insn_after (var_ret_seq, after);
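/* VAR_RET_SEQ was produced by expand_used_vars above and needs to run
   when the function returns; emitting it right after return_label
   (skipping the basic-block note, if any) keeps it inside the exit
   block.  */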
6572 /* Zap the tree EH table. */
6573 set_eh_throw_stmt_table (fun, NULL);
6575 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6576 to split edges, which edge insertion might do. */
6577 rebuild_jump_labels (get_insns ());
6579 /* If we have a single successor to the entry block, put the pending insns
6580 after parm birth, but before NOTE_INSN_FUNCTION_BEG. */
6581 if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6583 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6584 if (e->insns.r)
6586 rtx_insn *insns = e->insns.r;
6587 e->insns.r = NULL;
6588 rebuild_jump_labels_chain (insns);
6589 if (NOTE_P (parm_birth_insn)
6590 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6591 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6592 else
6593 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6597 /* Otherwise, as well as for other edges, take the usual way. */
6598 commit_edge_insertions ();
6600 /* We're done expanding trees to RTL. */
6601 currently_expanding_to_rtl = 0;
6603 flush_mark_addressable_queue ();
6605 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6606 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6608 edge e;
6609 edge_iterator ei;
6610 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6612 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6613 e->flags &= ~EDGE_EXECUTABLE;
6615 /* At the moment not all abnormal edges match the RTL
6616 representation. It is safe to remove them here as
6617 find_many_sub_basic_blocks will rediscover them.
6618 In the future we should get this fixed properly. */
6619 if ((e->flags & EDGE_ABNORMAL)
6620 && !(e->flags & EDGE_SIBCALL))
6621 remove_edge (e);
6622 else
6623 ei_next (&ei);
6627 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6628 bitmap_ones (blocks);
6629 find_many_sub_basic_blocks (blocks);
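/* Expansion may have emitted new labels and jumps in the middle of what
   used to be a single GIMPLE basic block; passing an all-ones bitmap asks
   find_many_sub_basic_blocks to rescan every block and split it into
   proper RTL basic blocks.  */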
6630 purge_all_dead_edges ();
6632 /* After initial rtl generation, call back to finish generating
6633 exception support code. We need to do this before cleaning up
6634 the CFG as the code does not expect dead landing pads. */
6635 if (fun->eh->region_tree != NULL)
6636 finish_eh_generation ();
6638 /* Call expand_stack_alignment after finishing all
6639 updates to crtl->preferred_stack_boundary. */
6640 expand_stack_alignment ();
6642 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6643 function. */
6644 if (crtl->tail_call_emit)
6645 fixup_tail_calls ();
6647 /* BB subdivision may have created basic blocks that are only reachable
6648 from unlikely bbs but not marked as such in the profile. */
6649 if (optimize)
6650 propagate_unlikely_bbs_forward ();
6652 /* Remove unreachable blocks, otherwise we cannot compute dominators
6653 which are needed for loop state verification. As a side-effect
6654 this also compacts blocks.
6655 ??? We cannot remove trivially dead insns here as for example
6656 the DRAP reg on i?86 is not magically live at this point.
6657 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6658 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6660 checking_verify_flow_info ();
6662 /* Initialize pseudos allocated for hard registers. */
6663 emit_initial_value_sets ();
6665 /* And finally unshare all RTL. */
6666 unshare_all_rtl ();
6668 /* There's no need to defer outputting this function any more; we
6669 know we want to output it. */
6670 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6672 /* Now that we're done expanding trees to RTL, we shouldn't have any
6673 more CONCATs anywhere. */
6674 generating_concat_p = 0;
6676 if (dump_file)
6678 fprintf (dump_file,
6679 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6680 /* And the pass manager will dump RTL for us. */
6683 /* If we're emitting a nested function, make sure its parent gets
6684 emitted as well. Doing otherwise confuses debug info. */
6686 tree parent;
6687 for (parent = DECL_CONTEXT (current_function_decl);
6688 parent != NULL_TREE;
6689 parent = get_containing_scope (parent))
6690 if (TREE_CODE (parent) == FUNCTION_DECL)
6691 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6694 TREE_ASM_WRITTEN (current_function_decl) = 1;
6696 /* After expanding, the return labels are no longer needed. */
6697 return_label = NULL;
6698 naked_return_label = NULL;
6700 /* After expanding, the tm_restart map is no longer needed. */
6701 if (fun->gimple_df->tm_restart)
6702 fun->gimple_df->tm_restart = NULL;
6704 /* Tag the blocks with a depth number so that change_scope can find
6705 the common parent easily. */
6706 set_block_levels (DECL_INITIAL (fun->decl), 0);
6707 default_rtl_profile ();
6709 /* For -dx discard loops now, otherwise the IL verifier in clean_state will
6710 ICE. */
6711 if (rtl_dump_and_exit)
6713 cfun->curr_properties &= ~PROP_loops;
6714 loop_optimizer_finalize ();
6717 timevar_pop (TV_POST_EXPAND);
6719 return 0;
6722 } // anon namespace
6724 rtl_opt_pass *
6725 make_pass_expand (gcc::context *ctxt)
6727 return new pass_expand (ctxt);