gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
86 /* This variable holds information helping the rewriting of SSA trees
87 into RTL. */
88 struct ssaexpand SA;
90 /* This variable holds the currently expanded gimple statement for purposes
91 of communicating the profile info to the builtin expanders. */
92 gimple *currently_expanding_gimple_stmt;
94 static rtx expand_debug_expr (tree);
96 static bool defer_stack_allocation (tree, bool);
98 static void record_alignment_for_reg_var (unsigned int);
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
103 tree
104 gimple_assign_rhs_to_tree (gimple *stmt)
106 tree t;
107 enum gimple_rhs_class grhs_class;
109 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
111 if (grhs_class == GIMPLE_TERNARY_RHS)
112 t = build3 (gimple_assign_rhs_code (stmt),
113 TREE_TYPE (gimple_assign_lhs (stmt)),
114 gimple_assign_rhs1 (stmt),
115 gimple_assign_rhs2 (stmt),
116 gimple_assign_rhs3 (stmt));
117 else if (grhs_class == GIMPLE_BINARY_RHS)
118 t = build2 (gimple_assign_rhs_code (stmt),
119 TREE_TYPE (gimple_assign_lhs (stmt)),
120 gimple_assign_rhs1 (stmt),
121 gimple_assign_rhs2 (stmt));
122 else if (grhs_class == GIMPLE_UNARY_RHS)
123 t = build1 (gimple_assign_rhs_code (stmt),
124 TREE_TYPE (gimple_assign_lhs (stmt)),
125 gimple_assign_rhs1 (stmt));
126 else if (grhs_class == GIMPLE_SINGLE_RHS)
128 t = gimple_assign_rhs1 (stmt);
129 /* Avoid modifying this tree in place below. */
130 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 && gimple_location (stmt) != EXPR_LOCATION (t))
132 || (gimple_block (stmt)
133 && currently_expanding_to_rtl
134 && EXPR_P (t)))
135 t = copy_node (t);
137 else
138 gcc_unreachable ();
140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
143 return t;
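/* Illustrative sketch (added for clarity, not part of the original source):
   for a statement such as "x_1 = a_2 + b_3", gimple_expr_code returns
   PLUS_EXPR, which classifies as GIMPLE_BINARY_RHS, so the function rebuilds
   roughly

     build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   i.e. a GENERIC expression equivalent to the flattened GIMPLE RHS.  */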
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
155 out of the same user variable being in multiple partitions (this is
156 less likely for compiler-introduced temps). */
158 static tree
159 leader_merge (tree cur, tree next)
161 if (cur == NULL || cur == next)
162 return next;
164 if (DECL_P (cur) && DECL_IGNORED_P (cur))
165 return cur;
167 if (DECL_P (next) && DECL_IGNORED_P (next))
168 return next;
170 return cur;
173 /* Associate declaration T with storage space X. If T is not an
174 SSA name this is exactly SET_DECL_RTL, otherwise make the
175 partition of T associated with X. */
176 static inline void
177 set_rtl (tree t, rtx x)
179 gcc_checking_assert (!x
180 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 || (use_register_for_decl (t)
182 ? (REG_P (x)
183 || (GET_CODE (x) == CONCAT
184 && (REG_P (XEXP (x, 0))
185 || SUBREG_P (XEXP (x, 0)))
186 && (REG_P (XEXP (x, 1))
187 || SUBREG_P (XEXP (x, 1))))
188 /* We need to accept PARALLELs for RESULT_DECLs
189 because of vector types with BLKmode returned
190 in multiple registers, but they are supposed
191 to be uncoalesced. */
192 || (GET_CODE (x) == PARALLEL
193 && SSAVAR (t)
194 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
195 && (GET_MODE (x) == BLKmode
196 || !flag_tree_coalesce_vars)))
197 : (MEM_P (x) || x == pc_rtx
198 || (GET_CODE (x) == CONCAT
199 && MEM_P (XEXP (x, 0))
200 && MEM_P (XEXP (x, 1))))));
201 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202 RESULT_DECLs has the expected mode. For memory, we accept
203 unpromoted modes, since that's what we're likely to get. For
204 PARM_DECLs and RESULT_DECLs, we'll have been called by
205 set_parm_rtl, which will give us the default def, so we don't
206 have to compute it ourselves. For RESULT_DECLs, we accept mode
207 mismatches too, as long as we have BLKmode or are not coalescing
208 across variables, so that we don't reject BLKmode PARALLELs or
209 unpromoted REGs. */
210 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
211 || (SSAVAR (t)
212 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 && (promote_ssa_mode (t, NULL) == BLKmode
214 || !flag_tree_coalesce_vars))
215 || !use_register_for_decl (t)
216 || GET_MODE (x) == promote_ssa_mode (t, NULL));
218 if (x)
220 bool skip = false;
221 tree cur = NULL_TREE;
222 rtx xm = x;
224 retry:
225 if (MEM_P (xm))
226 cur = MEM_EXPR (xm);
227 else if (REG_P (xm))
228 cur = REG_EXPR (xm);
229 else if (SUBREG_P (xm))
231 gcc_assert (subreg_lowpart_p (xm));
232 xm = SUBREG_REG (xm);
233 goto retry;
235 else if (GET_CODE (xm) == CONCAT)
237 xm = XEXP (xm, 0);
238 goto retry;
240 else if (GET_CODE (xm) == PARALLEL)
242 xm = XVECEXP (xm, 0, 0);
243 gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 xm = XEXP (xm, 0);
245 goto retry;
247 else if (xm == pc_rtx)
248 skip = true;
249 else
250 gcc_unreachable ();
252 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
254 if (cur != next)
256 if (MEM_P (x))
257 set_mem_attributes (x,
258 next && TREE_CODE (next) == SSA_NAME
259 ? TREE_TYPE (next)
260 : next, true);
261 else
262 set_reg_attrs_for_decl_rtl (next, x);
266 if (TREE_CODE (t) == SSA_NAME)
268 int part = var_to_partition (SA.map, t);
269 if (part != NO_PARTITION)
271 if (SA.partition_to_pseudo[part])
272 gcc_assert (SA.partition_to_pseudo[part] == x);
273 else if (x != pc_rtx)
274 SA.partition_to_pseudo[part] = x;
276 /* For the benefit of debug information at -O0 (where
277 vartracking doesn't run) record the place also in the base
278 DECL. For PARMs and RESULTs, do so only when setting the
279 default def. */
280 if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 && (VAR_P (SSA_NAME_VAR (t))
282 || SSA_NAME_IS_DEFAULT_DEF (t)))
284 tree var = SSA_NAME_VAR (t);
285 /* If we don't yet have something recorded, just record it now. */
286 if (!DECL_RTL_SET_P (var))
287 SET_DECL_RTL (var, x);
288 /* If we have it set already to "multiple places" don't
289 change this. */
290 else if (DECL_RTL (var) == pc_rtx)
292 /* If we have something recorded and it's not the same place
293 as we want to record now, we have multiple partitions for the
294 same base variable, with different places. We can't just
295 randomly choose one; hence we have to say that we don't know.
296 This only happens with optimization, and there var-tracking
297 will figure out the right thing. */
298 else if (DECL_RTL (var) != x)
299 SET_DECL_RTL (var, pc_rtx);
302 else
303 SET_DECL_RTL (t, x);
306 /* This structure holds data relevant to one variable that will be
307 placed in a stack slot. */
308 struct stack_var
310 /* The Variable. */
311 tree decl;
313 /* Initially, the size of the variable. Later, the size of the partition,
314 if this variable becomes its partition's representative. */
315 poly_uint64 size;
317 /* The *byte* alignment required for this variable. Or, as with the
318 size, the alignment for this partition. */
319 unsigned int alignb;
321 /* The partition representative. */
322 size_t representative;
324 /* The next stack variable in the partition, or EOC. */
325 size_t next;
327 /* The numbers of conflicting stack variables. */
328 bitmap conflicts;
331 #define EOC ((size_t)-1)
333 /* We have an array of such objects while deciding allocation. */
334 static struct stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
339 /* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341 static bitmap_obstack stack_var_bitmap_obstack;
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344 is non-decreasing. */
345 static size_t *stack_vars_sorted;
347 /* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350 static int frame_phase;
352 /* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354 static bool has_protected_decls;
356 /* Used during expand_used_vars. Remember if we saw a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358 static bool has_short_buffer;
360 /* Compute the byte alignment to use for DECL. Ignore alignment
361 we cannot honor given the expected alignment of the stack boundary. */
363 static unsigned int
364 align_local_variable (tree decl)
366 unsigned int align;
368 if (TREE_CODE (decl) == SSA_NAME)
369 align = TYPE_ALIGN (TREE_TYPE (decl));
370 else
372 align = LOCAL_DECL_ALIGNMENT (decl);
373 SET_DECL_ALIGN (decl, align);
375 return align / BITS_PER_UNIT;
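/* Illustrative example (not in the original source): for a local "int"
   variable on a typical target with 32-bit int alignment, TYPE_ALIGN or
   LOCAL_DECL_ALIGNMENT yields 32 bits, so this returns 32 / 8 == 4, i.e.
   the required alignment expressed in bytes.  */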
378 /* Align given offset BASE with ALIGN. Truncate up if ALIGN_UP is true,
379 down otherwise. Return truncated BASE value. */
381 static inline unsigned HOST_WIDE_INT
382 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
384 return align_up ? (base + align - 1) & -align : base & -align;
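/* Worked example (illustrative, assuming ALIGN is a power of two):

     align_base (13, 8, true)  == (13 + 7) & -8 == 16
     align_base (13, 8, false) == 13 & -8       == 8

   Rounding up adds ALIGN-1 before masking; rounding down just masks.  */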
387 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
388 Return the frame offset. */
390 static poly_int64
391 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
393 poly_int64 offset, new_frame_offset;
395 if (FRAME_GROWS_DOWNWARD)
397 new_frame_offset
398 = aligned_lower_bound (frame_offset - frame_phase - size,
399 align) + frame_phase;
400 offset = new_frame_offset;
402 else
404 new_frame_offset
405 = aligned_upper_bound (frame_offset - frame_phase,
406 align) + frame_phase;
407 offset = new_frame_offset;
408 new_frame_offset += size;
410 frame_offset = new_frame_offset;
412 if (frame_offset_overflow (frame_offset, cfun->decl))
413 frame_offset = offset = 0;
415 return offset;
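/* Illustrative example (added for clarity, not part of the original source):
   with FRAME_GROWS_DOWNWARD, frame_phase == 0 and frame_offset == -16, a
   request for 8 bytes at byte alignment 8 computes
   aligned_lower_bound (-16 - 8, 8) == -24, so the variable is placed at
   offset -24 and frame_offset becomes -24.  */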
418 /* Accumulate DECL into STACK_VARS. */
420 static void
421 add_stack_var (tree decl)
423 struct stack_var *v;
425 if (stack_vars_num >= stack_vars_alloc)
427 if (stack_vars_alloc)
428 stack_vars_alloc = stack_vars_alloc * 3 / 2;
429 else
430 stack_vars_alloc = 32;
431 stack_vars
432 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
434 if (!decl_to_stack_part)
435 decl_to_stack_part = new hash_map<tree, size_t>;
437 v = &stack_vars[stack_vars_num];
438 decl_to_stack_part->put (decl, stack_vars_num);
440 v->decl = decl;
441 tree size = TREE_CODE (decl) == SSA_NAME
442 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
443 : DECL_SIZE_UNIT (decl);
444 v->size = tree_to_poly_uint64 (size);
445 /* Ensure that all variables have size, so that &a != &b for any two
446 variables that are simultaneously live. */
447 if (known_eq (v->size, 0U))
448 v->size = 1;
449 v->alignb = align_local_variable (decl);
450 /* An alignment of zero can mightily confuse us later. */
451 gcc_assert (v->alignb != 0);
453 /* All variables are initially in their own partition. */
454 v->representative = stack_vars_num;
455 v->next = EOC;
457 /* All variables initially conflict with no other. */
458 v->conflicts = NULL;
460 /* Ensure that this decl doesn't get put onto the list twice. */
461 set_rtl (decl, pc_rtx);
463 stack_vars_num++;
466 /* Make the decls associated with luid's X and Y conflict. */
468 static void
469 add_stack_var_conflict (size_t x, size_t y)
471 struct stack_var *a = &stack_vars[x];
472 struct stack_var *b = &stack_vars[y];
473 if (x == y)
474 return;
475 if (!a->conflicts)
476 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477 if (!b->conflicts)
478 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479 bitmap_set_bit (a->conflicts, y);
480 bitmap_set_bit (b->conflicts, x);
483 /* Check whether the decls associated with luid's X and Y conflict. */
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
488 struct stack_var *a = &stack_vars[x];
489 struct stack_var *b = &stack_vars[y];
490 if (x == y)
491 return false;
492 /* Partitions containing an SSA name result from gimple registers
493 with things like unsupported modes. They are top-level and
494 hence conflict with everything else. */
495 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496 return true;
498 if (!a->conflicts || !b->conflicts)
499 return false;
500 return bitmap_bit_p (a->conflicts, y);
503 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
504 enter its partition number into bitmap DATA. */
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
509 bitmap active = (bitmap)data;
510 op = get_base_address (op);
511 if (op
512 && DECL_P (op)
513 && DECL_RTL_IF_SET (op) == pc_rtx)
515 size_t *v = decl_to_stack_part->get (op);
516 if (v)
517 bitmap_set_bit (active, *v);
519 return false;
522 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
523 record conflicts between it and all currently active other partitions
524 from bitmap DATA. */
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
529 bitmap active = (bitmap)data;
530 op = get_base_address (op);
531 if (op
532 && DECL_P (op)
533 && DECL_RTL_IF_SET (op) == pc_rtx)
535 size_t *v = decl_to_stack_part->get (op);
536 if (v && bitmap_set_bit (active, *v))
538 size_t num = *v;
539 bitmap_iterator bi;
540 unsigned i;
541 gcc_assert (num < stack_vars_num);
542 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 add_stack_var_conflict (num, i);
546 return false;
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550 at the end of BB, leaving the result in WORK. We're called to generate
551 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552 liveness. */
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
557 edge e;
558 edge_iterator ei;
559 gimple_stmt_iterator gsi;
560 walk_stmt_load_store_addr_fn visit;
562 bitmap_clear (work);
563 FOR_EACH_EDGE (e, ei, bb->preds)
564 bitmap_ior_into (work, (bitmap)e->src->aux);
566 visit = visit_op;
568 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
570 gimple *stmt = gsi_stmt (gsi);
571 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
573 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
575 gimple *stmt = gsi_stmt (gsi);
577 if (gimple_clobber_p (stmt))
579 tree lhs = gimple_assign_lhs (stmt);
580 size_t *v;
581 /* Nested function lowering might introduce LHSs
582 that are COMPONENT_REFs. */
583 if (!VAR_P (lhs))
584 continue;
585 if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 && (v = decl_to_stack_part->get (lhs)))
587 bitmap_clear_bit (work, *v);
589 else if (!is_gimple_debug (stmt))
591 if (for_conflict
592 && visit == visit_op)
594 /* If this is the first real instruction in this BB we need
595 to add conflicts for everything live at this point now.
596 Unlike classical liveness for named objects we can't
597 rely on seeing a def/use of the names we're interested in.
598 There might merely be indirect loads/stores. We'd not add any
599 conflicts for such partitions. */
600 bitmap_iterator bi;
601 unsigned i;
602 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
604 struct stack_var *a = &stack_vars[i];
605 if (!a->conflicts)
606 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 bitmap_ior_into (a->conflicts, work);
609 visit = visit_conflict;
611 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
616 /* Generate stack partition conflicts between all partitions that are
617 simultaneously live. */
619 static void
620 add_scope_conflicts (void)
622 basic_block bb;
623 bool changed;
624 bitmap work = BITMAP_ALLOC (NULL);
625 int *rpo;
626 int n_bbs;
628 /* We approximate the live range of a stack variable by taking the first
629 mention of its name as starting point(s), and by the end-of-scope
630 death clobber added by gimplify as ending point(s) of the range.
631 This overapproximates in the case where we, for instance, moved an address-taken
632 operation upward without also moving a dereference to it upward.
633 But it's conservatively correct as a variable never can hold values
634 before its name is mentioned at least once.
636 We then do a mostly classical bitmap liveness algorithm. */
638 FOR_ALL_BB_FN (bb, cfun)
639 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
641 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
644 changed = true;
645 while (changed)
647 int i;
648 changed = false;
649 for (i = 0; i < n_bbs; i++)
651 bitmap active;
652 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 active = (bitmap)bb->aux;
654 add_scope_conflicts_1 (bb, work, false);
655 if (bitmap_ior_into (active, work))
656 changed = true;
660 FOR_EACH_BB_FN (bb, cfun)
661 add_scope_conflicts_1 (bb, work, true);
663 free (rpo);
664 BITMAP_FREE (work);
665 FOR_ALL_BB_FN (bb, cfun)
666 BITMAP_FREE (bb->aux);
669 /* A subroutine of partition_stack_vars. A comparison function for qsort,
670 sorting an array of indices by the properties of the object. */
672 static int
673 stack_var_cmp (const void *a, const void *b)
675 size_t ia = *(const size_t *)a;
676 size_t ib = *(const size_t *)b;
677 unsigned int aligna = stack_vars[ia].alignb;
678 unsigned int alignb = stack_vars[ib].alignb;
679 poly_int64 sizea = stack_vars[ia].size;
680 poly_int64 sizeb = stack_vars[ib].size;
681 tree decla = stack_vars[ia].decl;
682 tree declb = stack_vars[ib].decl;
683 bool largea, largeb;
684 unsigned int uida, uidb;
686 /* Primary compare on "large" alignment. Large comes first. */
687 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689 if (largea != largeb)
690 return (int)largeb - (int)largea;
692 /* Secondary compare on size, decreasing */
693 int diff = compare_sizes_for_sort (sizeb, sizea);
694 if (diff != 0)
695 return diff;
697 /* Tertiary compare on true alignment, decreasing. */
698 if (aligna < alignb)
699 return -1;
700 if (aligna > alignb)
701 return 1;
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
706 if (TREE_CODE (decla) == SSA_NAME)
708 if (TREE_CODE (declb) == SSA_NAME)
709 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710 else
711 return -1;
713 else if (TREE_CODE (declb) == SSA_NAME)
714 return 1;
715 else
716 uida = DECL_UID (decla), uidb = DECL_UID (declb);
717 if (uida < uidb)
718 return 1;
719 if (uida > uidb)
720 return -1;
721 return 0;
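/* Illustrative ordering (added for clarity, not in the original source):
   assuming variable A's alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT
   while B (64 bytes) and C (16 bytes) have ordinary alignment, qsort with
   stack_var_cmp orders them A, B, C: "large"-alignment variables first,
   then decreasing size, with true alignment and then SSA version or
   DECL_UID as final tie-breakers for sort stability.  */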
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
727 /* If the points-to solution *PI points to variables that are in a partition
728 together with other variables add all partition members to the pointed-to
729 variables bitmap. */
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 part_hashmap *decls_to_partitions,
734 hash_set<bitmap> *visited, bitmap temp)
736 bitmap_iterator bi;
737 unsigned i;
738 bitmap *part;
740 if (pt->anything
741 || pt->vars == NULL
742 /* The pointed-to vars bitmap is shared, it is enough to
743 visit it once. */
744 || visited->add (pt->vars))
745 return;
747 bitmap_clear (temp);
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add we make sure to visit each of the partitions only
751 once. */
752 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753 if ((!temp
754 || !bitmap_bit_p (temp, i))
755 && (part = decls_to_partitions->get (i)))
756 bitmap_ior_into (temp, *part);
757 if (!bitmap_empty_p (temp))
758 bitmap_ior_into (pt->vars, temp);
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
764 rewritten. */
766 static void
767 update_alias_info_with_stack_vars (void)
769 part_hashmap *decls_to_partitions = NULL;
770 size_t i, j;
771 tree var = NULL_TREE;
773 for (i = 0; i < stack_vars_num; i++)
775 bitmap part = NULL;
776 tree name;
777 struct ptr_info_def *pi;
779 /* Not interested in partitions with a single variable. */
780 if (stack_vars[i].representative != i
781 || stack_vars[i].next == EOC)
782 continue;
784 if (!decls_to_partitions)
786 decls_to_partitions = new part_hashmap;
787 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var == NULL_TREE)
794 var = create_tmp_var (ptr_type_node);
795 name = make_ssa_name (var);
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part = BITMAP_GGC_ALLOC ();
800 for (j = i; j != EOC; j = stack_vars[j].next)
802 tree decl = stack_vars[j].decl;
803 unsigned int uid = DECL_PT_UID (decl);
804 bitmap_set_bit (part, uid);
805 decls_to_partitions->put (uid, part);
806 cfun->gimple_df->decls_to_pointers->put (decl, name);
807 if (TREE_ADDRESSABLE (decl))
808 TREE_ADDRESSABLE (name) = 1;
811 /* Make the SSA name point to all partition members. */
812 pi = get_ptr_info (name);
813 pt_solution_set (&pi->pt, part, false);
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions)
820 unsigned i;
821 tree name;
822 hash_set<bitmap> visited;
823 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
825 FOR_EACH_SSA_NAME (i, name, cfun)
827 struct ptr_info_def *pi;
829 if (POINTER_TYPE_P (TREE_TYPE (name))
830 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 &visited, temp);
835 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 decls_to_partitions, &visited, temp);
838 delete decls_to_partitions;
839 BITMAP_FREE (temp);
843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
845 Merge them into a single partition A. */
847 static void
848 union_stack_vars (size_t a, size_t b)
850 struct stack_var *vb = &stack_vars[b];
851 bitmap_iterator bi;
852 unsigned u;
854 gcc_assert (stack_vars[b].next == EOC);
855 /* Add B to A's partition. */
856 stack_vars[b].next = stack_vars[a].next;
857 stack_vars[b].representative = a;
858 stack_vars[a].next = b;
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars[a].alignb < stack_vars[b].alignb)
862 stack_vars[a].alignb = stack_vars[b].alignb;
864 /* Update the interference graph and merge the conflicts. */
865 if (vb->conflicts)
867 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 add_stack_var_conflict (a, stack_vars[u].representative);
869 BITMAP_FREE (vb->conflicts);
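/* Illustrative sketch (not part of the original source): merging B into A
   splices B onto A's singly linked member list.  If A's partition was
   A -> EOC and B's was B -> EOC, afterwards it is A -> B -> EOC, with
   stack_vars[B].representative == A, and A's alignb is raised to B's if
   B was more strictly aligned.  B's conflicts are folded into A's.  */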
873 /* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
877 Sort the objects by size in descending order.
878 For each object A {
879 S = size(A)
880 O = 0
881 loop {
882 Look for the largest non-conflicting object B with size <= S.
883 UNION (A, B)
888 static void
889 partition_stack_vars (void)
891 size_t si, sj, n = stack_vars_num;
893 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894 for (si = 0; si < n; ++si)
895 stack_vars_sorted[si] = si;
897 if (n == 1)
898 return;
900 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
902 for (si = 0; si < n; ++si)
904 size_t i = stack_vars_sorted[si];
905 unsigned int ialign = stack_vars[i].alignb;
906 poly_int64 isize = stack_vars[i].size;
908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars[i].representative != i)
912 continue;
914 for (sj = si + 1; sj < n; ++sj)
916 size_t j = stack_vars_sorted[sj];
917 unsigned int jalign = stack_vars[j].alignb;
918 poly_int64 jsize = stack_vars[j].size;
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars[j].representative != j)
922 continue;
924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 break;
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
934 if (asan_sanitize_stack_p ()
935 && maybe_ne (isize, jsize)
936 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 break;
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i, j))
941 continue;
943 /* UNION the objects, placing J at OFFSET. */
944 union_stack_vars (i, j);
948 update_alias_info_with_stack_vars ();
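/* Illustrative example (added for clarity, not in the original source):
   with variables sorted by decreasing size as X (32 bytes), Y (16) and
   Z (8), where X conflicts with Y but not with Z, the outer loop takes X
   as representative, skips Y because of the conflict, and unions Z into
   X's partition; Y then stays its own representative.  Later,
   expand_stack_vars gives every member of a partition the same frame
   offset, so Z overlays part of X's 32-byte slot, which is safe because
   the two are never simultaneously live.  */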
951 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
953 static void
954 dump_stack_var_partition (void)
956 size_t si, i, j, n = stack_vars_num;
958 for (si = 0; si < n; ++si)
960 i = stack_vars_sorted[si];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars[i].representative != i)
964 continue;
966 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967 print_dec (stack_vars[i].size, dump_file);
968 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
970 for (j = i; j != EOC; j = stack_vars[j].next)
972 fputc ('\t', dump_file);
973 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
975 fputc ('\n', dump_file);
979 /* Assign rtl to DECL at BASE + OFFSET. */
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 poly_int64 offset)
985 unsigned align;
986 rtx x;
988 /* If this fails, we've overflowed the stack frame. Error nicely? */
989 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
991 x = plus_constant (Pmode, base, offset);
992 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl))
994 : DECL_MODE (SSAVAR (decl)), x);
996 if (TREE_CODE (decl) != SSA_NAME)
998 /* Set alignment we actually gave this decl if it isn't an SSA name.
999 If it is we generate stack slots only accidentally so it isn't as
1000 important, we'll simply use the alignment that is already set. */
1001 if (base == virtual_stack_vars_rtx)
1002 offset -= frame_phase;
1003 align = known_alignment (offset);
1004 align *= BITS_PER_UNIT;
1005 if (align == 0 || align > base_align)
1006 align = base_align;
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
1012 SET_DECL_ALIGN (decl, align);
1013 DECL_USER_ALIGN (decl) = 0;
1016 set_rtl (decl, x);
1019 struct stack_vars_data
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
1023 The vector is in reverse order; highest-offset pairs come first. */
1024 auto_vec<HOST_WIDE_INT> asan_vec;
1026 /* Vector of partition representative decls in between the paddings. */
1027 auto_vec<tree> asan_decl_vec;
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1030 rtx asan_base;
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb;
1036 /* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1043 size_t si, i, j, n = stack_vars_num;
1044 poly_uint64 large_size = 0, large_alloc = 0;
1045 rtx large_base = NULL;
1046 unsigned large_align = 0;
1047 bool large_allocation_done = false;
1048 tree decl;
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
1052 no predicate is involved. */
1053 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1056 /* Find the total size of these variables. */
1057 for (si = 0; si < n; ++si)
1059 unsigned alignb;
1061 i = stack_vars_sorted[si];
1062 alignb = stack_vars[i].alignb;
1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb * BITS_PER_UNIT) > large_align)
1069 large_align = alignb * BITS_PER_UNIT;
1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 break;
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars[i].representative != i)
1077 continue;
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl = stack_vars[i].decl;
1082 if (TREE_CODE (decl) == SSA_NAME
1083 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 : DECL_RTL (decl) != pc_rtx)
1085 continue;
1087 large_size = aligned_upper_bound (large_size, alignb);
1088 large_size += stack_vars[i].size;
1092 for (si = 0; si < n; ++si)
1094 rtx base;
1095 unsigned base_align, alignb;
1096 poly_int64 offset;
1098 i = stack_vars_sorted[si];
1100 /* Skip variables that aren't partition representatives, for now. */
1101 if (stack_vars[i].representative != i)
1102 continue;
1104 /* Skip variables that have already had rtl assigned. See also
1105 add_stack_var where we perpetrate this pc_rtx hack. */
1106 decl = stack_vars[i].decl;
1107 if (TREE_CODE (decl) == SSA_NAME
1108 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 : DECL_RTL (decl) != pc_rtx)
1110 continue;
1112 /* Check the predicate to see whether this variable should be
1113 allocated in this pass. */
1114 if (pred && !pred (i))
1115 continue;
1117 alignb = stack_vars[i].alignb;
1118 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1120 base = virtual_stack_vars_rtx;
1121 /* ASAN description strings don't yet have a syntax for expressing
1122 polynomial offsets. */
1123 HOST_WIDE_INT prev_offset;
1124 if (asan_sanitize_stack_p ()
1125 && pred
1126 && frame_offset.is_constant (&prev_offset)
1127 && stack_vars[i].size.is_constant ())
1129 if (data->asan_vec.is_empty ())
1131 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1132 prev_offset = frame_offset.to_constant ();
1134 prev_offset = align_base (prev_offset,
1135 ASAN_MIN_RED_ZONE_SIZE,
1136 !FRAME_GROWS_DOWNWARD);
1137 tree repr_decl = NULL_TREE;
1138 unsigned HOST_WIDE_INT size
1139 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1140 if (data->asan_vec.is_empty ())
1141 size = MAX (size, ASAN_RED_ZONE_SIZE);
1143 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1144 ASAN_MIN_RED_ZONE_SIZE);
1145 offset = alloc_stack_frame_space (size, alignment);
1147 data->asan_vec.safe_push (prev_offset);
1148 /* Allocating a constant amount of space from a constant
1149 starting offset must give a constant result. */
1150 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1151 .to_constant ());
1152 /* Find the best representative of the partition.
1153 Prefer those with DECL_NAME, and better still those that also
1154 satisfy the asan_protect_stack_decl predicate. */
1155 for (j = i; j != EOC; j = stack_vars[j].next)
1156 if (asan_protect_stack_decl (stack_vars[j].decl)
1157 && DECL_NAME (stack_vars[j].decl))
1159 repr_decl = stack_vars[j].decl;
1160 break;
1162 else if (repr_decl == NULL_TREE
1163 && DECL_P (stack_vars[j].decl)
1164 && DECL_NAME (stack_vars[j].decl))
1165 repr_decl = stack_vars[j].decl;
1166 if (repr_decl == NULL_TREE)
1167 repr_decl = stack_vars[i].decl;
1168 data->asan_decl_vec.safe_push (repr_decl);
1170 /* Make sure a representative is unpoisoned if another
1171 variable in the partition is handled by
1172 use-after-scope sanitization. */
1173 if (asan_handled_variables != NULL
1174 && !asan_handled_variables->contains (repr_decl))
1176 for (j = i; j != EOC; j = stack_vars[j].next)
1177 if (asan_handled_variables->contains (stack_vars[j].decl))
1178 break;
1179 if (j != EOC)
1180 asan_handled_variables->add (repr_decl);
1183 data->asan_alignb = MAX (data->asan_alignb, alignb);
1184 if (data->asan_base == NULL)
1185 data->asan_base = gen_reg_rtx (Pmode);
1186 base = data->asan_base;
1188 if (!STRICT_ALIGNMENT)
1189 base_align = crtl->max_used_stack_slot_alignment;
1190 else
1191 base_align = MAX (crtl->max_used_stack_slot_alignment,
1192 GET_MODE_ALIGNMENT (SImode)
1193 << ASAN_SHADOW_SHIFT);
1195 else
1197 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1198 base_align = crtl->max_used_stack_slot_alignment;
1201 else
1203 /* Large alignment is only processed in the last pass. */
1204 if (pred)
1205 continue;
1207 /* If there were any variables requiring "large" alignment, allocate
1208 space. */
1209 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1211 poly_int64 loffset;
1212 rtx large_allocsize;
1214 large_allocsize = gen_int_mode (large_size, Pmode);
1215 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1216 loffset = alloc_stack_frame_space
1217 (rtx_to_poly_int64 (large_allocsize),
1218 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1219 large_base = get_dynamic_stack_base (loffset, large_align);
1220 large_allocation_done = true;
1222 gcc_assert (large_base != NULL);
1224 large_alloc = aligned_upper_bound (large_alloc, alignb);
1225 offset = large_alloc;
1226 large_alloc += stack_vars[i].size;
1228 base = large_base;
1229 base_align = large_align;
1232 /* Create rtl for each variable based on their location within the
1233 partition. */
1234 for (j = i; j != EOC; j = stack_vars[j].next)
1236 expand_one_stack_var_at (stack_vars[j].decl,
1237 base, base_align,
1238 offset);
1242 gcc_assert (known_eq (large_alloc, large_size));
1245 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1246 static poly_uint64
1247 account_stack_vars (void)
1249 size_t si, j, i, n = stack_vars_num;
1250 poly_uint64 size = 0;
1252 for (si = 0; si < n; ++si)
1254 i = stack_vars_sorted[si];
1256 /* Skip variables that aren't partition representatives, for now. */
1257 if (stack_vars[i].representative != i)
1258 continue;
1260 size += stack_vars[i].size;
1261 for (j = i; j != EOC; j = stack_vars[j].next)
1262 set_rtl (stack_vars[j].decl, NULL);
1264 return size;
1267 /* Record the RTL assignment X for the default def of PARM. */
1269 extern void
1270 set_parm_rtl (tree parm, rtx x)
1272 gcc_assert (TREE_CODE (parm) == PARM_DECL
1273 || TREE_CODE (parm) == RESULT_DECL);
1275 if (x && !MEM_P (x))
1277 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1278 TYPE_MODE (TREE_TYPE (parm)),
1279 TYPE_ALIGN (TREE_TYPE (parm)));
1281 /* If the variable alignment is very large we'll dynamically
1282 allocate it, which means that in-frame portion is just a
1283 pointer. ??? We've got a pseudo for sure here, do we
1284 actually dynamically allocate its spilling area if needed?
1285 ??? Isn't it a problem when Pmode alignment also exceeds
1286 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1287 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1288 align = GET_MODE_ALIGNMENT (Pmode);
1290 record_alignment_for_reg_var (align);
1293 tree ssa = ssa_default_def (cfun, parm);
1294 if (!ssa)
1295 return set_rtl (parm, x);
1297 int part = var_to_partition (SA.map, ssa);
1298 gcc_assert (part != NO_PARTITION);
1300 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1301 gcc_assert (changed);
1303 set_rtl (ssa, x);
1304 gcc_assert (DECL_RTL (parm) == x);
1307 /* A subroutine of expand_one_var. Called to immediately assign rtl
1308 to a variable to be allocated in the stack frame. */
1310 static void
1311 expand_one_stack_var_1 (tree var)
1313 poly_uint64 size;
1314 poly_int64 offset;
1315 unsigned byte_align;
1317 if (TREE_CODE (var) == SSA_NAME)
1319 tree type = TREE_TYPE (var);
1320 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1321 byte_align = TYPE_ALIGN_UNIT (type);
1323 else
1325 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1326 byte_align = align_local_variable (var);
1329 /* We handle highly aligned variables in expand_stack_vars. */
1330 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1332 offset = alloc_stack_frame_space (size, byte_align);
1334 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1335 crtl->max_used_stack_slot_alignment, offset);
1338 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1339 already assigned some MEM. */
1341 static void
1342 expand_one_stack_var (tree var)
1344 if (TREE_CODE (var) == SSA_NAME)
1346 int part = var_to_partition (SA.map, var);
1347 if (part != NO_PARTITION)
1349 rtx x = SA.partition_to_pseudo[part];
1350 gcc_assert (x);
1351 gcc_assert (MEM_P (x));
1352 return;
1356 return expand_one_stack_var_1 (var);
1359 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1360 that will reside in a hard register. */
1362 static void
1363 expand_one_hard_reg_var (tree var)
1365 rest_of_decl_compilation (var, 0, 0);
1368 /* Record the alignment requirements of some variable assigned to a
1369 pseudo. */
1371 static void
1372 record_alignment_for_reg_var (unsigned int align)
1374 if (SUPPORTS_STACK_ALIGNMENT
1375 && crtl->stack_alignment_estimated < align)
1377 /* stack_alignment_estimated shouldn't change after stack
1378 realign decision is made. */
1379 gcc_assert (!crtl->stack_realign_processed);
1380 crtl->stack_alignment_estimated = align;
1383 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1384 So here we only make sure stack_alignment_needed >= align. */
1385 if (crtl->stack_alignment_needed < align)
1386 crtl->stack_alignment_needed = align;
1387 if (crtl->max_used_stack_slot_alignment < align)
1388 crtl->max_used_stack_slot_alignment = align;
1391 /* Create RTL for an SSA partition. */
1393 static void
1394 expand_one_ssa_partition (tree var)
1396 int part = var_to_partition (SA.map, var);
1397 gcc_assert (part != NO_PARTITION);
1399 if (SA.partition_to_pseudo[part])
1400 return;
1402 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1403 TYPE_MODE (TREE_TYPE (var)),
1404 TYPE_ALIGN (TREE_TYPE (var)));
1406 /* If the variable alignment is very large we'll dynamically allocate
1407 it, which means that in-frame portion is just a pointer. */
1408 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1409 align = GET_MODE_ALIGNMENT (Pmode);
1411 record_alignment_for_reg_var (align);
1413 if (!use_register_for_decl (var))
1415 if (defer_stack_allocation (var, true))
1416 add_stack_var (var);
1417 else
1418 expand_one_stack_var_1 (var);
1419 return;
1422 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1423 rtx x = gen_reg_rtx (reg_mode);
1425 set_rtl (var, x);
1427 /* For a promoted variable, X will not be used directly but wrapped in a
1428 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1429 will assume that its upper bits can be inferred from its lower bits.
1430 Therefore, if X isn't initialized on every path from the entry, then
1431 we must do it manually in order to fulfill the above assumption. */
1432 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1433 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1434 emit_move_insn (x, CONST0_RTX (reg_mode));
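/* Illustrative example (not part of the original source): on a target that
   promotes QImode SSA names to SImode registers, a partition whose value
   may be undefined on some path from the entry gets an explicit
   "move reg, const0" emitted here, so later RTL passes can safely assume
   the promoted upper bits are consistent with the lower bits.  */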
1437 /* Record the association between the RTL generated for partition PART
1438 and the underlying variable of the SSA_NAME VAR. */
1440 static void
1441 adjust_one_expanded_partition_var (tree var)
1443 if (!var)
1444 return;
1446 tree decl = SSA_NAME_VAR (var);
1448 int part = var_to_partition (SA.map, var);
1449 if (part == NO_PARTITION)
1450 return;
1452 rtx x = SA.partition_to_pseudo[part];
1454 gcc_assert (x);
1456 set_rtl (var, x);
1458 if (!REG_P (x))
1459 return;
1461 /* Note if the object is a user variable. */
1462 if (decl && !DECL_ARTIFICIAL (decl))
1463 mark_user_reg (x);
1465 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1466 mark_reg_pointer (x, get_pointer_alignment (var));
1469 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1470 that will reside in a pseudo register. */
1472 static void
1473 expand_one_register_var (tree var)
1475 if (TREE_CODE (var) == SSA_NAME)
1477 int part = var_to_partition (SA.map, var);
1478 if (part != NO_PARTITION)
1480 rtx x = SA.partition_to_pseudo[part];
1481 gcc_assert (x);
1482 gcc_assert (REG_P (x));
1483 return;
1485 gcc_unreachable ();
1488 tree decl = var;
1489 tree type = TREE_TYPE (decl);
1490 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1491 rtx x = gen_reg_rtx (reg_mode);
1493 set_rtl (var, x);
1495 /* Note if the object is a user variable. */
1496 if (!DECL_ARTIFICIAL (decl))
1497 mark_user_reg (x);
1499 if (POINTER_TYPE_P (type))
1500 mark_reg_pointer (x, get_pointer_alignment (var));
1503 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1504 has some associated error, e.g. its type is error-mark. We just need
1505 to pick something that won't crash the rest of the compiler. */
1507 static void
1508 expand_one_error_var (tree var)
1510 machine_mode mode = DECL_MODE (var);
1511 rtx x;
1513 if (mode == BLKmode)
1514 x = gen_rtx_MEM (BLKmode, const0_rtx);
1515 else if (mode == VOIDmode)
1516 x = const0_rtx;
1517 else
1518 x = gen_reg_rtx (mode);
1520 SET_DECL_RTL (var, x);
1523 /* A subroutine of expand_one_var. VAR is a variable that will be
1524 allocated to the local stack frame. Return true if we wish to
1525 add VAR to STACK_VARS so that it will be coalesced with other
1526 variables. Return false to allocate VAR immediately.
1528 This function is used to reduce the number of variables considered
1529 for coalescing, which reduces the size of the quadratic problem. */
1531 static bool
1532 defer_stack_allocation (tree var, bool toplevel)
1534 tree size_unit = TREE_CODE (var) == SSA_NAME
1535 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1536 : DECL_SIZE_UNIT (var);
1537 poly_uint64 size;
1539 /* Whether the variable is small enough for immediate allocation not to be
1540 a problem with regard to the frame size. */
1541 bool smallish
1542 = (poly_int_tree_p (size_unit, &size)
1543 && (estimated_poly_value (size)
1544 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1546 /* If stack protection is enabled, *all* stack variables must be deferred,
1547 so that we can re-order the strings to the top of the frame.
1548 Similarly for Address Sanitizer. */
1549 if (flag_stack_protect || asan_sanitize_stack_p ())
1550 return true;
1552 unsigned int align = TREE_CODE (var) == SSA_NAME
1553 ? TYPE_ALIGN (TREE_TYPE (var))
1554 : DECL_ALIGN (var);
1556 /* We handle "large" alignment via dynamic allocation. We want to handle
1557 this extra complication in only one place, so defer them. */
1558 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1559 return true;
1561 bool ignored = TREE_CODE (var) == SSA_NAME
1562 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1563 : DECL_IGNORED_P (var);
1565 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1566 might be detached from their block and appear at toplevel when we reach
1567 here. We want to coalesce them with variables from other blocks when
1568 the immediate contribution to the frame size would be noticeable. */
1569 if (toplevel && optimize > 0 && ignored && !smallish)
1570 return true;
1572 /* Variables declared in the outermost scope automatically conflict
1573 with every other variable. The only reason to want to defer them
1574 at all is that, after sorting, we can more efficiently pack
1575 small variables in the stack frame. Continue to defer at -O2. */
1576 if (toplevel && optimize < 2)
1577 return false;
1579 /* Without optimization, *most* variables are allocated from the
1580 stack, which makes the quadratic problem large exactly when we
1581 want compilation to proceed as quickly as possible. On the
1582 other hand, we don't want the function's stack frame size to
1583 get completely out of hand. So we avoid adding scalars and
1584 "small" aggregates to the list at all. */
1585 if (optimize == 0 && smallish)
1586 return false;
1588 return true;
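/* Illustrative decision summary (added for clarity; exact thresholds are
   target- and parameter-dependent):
     - -fstack-protector or ASan stack sanitization: always defer.
     - alignment above MAX_SUPPORTED_STACK_ALIGNMENT: always defer.
     - at -O0, a small variable (below PARAM_MIN_SIZE_FOR_STACK_SHARING):
       allocate immediately, keeping the coalescing problem small.
     - at -O2, even toplevel variables are deferred so they can be packed
       after sorting.  */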
1591 /* A subroutine of expand_used_vars. Expand one variable according to
1592 its flavor. Variables to be placed on the stack are not actually
1593 expanded yet, merely recorded.
1594 When REALLY_EXPAND is false, only add stack values to be allocated.
1595 Return stack usage this variable is supposed to take.
1598 static poly_uint64
1599 expand_one_var (tree var, bool toplevel, bool really_expand)
1601 unsigned int align = BITS_PER_UNIT;
1602 tree origvar = var;
1604 var = SSAVAR (var);
1606 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1608 if (is_global_var (var))
1609 return 0;
1611 /* Because we don't know if VAR will be in register or on stack,
1612 we conservatively assume it will be on stack even if VAR is
1613 eventually put into register after RA pass. For non-automatic
1614 variables, which won't be on stack, we collect alignment of
1615 type and ignore user specified alignment. Similarly for
1616 SSA_NAMEs for which use_register_for_decl returns true. */
1617 if (TREE_STATIC (var)
1618 || DECL_EXTERNAL (var)
1619 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1620 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1621 TYPE_MODE (TREE_TYPE (var)),
1622 TYPE_ALIGN (TREE_TYPE (var)));
1623 else if (DECL_HAS_VALUE_EXPR_P (var)
1624 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1625 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1626 or variables which were assigned a stack slot already by
1627 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1628 changed from the offset chosen to it. */
1629 align = crtl->stack_alignment_estimated;
1630 else
1631 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1633 /* If the variable alignment is very large we'll dynamically allocate
1634 it, which means that in-frame portion is just a pointer. */
1635 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1636 align = GET_MODE_ALIGNMENT (Pmode);
1639 record_alignment_for_reg_var (align);
1641 poly_uint64 size;
1642 if (TREE_CODE (origvar) == SSA_NAME)
1644 gcc_assert (!VAR_P (var)
1645 || (!DECL_EXTERNAL (var)
1646 && !DECL_HAS_VALUE_EXPR_P (var)
1647 && !TREE_STATIC (var)
1648 && TREE_TYPE (var) != error_mark_node
1649 && !DECL_HARD_REGISTER (var)
1650 && really_expand));
1652 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1654 else if (DECL_EXTERNAL (var))
1656 else if (DECL_HAS_VALUE_EXPR_P (var))
1658 else if (TREE_STATIC (var))
1660 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1662 else if (TREE_TYPE (var) == error_mark_node)
1664 if (really_expand)
1665 expand_one_error_var (var);
1667 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1669 if (really_expand)
1671 expand_one_hard_reg_var (var);
1672 if (!DECL_HARD_REGISTER (var))
1673 /* Invalid register specification. */
1674 expand_one_error_var (var);
1677 else if (use_register_for_decl (var))
1679 if (really_expand)
1680 expand_one_register_var (origvar);
1682 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1683 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1685 /* Reject variables which cover more than half of the address-space. */
1686 if (really_expand)
1688 if (DECL_NONLOCAL_FRAME (var))
1689 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1690 "total size of local objects is too large");
1691 else
1692 error_at (DECL_SOURCE_LOCATION (var),
1693 "size of variable %q+D is too large", var);
1694 expand_one_error_var (var);
1697 else if (defer_stack_allocation (var, toplevel))
1698 add_stack_var (origvar);
1699 else
1701 if (really_expand)
1703 if (lookup_attribute ("naked",
1704 DECL_ATTRIBUTES (current_function_decl)))
1705 error ("cannot allocate stack for variable %q+D, naked function.",
1706 var);
1708 expand_one_stack_var (origvar);
1710 return size;
1712 return 0;
1715 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1716 expanding variables. Those variables that can be put into registers
1717 are allocated pseudos; those that can't are put on the stack.
1719 TOPLEVEL is true if this is the outermost BLOCK. */
1721 static void
1722 expand_used_vars_for_block (tree block, bool toplevel)
1724 tree t;
1726 /* Expand all variables at this level. */
1727 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1728 if (TREE_USED (t)
1729 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1730 || !DECL_NONSHAREABLE (t)))
1731 expand_one_var (t, toplevel, true);
1733 /* Expand all variables at containing levels. */
1734 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1735 expand_used_vars_for_block (t, false);
1738 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1739 and clear TREE_USED on all local variables. */
1741 static void
1742 clear_tree_used (tree block)
1744 tree t;
1746 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1747 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1748 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1749 || !DECL_NONSHAREABLE (t))
1750 TREE_USED (t) = 0;
1752 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1753 clear_tree_used (t);
1756 enum {
1757 SPCT_FLAG_DEFAULT = 1,
1758 SPCT_FLAG_ALL = 2,
1759 SPCT_FLAG_STRONG = 3,
1760 SPCT_FLAG_EXPLICIT = 4
1763 /* Examine TYPE and determine a bit mask of the following features. */
1765 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1766 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1767 #define SPCT_HAS_ARRAY 4
1768 #define SPCT_HAS_AGGREGATE 8
1770 static unsigned int
1771 stack_protect_classify_type (tree type)
1773 unsigned int ret = 0;
1774 tree t;
1776 switch (TREE_CODE (type))
1778 case ARRAY_TYPE:
1779 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1780 if (t == char_type_node
1781 || t == signed_char_type_node
1782 || t == unsigned_char_type_node)
1784 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1785 unsigned HOST_WIDE_INT len;
1787 if (!TYPE_SIZE_UNIT (type)
1788 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1789 len = max;
1790 else
1791 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1793 if (len < max)
1794 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1795 else
1796 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1798 else
1799 ret = SPCT_HAS_ARRAY;
1800 break;
1802 case UNION_TYPE:
1803 case QUAL_UNION_TYPE:
1804 case RECORD_TYPE:
1805 ret = SPCT_HAS_AGGREGATE;
1806 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1807 if (TREE_CODE (t) == FIELD_DECL)
1808 ret |= stack_protect_classify_type (TREE_TYPE (t));
1809 break;
1811 default:
1812 break;
1815 return ret;
1818 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1819 part of the local stack frame. Remember if we ever return nonzero for
1820 any variable in this function. The return value is the phase number in
1821 which the variable should be allocated. */
1823 static int
1824 stack_protect_decl_phase (tree decl)
1826 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1827 int ret = 0;
1829 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1830 has_short_buffer = true;
1832 if (flag_stack_protect == SPCT_FLAG_ALL
1833 || flag_stack_protect == SPCT_FLAG_STRONG
1834 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1835 && lookup_attribute ("stack_protect",
1836 DECL_ATTRIBUTES (current_function_decl))))
1838 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1839 && !(bits & SPCT_HAS_AGGREGATE))
1840 ret = 1;
1841 else if (bits & SPCT_HAS_ARRAY)
1842 ret = 2;
1844 else
1845 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1847 if (ret)
1848 has_protected_decls = true;
1850 return ret;
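/* Illustrative example (not part of the original source): with the default
   -fstack-protector and --param ssp-buffer-size=8, "char buf[64]" is
   classified SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY and gets phase 1,
   i.e. it is placed in the vulnerable region next to the guard, while
   "char small[4]" gets phase 0 here but sets has_short_buffer for the
   benefit of -Wstack-protector.  */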
1853 /* Two helper routines that check for phase 1 and phase 2. These are used
1854 as callbacks for expand_stack_vars. */
1856 static bool
1857 stack_protect_decl_phase_1 (size_t i)
1859 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1862 static bool
1863 stack_protect_decl_phase_2 (size_t i)
1865 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1868 /* A helper function that checks for the asan phase (with stack protector
1869 it is phase 3). This is used as callback for expand_stack_vars.
1870 Returns true if any of the vars in the partition need to be protected. */
1872 static bool
1873 asan_decl_phase_3 (size_t i)
1875 while (i != EOC)
1877 if (asan_protect_stack_decl (stack_vars[i].decl))
1878 return true;
1879 i = stack_vars[i].next;
1881 return false;
1884 /* Ensure that variables in different stack protection phases conflict
1885 so that they are not merged and share the same stack slot. */
1887 static void
1888 add_stack_protection_conflicts (void)
1890 size_t i, j, n = stack_vars_num;
1891 unsigned char *phase;
1893 phase = XNEWVEC (unsigned char, n);
1894 for (i = 0; i < n; ++i)
1895 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1897 for (i = 0; i < n; ++i)
1899 unsigned char ph_i = phase[i];
1900 for (j = i + 1; j < n; ++j)
1901 if (ph_i != phase[j])
1902 add_stack_var_conflict (i, j);
1905 XDELETEVEC (phase);
1908 /* Create a decl for the guard at the top of the stack frame. */
1910 static void
1911 create_stack_guard (void)
1913 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1914 VAR_DECL, NULL, ptr_type_node);
1915 TREE_THIS_VOLATILE (guard) = 1;
1916 TREE_USED (guard) = 1;
1917 expand_one_stack_var (guard);
1918 crtl->stack_protect_guard = guard;
1921 /* Prepare for expanding variables. */
1922 static void
1923 init_vars_expansion (void)
1925 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1926 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1928 /* A map from decl to stack partition. */
1929 decl_to_stack_part = new hash_map<tree, size_t>;
1931 /* Initialize local stack smashing state. */
1932 has_protected_decls = false;
1933 has_short_buffer = false;
1936 /* Free up stack variable graph data. */
1937 static void
1938 fini_vars_expansion (void)
1940 bitmap_obstack_release (&stack_var_bitmap_obstack);
1941 if (stack_vars)
1942 XDELETEVEC (stack_vars);
1943 if (stack_vars_sorted)
1944 XDELETEVEC (stack_vars_sorted);
1945 stack_vars = NULL;
1946 stack_vars_sorted = NULL;
1947 stack_vars_alloc = stack_vars_num = 0;
1948 delete decl_to_stack_part;
1949 decl_to_stack_part = NULL;
1952 /* Make a fair guess for the size of the stack frame of the function
1953 in NODE. This doesn't have to be exact; the result is only used in
1954 the inline heuristics. So we don't want to run the full stack var
1955 packing algorithm (which is quadratic in the number of stack vars).
1956 Instead, we calculate the total size of all stack vars. This turns
1957 out to be a pretty fair estimate -- packing of stack vars doesn't
1958 happen very often. */
1960 HOST_WIDE_INT
1961 estimated_stack_frame_size (struct cgraph_node *node)
1963 poly_int64 size = 0;
1964 size_t i;
1965 tree var;
1966 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1968 push_cfun (fn);
1970 init_vars_expansion ();
1972 FOR_EACH_LOCAL_DECL (fn, i, var)
1973 if (auto_var_in_fn_p (var, fn->decl))
1974 size += expand_one_var (var, true, false);
1976 if (stack_vars_num > 0)
1978 /* Fake sorting the stack vars for account_stack_vars (). */
1979 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1980 for (i = 0; i < stack_vars_num; ++i)
1981 stack_vars_sorted[i] = i;
1982 size += account_stack_vars ();
1985 fini_vars_expansion ();
1986 pop_cfun ();
1987 return estimated_poly_value (size);
1990 /* Helper routine to check if a record or union contains an array field. */
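/* E.g. both struct A { int n; char buf[8]; } and
struct B { struct A a; } make this return 1, while
struct C { int n; char *p; } does not.  */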
1992 static int
1993 record_or_union_type_has_array_p (const_tree tree_type)
1995 tree fields = TYPE_FIELDS (tree_type);
1996 tree f;
1998 for (f = fields; f; f = DECL_CHAIN (f))
1999 if (TREE_CODE (f) == FIELD_DECL)
2001 tree field_type = TREE_TYPE (f);
2002 if (RECORD_OR_UNION_TYPE_P (field_type)
2003 && record_or_union_type_has_array_p (field_type))
2004 return 1;
2005 if (TREE_CODE (field_type) == ARRAY_TYPE)
2006 return 1;
2008 return 0;
2011 /* Check if the current function has local referenced variables that
2012 have their addresses taken, contain an array, or are arrays. */
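/* E.g. a local char name[32], a local struct that contains an array,
or a local int x whose address is taken via &x all cause this to
return true.  */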
2014 static bool
2015 stack_protect_decl_p ()
2017 unsigned i;
2018 tree var;
2020 FOR_EACH_LOCAL_DECL (cfun, i, var)
2021 if (!is_global_var (var))
2023 tree var_type = TREE_TYPE (var);
2024 if (VAR_P (var)
2025 && (TREE_CODE (var_type) == ARRAY_TYPE
2026 || TREE_ADDRESSABLE (var)
2027 || (RECORD_OR_UNION_TYPE_P (var_type)
2028 && record_or_union_type_has_array_p (var_type))))
2029 return true;
2031 return false;
2034 /* Check if the current function has calls that use a return slot. */
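/* A return slot is used when a call's aggregate return value is
written into caller-provided memory rather than returned in
registers, e.g. struct big s = f (); for a sufficiently large
struct big.  */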
2036 static bool
2037 stack_protect_return_slot_p ()
2039 basic_block bb;
2041 FOR_ALL_BB_FN (bb, cfun)
2042 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2043 !gsi_end_p (gsi); gsi_next (&gsi))
2045 gimple *stmt = gsi_stmt (gsi);
2046 /* This assumes that calls to internal-only functions never
2047 use a return slot. */
2048 if (is_gimple_call (stmt)
2049 && !gimple_call_internal_p (stmt)
2050 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2051 gimple_call_fndecl (stmt)))
2052 return true;
2054 return false;
2057 /* Expand all variables used in the function. */
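/* Roughly: the SSA name partitions are expanded first, then the
remaining local decls and scope blocks are collected as stack
variables, partitioned so that non-conflicting ones can share a
slot, and finally laid out in stack-protector / ASAN phase order
before RTL is assigned.  */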
2059 static rtx_insn *
2060 expand_used_vars (void)
2062 tree var, outer_block = DECL_INITIAL (current_function_decl);
2063 auto_vec<tree> maybe_local_decls;
2064 rtx_insn *var_end_seq = NULL;
2065 unsigned i;
2066 unsigned len;
2067 bool gen_stack_protect_signal = false;
2069 /* Compute the phase of the stack frame for this function. */
2071 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2072 int off = targetm.starting_frame_offset () % align;
2073 frame_phase = off ? align - off : 0;
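/* For example, with a PREFERRED_STACK_BOUNDARY of 128 bits (align == 16)
and targetm.starting_frame_offset () returning 8, off is 8 and
frame_phase becomes 8.  */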
2076 /* Set TREE_USED on all variables in the local_decls. */
2077 FOR_EACH_LOCAL_DECL (cfun, i, var)
2078 TREE_USED (var) = 1;
2079 /* Clear TREE_USED on all variables associated with a block scope. */
2080 clear_tree_used (DECL_INITIAL (current_function_decl));
2082 init_vars_expansion ();
2084 if (targetm.use_pseudo_pic_reg ())
2085 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2087 for (i = 0; i < SA.map->num_partitions; i++)
2089 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2090 continue;
2092 tree var = partition_to_var (SA.map, i);
2094 gcc_assert (!virtual_operand_p (var));
2096 expand_one_ssa_partition (var);
2099 if (flag_stack_protect == SPCT_FLAG_STRONG)
2100 gen_stack_protect_signal
2101 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2103 /* At this point all variables on the local_decls with TREE_USED
2104 set are not associated with any block scope. Lay them out. */
2106 len = vec_safe_length (cfun->local_decls);
2107 FOR_EACH_LOCAL_DECL (cfun, i, var)
2109 bool expand_now = false;
2111 /* Expanded above already. */
2112 if (is_gimple_reg (var))
2114 TREE_USED (var) = 0;
2115 goto next;
2117 /* We didn't set a block for static or extern because it's hard
2118 to tell the difference between a global variable (re)declared
2119 in a local scope, and one that's really declared there to
2120 begin with. And it doesn't really matter much, since we're
2121 not giving them stack space. Expand them now. */
2122 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2123 expand_now = true;
2125 /* Expand variables not associated with any block now. Those created by
2126 the optimizers could be live anywhere in the function. Those that
2127 could possibly have been scoped originally and detached from their
2128 block will have their allocation deferred so we coalesce them with
2129 others when optimization is enabled. */
2130 else if (TREE_USED (var))
2131 expand_now = true;
2133 /* Finally, mark all variables on the list as used. We'll use
2134 this in a moment when we expand those associated with scopes. */
2135 TREE_USED (var) = 1;
2137 if (expand_now)
2138 expand_one_var (var, true, true);
2140 next:
2141 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2143 rtx rtl = DECL_RTL_IF_SET (var);
2145 /* Keep artificial non-ignored vars in cfun->local_decls
2146 chain until instantiate_decls. */
2147 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2148 add_local_decl (cfun, var);
2149 else if (rtl == NULL_RTX)
2150 /* If rtl isn't set yet, which can happen e.g. with
2151 -fstack-protector, retry before returning from this
2152 function. */
2153 maybe_local_decls.safe_push (var);
2157 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2159 +-----------------+-----------------+
2160 | ...processed... | ...duplicates...|
2161 +-----------------+-----------------+
2163 +-- LEN points here.
2165 We just want the duplicates, as those are the artificial
2166 non-ignored vars that we want to keep until instantiate_decls.
2167 Move them down and truncate the array. */
2168 if (!vec_safe_is_empty (cfun->local_decls))
2169 cfun->local_decls->block_remove (0, len);
2171 /* At this point, all variables within the block tree with TREE_USED
2172 set are actually used by the optimized function. Lay them out. */
2173 expand_used_vars_for_block (outer_block, true);
2175 if (stack_vars_num > 0)
2177 add_scope_conflicts ();
2179 /* If stack protection is enabled, we don't share space between
2180 vulnerable data and non-vulnerable data. */
2181 if (flag_stack_protect != 0
2182 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2183 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2184 && lookup_attribute ("stack_protect",
2185 DECL_ATTRIBUTES (current_function_decl)))))
2186 add_stack_protection_conflicts ();
2188 /* Now that we have collected all stack variables, and have computed a
2189 minimal interference graph, attempt to save some stack space. */
2190 partition_stack_vars ();
2191 if (dump_file)
2192 dump_stack_var_partition ();
2195 switch (flag_stack_protect)
2197 case SPCT_FLAG_ALL:
2198 create_stack_guard ();
2199 break;
2201 case SPCT_FLAG_STRONG:
2202 if (gen_stack_protect_signal
2203 || cfun->calls_alloca || has_protected_decls
2204 || lookup_attribute ("stack_protect",
2205 DECL_ATTRIBUTES (current_function_decl)))
2206 create_stack_guard ();
2207 break;
2209 case SPCT_FLAG_DEFAULT:
2210 if (cfun->calls_alloca || has_protected_decls
2211 || lookup_attribute ("stack_protect",
2212 DECL_ATTRIBUTES (current_function_decl)))
2213 create_stack_guard ();
2214 break;
2216 case SPCT_FLAG_EXPLICIT:
2217 if (lookup_attribute ("stack_protect",
2218 DECL_ATTRIBUTES (current_function_decl)))
2219 create_stack_guard ();
2220 break;
2221 default:
2225 /* Assign rtl to each variable based on these partitions. */
2226 if (stack_vars_num > 0)
2228 struct stack_vars_data data;
2230 data.asan_base = NULL_RTX;
2231 data.asan_alignb = 0;
2233 /* Reorder decls to be protected by iterating over the variables
2234 array multiple times, and allocating out of each phase in turn. */
2235 /* ??? We could probably integrate this into the qsort we did
2236 earlier, such that we naturally see these variables first,
2237 and thus naturally allocate things in the right order. */
2238 if (has_protected_decls)
2240 /* Phase 1 contains only character arrays. */
2241 expand_stack_vars (stack_protect_decl_phase_1, &data);
2243 /* Phase 2 contains other kinds of arrays. */
2244 if (flag_stack_protect == SPCT_FLAG_ALL
2245 || flag_stack_protect == SPCT_FLAG_STRONG
2246 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2247 && lookup_attribute ("stack_protect",
2248 DECL_ATTRIBUTES (current_function_decl))))
2249 expand_stack_vars (stack_protect_decl_phase_2, &data);
2252 if (asan_sanitize_stack_p ())
2253 /* Phase 3, any partitions that need asan protection
2254 in addition to phase 1 and 2. */
2255 expand_stack_vars (asan_decl_phase_3, &data);
2257 /* ASAN description strings don't yet have a syntax for expressing
2258 polynomial offsets. */
2259 HOST_WIDE_INT prev_offset;
2260 if (!data.asan_vec.is_empty ()
2261 && frame_offset.is_constant (&prev_offset))
2263 HOST_WIDE_INT offset, sz, redzonesz;
2264 redzonesz = ASAN_RED_ZONE_SIZE;
2265 sz = data.asan_vec[0] - prev_offset;
2266 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2267 && data.asan_alignb <= 4096
2268 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2269 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2270 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
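/* I.e. round sz + ASAN_RED_ZONE_SIZE up to a multiple of
data.asan_alignb and subtract sz again, so the red zone is at
least ASAN_RED_ZONE_SIZE bytes and the protected area stays
aligned.  */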
2271 /* Allocating a constant amount of space from a constant
2272 starting offset must give a constant result. */
2273 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2274 .to_constant ());
2275 data.asan_vec.safe_push (prev_offset);
2276 data.asan_vec.safe_push (offset);
2277 /* Leave space for alignment if STRICT_ALIGNMENT. */
2278 if (STRICT_ALIGNMENT)
2279 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2280 << ASAN_SHADOW_SHIFT)
2281 / BITS_PER_UNIT, 1);
2283 var_end_seq
2284 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2285 data.asan_base,
2286 data.asan_alignb,
2287 data.asan_vec.address (),
2288 data.asan_decl_vec.address (),
2289 data.asan_vec.length ());
2292 expand_stack_vars (NULL, &data);
2295 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2296 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2297 virtual_stack_vars_rtx,
2298 var_end_seq);
2300 fini_vars_expansion ();
2302 /* If there were any artificial non-ignored vars without rtl
2303 found earlier, see if deferred stack allocation hasn't assigned
2304 rtl to them. */
2305 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2307 rtx rtl = DECL_RTL_IF_SET (var);
2309 /* Keep artificial non-ignored vars in cfun->local_decls
2310 chain until instantiate_decls. */
2311 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2312 add_local_decl (cfun, var);
2315 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2316 if (STACK_ALIGNMENT_NEEDED)
2318 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2319 if (FRAME_GROWS_DOWNWARD)
2320 frame_offset = aligned_lower_bound (frame_offset, align);
2321 else
2322 frame_offset = aligned_upper_bound (frame_offset, align);
2325 return var_end_seq;
2329 /* If we need to produce a detailed dump, print the tree representation
2330 for STMT to the dump file. SINCE is the last RTX after which the RTL
2331 generated for STMT should have been appended. */
2333 static void
2334 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2336 if (dump_file && (dump_flags & TDF_DETAILS))
2338 fprintf (dump_file, "\n;; ");
2339 print_gimple_stmt (dump_file, stmt, 0,
2340 TDF_SLIM | (dump_flags & TDF_LINENO));
2341 fprintf (dump_file, "\n");
2343 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2347 /* Maps the blocks that do not contain tree labels to rtx labels. */
2349 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2351 /* Returns the label_rtx expression for a label starting basic block BB. */
2353 static rtx_code_label *
2354 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2356 gimple_stmt_iterator gsi;
2357 tree lab;
2359 if (bb->flags & BB_RTL)
2360 return block_label (bb);
2362 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2363 if (elt)
2364 return *elt;
2366 /* Find the tree label if it is present. */
2368 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2370 glabel *lab_stmt;
2372 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2373 if (!lab_stmt)
2374 break;
2376 lab = gimple_label_label (lab_stmt);
2377 if (DECL_NONLOCAL (lab))
2378 break;
2380 return jump_target_rtx (lab);
2383 rtx_code_label *l = gen_label_rtx ();
2384 lab_rtx_for_bb->put (bb, l);
2385 return l;
2389 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2390 of a basic block where we just expanded the conditional at the end,
2391 possibly clean up the CFG and instruction sequence. LAST is the
2392 last instruction before the just emitted jump sequence. */
2394 static void
2395 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2397 /* Special case: when jumpif decides that the condition is
2398 trivial it emits an unconditional jump (and the necessary
2399 barrier). But we still have two edges, the fallthru one is
2400 wrong. purge_dead_edges would clean this up later. Unfortunately
2401 we have to insert insns (and split edges) before
2402 find_many_sub_basic_blocks and hence before purge_dead_edges.
2403 But splitting edges might create new blocks which depend on the
2404 fact that if there are two edges there's no barrier. So the
2405 barrier would get lost and verify_flow_info would ICE. Instead
2406 of auditing all edge splitters to care for the barrier (which
2407 normally isn't there in a cleaned CFG), fix it here. */
2408 if (BARRIER_P (get_last_insn ()))
2410 rtx_insn *insn;
2411 remove_edge (e);
2412 /* Now, we have a single successor block, if we have insns to
2413 insert on the remaining edge we potentially will insert
2414 it at the end of this block (if the dest block isn't feasible)
2415 in order to avoid splitting the edge. This insertion will take
2416 place in front of the last jump. But we might have emitted
2417 multiple jumps (conditional and one unconditional) to the
2418 same destination. Inserting in front of the last one then
2419 is a problem. See PR 40021. We fix this by deleting all
2420 jumps except the last unconditional one. */
2421 insn = PREV_INSN (get_last_insn ());
2422 /* Make sure we have an unconditional jump. Otherwise we're
2423 confused. */
2424 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2425 for (insn = PREV_INSN (insn); insn != last;)
2427 insn = PREV_INSN (insn);
2428 if (JUMP_P (NEXT_INSN (insn)))
2430 if (!any_condjump_p (NEXT_INSN (insn)))
2432 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2433 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2435 delete_insn (NEXT_INSN (insn));
2441 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2442 Returns a new basic block if we've terminated the current basic
2443 block and created a new one. */
2445 static basic_block
2446 expand_gimple_cond (basic_block bb, gcond *stmt)
2448 basic_block new_bb, dest;
2449 edge true_edge;
2450 edge false_edge;
2451 rtx_insn *last2, *last;
2452 enum tree_code code;
2453 tree op0, op1;
2455 code = gimple_cond_code (stmt);
2456 op0 = gimple_cond_lhs (stmt);
2457 op1 = gimple_cond_rhs (stmt);
2458 /* We're sometimes presented with such code:
2459 D.123_1 = x < y;
2460 if (D.123_1 != 0)
2462 This would expand to two comparisons which then later might
2463 be cleaned up by combine. But some pattern matchers like if-conversion
2464 work better when there's only one compare, so make up for this
2465 here as special exception if TER would have made the same change. */
2466 if (SA.values
2467 && TREE_CODE (op0) == SSA_NAME
2468 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2469 && TREE_CODE (op1) == INTEGER_CST
2470 && ((gimple_cond_code (stmt) == NE_EXPR
2471 && integer_zerop (op1))
2472 || (gimple_cond_code (stmt) == EQ_EXPR
2473 && integer_onep (op1)))
2474 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2476 gimple *second = SSA_NAME_DEF_STMT (op0);
2477 if (gimple_code (second) == GIMPLE_ASSIGN)
2479 enum tree_code code2 = gimple_assign_rhs_code (second);
2480 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2482 code = code2;
2483 op0 = gimple_assign_rhs1 (second);
2484 op1 = gimple_assign_rhs2 (second);
2486 /* If jumps are cheap and the target does not support conditional
2487 compare, turn some more codes into jumpy sequences. */
2488 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2489 && targetm.gen_ccmp_first == NULL)
2491 if ((code2 == BIT_AND_EXPR
2492 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2493 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2494 || code2 == TRUTH_AND_EXPR)
2496 code = TRUTH_ANDIF_EXPR;
2497 op0 = gimple_assign_rhs1 (second);
2498 op1 = gimple_assign_rhs2 (second);
2500 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2502 code = TRUTH_ORIF_EXPR;
2503 op0 = gimple_assign_rhs1 (second);
2504 op1 = gimple_assign_rhs2 (second);
2510 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2511 into (x - C2) * C3 < C4. */
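/* E.g. on a 32-bit target an unsigned x % 3 == 0 can become
x * 0xaaaaaaab < 0x55555556, replacing the division with a
multiplication.  */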
2512 if ((code == EQ_EXPR || code == NE_EXPR)
2513 && TREE_CODE (op0) == SSA_NAME
2514 && TREE_CODE (op1) == INTEGER_CST)
2515 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2517 last2 = last = get_last_insn ();
2519 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2520 set_curr_insn_location (gimple_location (stmt));
2522 /* These flags have no purpose in RTL land. */
2523 true_edge->flags &= ~EDGE_TRUE_VALUE;
2524 false_edge->flags &= ~EDGE_FALSE_VALUE;
2526 /* We can either have a pure conditional jump with one fallthru edge or
2527 two-way jump that needs to be decomposed into two basic blocks. */
2528 if (false_edge->dest == bb->next_bb)
2530 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 true_edge->probability);
2532 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2533 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2534 set_curr_insn_location (true_edge->goto_locus);
2535 false_edge->flags |= EDGE_FALLTHRU;
2536 maybe_cleanup_end_of_block (false_edge, last);
2537 return NULL;
2539 if (true_edge->dest == bb->next_bb)
2541 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2542 false_edge->probability);
2543 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2544 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2545 set_curr_insn_location (false_edge->goto_locus);
2546 true_edge->flags |= EDGE_FALLTHRU;
2547 maybe_cleanup_end_of_block (true_edge, last);
2548 return NULL;
2551 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2552 true_edge->probability);
2553 last = get_last_insn ();
2554 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2555 set_curr_insn_location (false_edge->goto_locus);
2556 emit_jump (label_rtx_for_bb (false_edge->dest));
2558 BB_END (bb) = last;
2559 if (BARRIER_P (BB_END (bb)))
2560 BB_END (bb) = PREV_INSN (BB_END (bb));
2561 update_bb_for_insn (bb);
2563 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2564 dest = false_edge->dest;
2565 redirect_edge_succ (false_edge, new_bb);
2566 false_edge->flags |= EDGE_FALLTHRU;
2567 new_bb->count = false_edge->count ();
2568 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2569 add_bb_to_loop (new_bb, loop);
2570 if (loop->latch == bb
2571 && loop->header == dest)
2572 loop->latch = new_bb;
2573 make_single_succ_edge (new_bb, dest, 0);
2574 if (BARRIER_P (BB_END (new_bb)))
2575 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2576 update_bb_for_insn (new_bb);
2578 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2580 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2582 set_curr_insn_location (true_edge->goto_locus);
2583 true_edge->goto_locus = curr_insn_location ();
2586 return new_bb;
2589 /* Mark all calls that can have a transaction restart. */
2591 static void
2592 mark_transaction_restart_calls (gimple *stmt)
2594 struct tm_restart_node dummy;
2595 tm_restart_node **slot;
2597 if (!cfun->gimple_df->tm_restart)
2598 return;
2600 dummy.stmt = stmt;
2601 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2602 if (slot)
2604 struct tm_restart_node *n = *slot;
2605 tree list = n->label_or_list;
2606 rtx_insn *insn;
2608 for (insn = next_real_insn (get_last_insn ());
2609 !CALL_P (insn);
2610 insn = next_real_insn (insn))
2611 continue;
2613 if (TREE_CODE (list) == LABEL_DECL)
2614 add_reg_note (insn, REG_TM, label_rtx (list));
2615 else
2616 for (; list ; list = TREE_CHAIN (list))
2617 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2621 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2622 statement STMT. */
2624 static void
2625 expand_call_stmt (gcall *stmt)
2627 tree exp, decl, lhs;
2628 bool builtin_p;
2629 size_t i;
2631 if (gimple_call_internal_p (stmt))
2633 expand_internal_call (stmt);
2634 return;
2637 /* If this is a call to a built-in function and it has no effect other
2638 than setting the lhs, try to implement it using an internal function
2639 instead. */
2640 decl = gimple_call_fndecl (stmt);
2641 if (gimple_call_lhs (stmt)
2642 && !gimple_has_side_effects (stmt)
2643 && (optimize || (decl && called_as_built_in (decl))))
2645 internal_fn ifn = replacement_internal_fn (stmt);
2646 if (ifn != IFN_LAST)
2648 expand_internal_call (ifn, stmt);
2649 return;
2653 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2655 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2656 builtin_p = decl && fndecl_built_in_p (decl);
2658 /* If this is not a builtin function, the function type through which the
2659 call is made may be different from the type of the function. */
2660 if (!builtin_p)
2661 CALL_EXPR_FN (exp)
2662 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2663 CALL_EXPR_FN (exp));
2665 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2666 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2668 for (i = 0; i < gimple_call_num_args (stmt); i++)
2670 tree arg = gimple_call_arg (stmt, i);
2671 gimple *def;
2672 /* TER substitutes addresses into the arguments of builtin functions so
2673 we have a chance to infer more correct alignment information. See PR39954. */
2674 if (builtin_p
2675 && TREE_CODE (arg) == SSA_NAME
2676 && (def = get_gimple_for_ssa_name (arg))
2677 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2678 arg = gimple_assign_rhs1 (def);
2679 CALL_EXPR_ARG (exp, i) = arg;
2682 if (gimple_has_side_effects (stmt))
2683 TREE_SIDE_EFFECTS (exp) = 1;
2685 if (gimple_call_nothrow_p (stmt))
2686 TREE_NOTHROW (exp) = 1;
2688 if (gimple_no_warning_p (stmt))
2689 TREE_NO_WARNING (exp) = 1;
2691 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2692 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2693 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2694 if (decl
2695 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2696 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2697 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2698 else
2699 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2700 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2701 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2702 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2704 /* Ensure RTL is created for debug args. */
2705 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2707 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2708 unsigned int ix;
2709 tree dtemp;
2711 if (debug_args)
2712 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2714 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2715 expand_debug_expr (dtemp);
2719 rtx_insn *before_call = get_last_insn ();
2720 lhs = gimple_call_lhs (stmt);
2721 if (lhs)
2722 expand_assignment (lhs, exp, false);
2723 else
2724 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2726 /* If the gimple call is an indirect call and has the 'nocf_check'
2727 attribute, find the generated CALL insn and mark it as needing no
2728 control-flow verification. */
2729 if (gimple_call_nocf_check_p (stmt)
2730 && !gimple_call_fndecl (stmt))
2732 rtx_insn *last = get_last_insn ();
2733 while (!CALL_P (last)
2734 && last != before_call)
2735 last = PREV_INSN (last);
2737 if (last != before_call)
2738 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2741 mark_transaction_restart_calls (stmt);
2745 /* Generate RTL for an asm statement (explicit assembler code).
2746 STRING is a STRING_CST node containing the assembler code text,
2747 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2748 insn is volatile; don't optimize it. */
2750 static void
2751 expand_asm_loc (tree string, int vol, location_t locus)
2753 rtx body;
2755 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2756 ggc_strdup (TREE_STRING_POINTER (string)),
2757 locus);
2759 MEM_VOLATILE_P (body) = vol;
2761 /* Non-empty basic ASM implicitly clobbers memory. */
2762 if (TREE_STRING_LENGTH (string) != 0)
2764 rtx asm_op, clob;
2765 unsigned i, nclobbers;
2766 auto_vec<rtx> input_rvec, output_rvec;
2767 auto_vec<const char *> constraints;
2768 auto_vec<rtx> clobber_rvec;
2769 HARD_REG_SET clobbered_regs;
2770 CLEAR_HARD_REG_SET (clobbered_regs);
2772 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2773 clobber_rvec.safe_push (clob);
2775 if (targetm.md_asm_adjust)
2776 targetm.md_asm_adjust (output_rvec, input_rvec,
2777 constraints, clobber_rvec,
2778 clobbered_regs);
2780 asm_op = body;
2781 nclobbers = clobber_rvec.length ();
2782 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2784 XVECEXP (body, 0, 0) = asm_op;
2785 for (i = 0; i < nclobbers; i++)
2786 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2789 emit_insn (body);
2792 /* Return the number of times character C occurs in string S. */
2793 static int
2794 n_occurrences (int c, const char *s)
2796 int n = 0;
2797 while (*s)
2798 n += (*s++ == c);
2799 return n;
2802 /* A subroutine of expand_asm_operands. Check that all operands have
2803 the same number of alternatives. Return true if so. */
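/* E.g. the output constraint "=r,m" and the input constraint "r,o"
both describe two alternatives and are consistent with each other;
pairing "=r,m" with a plain "r" would be rejected.  */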
2805 static bool
2806 check_operand_nalternatives (const vec<const char *> &constraints)
2808 unsigned len = constraints.length();
2809 if (len > 0)
2811 int nalternatives = n_occurrences (',', constraints[0]);
2813 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2815 error ("too many alternatives in %<asm%>");
2816 return false;
2819 for (unsigned i = 1; i < len; ++i)
2820 if (n_occurrences (',', constraints[i]) != nalternatives)
2822 error ("operand constraints for %<asm%> differ "
2823 "in number of alternatives");
2824 return false;
2827 return true;
2830 /* Check for overlap between registers marked in CLOBBERED_REGS and
2831 anything inappropriate in T. Emit an error and return true if such
2832 an overlap is found, false if everything is ok. */
2834 static bool
2835 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2837 /* Conflicts between asm-declared register variables and the clobber
2838 list are not allowed. */
2839 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2841 if (overlap)
2843 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2844 DECL_NAME (overlap));
2846 /* Reset registerness to stop multiple errors being emitted for a single
2847 variable. */
2848 DECL_REGISTER (overlap) = 0;
2849 return true;
2852 return false;
2855 /* Check that the given REGNO spanning NREGS is a valid
2856 asm clobber operand. Some HW registers cannot be
2857 saved/restored, hence they should not be clobbered by
2858 asm statements. */
2859 static bool
2860 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2862 bool is_valid = true;
2863 HARD_REG_SET regset;
2865 CLEAR_HARD_REG_SET (regset);
2867 add_range_to_hard_reg_set (&regset, regno, nregs);
2869 /* Clobbering the PIC register is an error. */
2870 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2871 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2873 /* ??? Diagnose during gimplification? */
2874 error ("PIC register clobbered by %qs in %<asm%>", regname);
2875 is_valid = false;
2877 /* Clobbering the stack pointer register is deprecated. GCC expects
2878 the value of the stack pointer after an asm statement to be the same
2879 as it was before, so no asm can validly clobber the stack pointer in
2880 the usual sense. Adding the stack pointer to the clobber list has
2881 traditionally had some undocumented and somewhat obscure side-effects. */
2882 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
2883 && warning (OPT_Wdeprecated, "listing the stack pointer register"
2884 " %qs in a clobber list is deprecated", regname))
2885 inform (input_location, "the value of the stack pointer after an %<asm%>"
2886 " statement must be the same as it was before the statement");
2888 return is_valid;
2891 /* Generate RTL for an asm statement with arguments.
2892 STRING is the instruction template.
2893 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2894 Each output or input has an expression in the TREE_VALUE and
2895 a tree list in TREE_PURPOSE which in turn contains a constraint
2896 name in TREE_VALUE (or NULL_TREE) and a constraint string
2897 in TREE_PURPOSE.
2898 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2899 that is clobbered by this insn.
2901 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2902 should be the fallthru basic block of the asm goto.
2904 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2905 Some elements of OUTPUTS may be replaced with trees representing temporary
2906 values. The caller should copy those temporary values to the originally
2907 specified lvalues.
2909 VOL nonzero means the insn is volatile; don't optimize it. */
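/* For a source statement such as
asm volatile ("add %1, %0" : "+r" (x) : "r" (y) : "cc");
the output list carries the "+r" (x) operand, the input list the
"r" (y) operand, and the clobber list names the "cc" register.  */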
2911 static void
2912 expand_asm_stmt (gasm *stmt)
2914 class save_input_location
2916 location_t old;
2918 public:
2919 explicit save_input_location(location_t where)
2921 old = input_location;
2922 input_location = where;
2925 ~save_input_location()
2927 input_location = old;
2931 location_t locus = gimple_location (stmt);
2933 if (gimple_asm_input_p (stmt))
2935 const char *s = gimple_asm_string (stmt);
2936 tree string = build_string (strlen (s), s);
2937 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2938 return;
2941 /* There are some legacy diagnostics in here; this also avoids a
2942 sixth parameter to targetm.md_asm_adjust. */
2943 save_input_location s_i_l(locus);
2945 unsigned noutputs = gimple_asm_noutputs (stmt);
2946 unsigned ninputs = gimple_asm_ninputs (stmt);
2947 unsigned nlabels = gimple_asm_nlabels (stmt);
2948 unsigned i;
2950 /* ??? Diagnose during gimplification? */
2951 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2953 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2954 return;
2957 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2958 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2959 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2961 /* Copy the gimple vectors into new vectors that we can manipulate. */
2963 output_tvec.safe_grow (noutputs);
2964 input_tvec.safe_grow (ninputs);
2965 constraints.safe_grow (noutputs + ninputs);
2967 for (i = 0; i < noutputs; ++i)
2969 tree t = gimple_asm_output_op (stmt, i);
2970 output_tvec[i] = TREE_VALUE (t);
2971 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2973 for (i = 0; i < ninputs; i++)
2975 tree t = gimple_asm_input_op (stmt, i);
2976 input_tvec[i] = TREE_VALUE (t);
2977 constraints[i + noutputs]
2978 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2981 /* ??? Diagnose during gimplification? */
2982 if (! check_operand_nalternatives (constraints))
2983 return;
2985 /* Count the number of meaningful clobbered registers, ignoring what
2986 we would ignore later. */
2987 auto_vec<rtx> clobber_rvec;
2988 HARD_REG_SET clobbered_regs;
2989 CLEAR_HARD_REG_SET (clobbered_regs);
2991 if (unsigned n = gimple_asm_nclobbers (stmt))
2993 clobber_rvec.reserve (n);
2994 for (i = 0; i < n; i++)
2996 tree t = gimple_asm_clobber_op (stmt, i);
2997 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2998 int nregs, j;
3000 j = decode_reg_name_and_count (regname, &nregs);
3001 if (j < 0)
3003 if (j == -2)
3005 /* ??? Diagnose during gimplification? */
3006 error ("unknown register name %qs in %<asm%>", regname);
3008 else if (j == -4)
3010 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3011 clobber_rvec.safe_push (x);
3013 else
3015 /* Otherwise we should have -1 == empty string
3016 or -3 == cc, which is not a register. */
3017 gcc_assert (j == -1 || j == -3);
3020 else
3021 for (int reg = j; reg < j + nregs; reg++)
3023 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3024 return;
3026 SET_HARD_REG_BIT (clobbered_regs, reg);
3027 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3028 clobber_rvec.safe_push (x);
3032 unsigned nclobbers = clobber_rvec.length();
3034 /* First pass over inputs and outputs checks validity and sets
3035 mark_addressable if needed. */
3036 /* ??? Diagnose during gimplification? */
3038 for (i = 0; i < noutputs; ++i)
3040 tree val = output_tvec[i];
3041 tree type = TREE_TYPE (val);
3042 const char *constraint;
3043 bool is_inout;
3044 bool allows_reg;
3045 bool allows_mem;
3047 /* Try to parse the output constraint. If that fails, there's
3048 no point in going further. */
3049 constraint = constraints[i];
3050 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3051 &allows_mem, &allows_reg, &is_inout))
3052 return;
3054 /* If the output is a hard register, verify it doesn't conflict with
3055 any other operand's possible hard register use. */
3056 if (DECL_P (val)
3057 && REG_P (DECL_RTL (val))
3058 && HARD_REGISTER_P (DECL_RTL (val)))
3060 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3061 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3062 unsigned long match;
3064 /* Verify the other outputs do not use the same hard register. */
3065 for (j = i + 1; j < noutputs; ++j)
3066 if (DECL_P (output_tvec[j])
3067 && REG_P (DECL_RTL (output_tvec[j]))
3068 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3069 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3070 error ("invalid hard register usage between output operands");
3072 /* Verify matching constraint operands use the same hard register
3073 and that the non-matching constraint operands do not use the same
3074 hard register if the output is an early clobber operand. */
3075 for (j = 0; j < ninputs; ++j)
3076 if (DECL_P (input_tvec[j])
3077 && REG_P (DECL_RTL (input_tvec[j]))
3078 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3080 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3081 switch (*constraints[j + noutputs])
3083 case '0': case '1': case '2': case '3': case '4':
3084 case '5': case '6': case '7': case '8': case '9':
3085 match = strtoul (constraints[j + noutputs], NULL, 10);
3086 break;
3087 default:
3088 match = ULONG_MAX;
3089 break;
3091 if (i == match
3092 && output_hregno != input_hregno)
3093 error ("invalid hard register usage between output operand "
3094 "and matching constraint operand");
3095 else if (early_clobber_p
3096 && i != match
3097 && output_hregno == input_hregno)
3098 error ("invalid hard register usage between earlyclobber "
3099 "operand and input operand");
3103 if (! allows_reg
3104 && (allows_mem
3105 || is_inout
3106 || (DECL_P (val)
3107 && REG_P (DECL_RTL (val))
3108 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3109 mark_addressable (val);
3112 for (i = 0; i < ninputs; ++i)
3114 bool allows_reg, allows_mem;
3115 const char *constraint;
3117 constraint = constraints[i + noutputs];
3118 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3119 constraints.address (),
3120 &allows_mem, &allows_reg))
3121 return;
3123 if (! allows_reg && allows_mem)
3124 mark_addressable (input_tvec[i]);
3127 /* Second pass evaluates arguments. */
3129 /* Make sure stack is consistent for asm goto. */
3130 if (nlabels > 0)
3131 do_pending_stack_adjust ();
3132 int old_generating_concat_p = generating_concat_p;
3134 /* Vector of RTX's of evaluated output operands. */
3135 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3136 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3137 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3139 output_rvec.safe_grow (noutputs);
3141 for (i = 0; i < noutputs; ++i)
3143 tree val = output_tvec[i];
3144 tree type = TREE_TYPE (val);
3145 bool is_inout, allows_reg, allows_mem, ok;
3146 rtx op;
3148 ok = parse_output_constraint (&constraints[i], i, ninputs,
3149 noutputs, &allows_mem, &allows_reg,
3150 &is_inout);
3151 gcc_assert (ok);
3153 /* If an output operand is not a decl or indirect ref and our constraint
3154 allows a register, make a temporary to act as an intermediate.
3155 Make the asm insn write into that, then we will copy it to
3156 the real output operand. Likewise for promoted variables. */
3158 generating_concat_p = 0;
3160 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3161 || (DECL_P (val)
3162 && (allows_mem || REG_P (DECL_RTL (val)))
3163 && ! (REG_P (DECL_RTL (val))
3164 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3165 || ! allows_reg
3166 || is_inout
3167 || TREE_ADDRESSABLE (type))
3169 op = expand_expr (val, NULL_RTX, VOIDmode,
3170 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3171 if (MEM_P (op))
3172 op = validize_mem (op);
3174 if (! allows_reg && !MEM_P (op))
3175 error ("output number %d not directly addressable", i);
3176 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3177 || GET_CODE (op) == CONCAT)
3179 rtx old_op = op;
3180 op = gen_reg_rtx (GET_MODE (op));
3182 generating_concat_p = old_generating_concat_p;
3184 if (is_inout)
3185 emit_move_insn (op, old_op);
3187 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3188 emit_move_insn (old_op, op);
3189 after_rtl_seq = get_insns ();
3190 after_rtl_end = get_last_insn ();
3191 end_sequence ();
3194 else
3196 op = assign_temp (type, 0, 1);
3197 op = validize_mem (op);
3198 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3199 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3201 generating_concat_p = old_generating_concat_p;
3203 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3204 expand_assignment (val, make_tree (type, op), false);
3205 after_rtl_seq = get_insns ();
3206 after_rtl_end = get_last_insn ();
3207 end_sequence ();
3209 output_rvec[i] = op;
3211 if (is_inout)
3212 inout_opnum.safe_push (i);
3215 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3216 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3218 input_rvec.safe_grow (ninputs);
3219 input_mode.safe_grow (ninputs);
3221 generating_concat_p = 0;
3223 for (i = 0; i < ninputs; ++i)
3225 tree val = input_tvec[i];
3226 tree type = TREE_TYPE (val);
3227 bool allows_reg, allows_mem, ok;
3228 const char *constraint;
3229 rtx op;
3231 constraint = constraints[i + noutputs];
3232 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3233 constraints.address (),
3234 &allows_mem, &allows_reg);
3235 gcc_assert (ok);
3237 /* EXPAND_INITIALIZER will not generate code for valid initializer
3238 constants, but will still generate code for other types of operand.
3239 This is the behavior we want for constant constraints. */
3240 op = expand_expr (val, NULL_RTX, VOIDmode,
3241 allows_reg ? EXPAND_NORMAL
3242 : allows_mem ? EXPAND_MEMORY
3243 : EXPAND_INITIALIZER);
3245 /* Never pass a CONCAT to an ASM. */
3246 if (GET_CODE (op) == CONCAT)
3247 op = force_reg (GET_MODE (op), op);
3248 else if (MEM_P (op))
3249 op = validize_mem (op);
3251 if (asm_operand_ok (op, constraint, NULL) <= 0)
3253 if (allows_reg && TYPE_MODE (type) != BLKmode)
3254 op = force_reg (TYPE_MODE (type), op);
3255 else if (!allows_mem)
3256 warning (0, "asm operand %d probably doesn%'t match constraints",
3257 i + noutputs);
3258 else if (MEM_P (op))
3260 /* We won't recognize either volatile memory or memory
3261 with a queued address as an available memory_operand
3262 at this point. Ignore it: clearly this *is* a memory. */
3264 else
3265 gcc_unreachable ();
3267 input_rvec[i] = op;
3268 input_mode[i] = TYPE_MODE (type);
3271 /* For in-out operands, copy output rtx to input rtx. */
3272 unsigned ninout = inout_opnum.length();
3273 for (i = 0; i < ninout; i++)
3275 int j = inout_opnum[i];
3276 rtx o = output_rvec[j];
3278 input_rvec.safe_push (o);
3279 input_mode.safe_push (GET_MODE (o));
3281 char buffer[16];
3282 sprintf (buffer, "%d", j);
3283 constraints.safe_push (ggc_strdup (buffer));
3285 ninputs += ninout;
3287 /* Sometimes we wish to automatically clobber registers across an asm.
3288 Case in point is when the i386 backend moved from cc0 to a hard reg --
3289 maintaining source-level compatibility means automatically clobbering
3290 the flags register. */
3291 rtx_insn *after_md_seq = NULL;
3292 if (targetm.md_asm_adjust)
3293 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3294 constraints, clobber_rvec,
3295 clobbered_regs);
3297 /* Do not allow the hook to change the output and input count,
3298 lest it mess up the operand numbering. */
3299 gcc_assert (output_rvec.length() == noutputs);
3300 gcc_assert (input_rvec.length() == ninputs);
3301 gcc_assert (constraints.length() == noutputs + ninputs);
3303 /* But it certainly can adjust the clobbers. */
3304 nclobbers = clobber_rvec.length();
3306 /* Third pass checks for easy conflicts. */
3307 /* ??? Why are we doing this on trees instead of rtx. */
3309 bool clobber_conflict_found = 0;
3310 for (i = 0; i < noutputs; ++i)
3311 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3312 clobber_conflict_found = 1;
3313 for (i = 0; i < ninputs - ninout; ++i)
3314 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3315 clobber_conflict_found = 1;
3317 /* Make vectors for the expression-rtx, constraint strings,
3318 and named operands. */
3320 rtvec argvec = rtvec_alloc (ninputs);
3321 rtvec constraintvec = rtvec_alloc (ninputs);
3322 rtvec labelvec = rtvec_alloc (nlabels);
3324 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3325 : GET_MODE (output_rvec[0])),
3326 ggc_strdup (gimple_asm_string (stmt)),
3327 "", 0, argvec, constraintvec,
3328 labelvec, locus);
3329 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3331 for (i = 0; i < ninputs; ++i)
3333 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3334 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3335 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3336 constraints[i + noutputs],
3337 locus);
3340 /* Copy labels to the vector. */
3341 rtx_code_label *fallthru_label = NULL;
3342 if (nlabels > 0)
3344 basic_block fallthru_bb = NULL;
3345 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3346 if (fallthru)
3347 fallthru_bb = fallthru->dest;
3349 for (i = 0; i < nlabels; ++i)
3351 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3352 rtx_insn *r;
3353 /* If asm goto has any labels in the fallthru basic block, use
3354 a label that we emit immediately after the asm goto. Expansion
3355 may insert further instructions into the same basic block after
3356 asm goto and if we don't do this, insertion of instructions on
3357 the fallthru edge might misbehave. See PR58670. */
3358 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3360 if (fallthru_label == NULL_RTX)
3361 fallthru_label = gen_label_rtx ();
3362 r = fallthru_label;
3364 else
3365 r = label_rtx (label);
3366 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3370 /* Now, for each output, construct an rtx
3371 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3372 ARGVEC CONSTRAINTS OPNAMES))
3373 If there is more than one, put them inside a PARALLEL. */
3375 if (nlabels > 0 && nclobbers == 0)
3377 gcc_assert (noutputs == 0);
3378 emit_jump_insn (body);
3380 else if (noutputs == 0 && nclobbers == 0)
3382 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3383 emit_insn (body);
3385 else if (noutputs == 1 && nclobbers == 0)
3387 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3388 emit_insn (gen_rtx_SET (output_rvec[0], body));
3390 else
3392 rtx obody = body;
3393 int num = noutputs;
3395 if (num == 0)
3396 num = 1;
3398 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3400 /* For each output operand, store a SET. */
3401 for (i = 0; i < noutputs; ++i)
3403 rtx src, o = output_rvec[i];
3404 if (i == 0)
3406 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3407 src = obody;
3409 else
3411 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3412 ASM_OPERANDS_TEMPLATE (obody),
3413 constraints[i], i, argvec,
3414 constraintvec, labelvec, locus);
3415 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3417 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3420 /* If there are no outputs (but there are some clobbers)
3421 store the bare ASM_OPERANDS into the PARALLEL. */
3422 if (i == 0)
3423 XVECEXP (body, 0, i++) = obody;
3425 /* Store (clobber REG) for each clobbered register specified. */
3426 for (unsigned j = 0; j < nclobbers; ++j)
3428 rtx clobbered_reg = clobber_rvec[j];
3430 /* Sanity-check for overlap between the clobbers and, respectively,
3431 the inputs and outputs that hasn't been handled. Such overlap
3432 should have been detected and reported above. */
3433 if (!clobber_conflict_found && REG_P (clobbered_reg))
3435 /* We test the old body (obody) contents to avoid
3436 tripping over the under-construction body. */
3437 for (unsigned k = 0; k < noutputs; ++k)
3438 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3439 internal_error ("asm clobber conflict with output operand");
3441 for (unsigned k = 0; k < ninputs - ninout; ++k)
3442 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3443 internal_error ("asm clobber conflict with input operand");
3446 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3449 if (nlabels > 0)
3450 emit_jump_insn (body);
3451 else
3452 emit_insn (body);
3455 generating_concat_p = old_generating_concat_p;
3457 if (fallthru_label)
3458 emit_label (fallthru_label);
3460 if (after_md_seq)
3461 emit_insn (after_md_seq);
3462 if (after_rtl_seq)
3463 emit_insn (after_rtl_seq);
3465 free_temp_slots ();
3466 crtl->has_asm_statement = 1;
3469 /* Emit code to jump to the address
3470 specified by the pointer expression EXP. */
3472 static void
3473 expand_computed_goto (tree exp)
3475 rtx x = expand_normal (exp);
3477 do_pending_stack_adjust ();
3478 emit_indirect_jump (x);
3481 /* Generate RTL code for a `goto' statement with target label LABEL.
3482 LABEL should be a LABEL_DECL tree node that was or will later be
3483 defined with `expand_label'. */
3485 static void
3486 expand_goto (tree label)
3488 if (flag_checking)
3490 /* Check for a nonlocal goto to a containing function. Should have
3491 gotten translated to __builtin_nonlocal_goto. */
3492 tree context = decl_function_context (label);
3493 gcc_assert (!context || context == current_function_decl);
3496 emit_jump (jump_target_rtx (label));
3499 /* Output a return with no value. */
3501 static void
3502 expand_null_return_1 (void)
3504 clear_pending_stack_adjust ();
3505 do_pending_stack_adjust ();
3506 emit_jump (return_label);
3509 /* Generate RTL to return from the current function, with no value.
3510 (That is, we do not do anything about returning any value.) */
3512 void
3513 expand_null_return (void)
3515 /* If this function was declared to return a value, but we
3516 didn't, clobber the return registers so that they are not
3517 propagated live to the rest of the function. */
3518 clobber_return_register ();
3520 expand_null_return_1 ();
3523 /* Generate RTL to return from the current function, with value VAL. */
3525 static void
3526 expand_value_return (rtx val)
3528 /* Copy the value to the return location unless it's already there. */
3530 tree decl = DECL_RESULT (current_function_decl);
3531 rtx return_reg = DECL_RTL (decl);
3532 if (return_reg != val)
3534 tree funtype = TREE_TYPE (current_function_decl);
3535 tree type = TREE_TYPE (decl);
3536 int unsignedp = TYPE_UNSIGNED (type);
3537 machine_mode old_mode = DECL_MODE (decl);
3538 machine_mode mode;
3539 if (DECL_BY_REFERENCE (decl))
3540 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3541 else
3542 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3544 if (mode != old_mode)
3545 val = convert_modes (mode, old_mode, val, unsignedp);
3547 if (GET_CODE (return_reg) == PARALLEL)
3548 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3549 else
3550 emit_move_insn (return_reg, val);
3553 expand_null_return_1 ();
3556 /* Generate RTL to evaluate the expression RETVAL and return it
3557 from the current function. */
3559 static void
3560 expand_return (tree retval)
3562 rtx result_rtl;
3563 rtx val = 0;
3564 tree retval_rhs;
3566 /* If function wants no value, give it none. */
3567 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3569 expand_normal (retval);
3570 expand_null_return ();
3571 return;
3574 if (retval == error_mark_node)
3576 /* Treat this like a return of no value from a function that
3577 returns a value. */
3578 expand_null_return ();
3579 return;
3581 else if ((TREE_CODE (retval) == MODIFY_EXPR
3582 || TREE_CODE (retval) == INIT_EXPR)
3583 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3584 retval_rhs = TREE_OPERAND (retval, 1);
3585 else
3586 retval_rhs = retval;
3588 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3590 /* If we are returning the RESULT_DECL, then the value has already
3591 been stored into it, so we don't have to do anything special. */
3592 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3593 expand_value_return (result_rtl);
3595 /* If the result is an aggregate that is being returned in one (or more)
3596 registers, load the registers here. */
3598 else if (retval_rhs != 0
3599 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3600 && REG_P (result_rtl))
3602 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3603 if (val)
3605 /* Use the mode of the result value on the return register. */
3606 PUT_MODE (result_rtl, GET_MODE (val));
3607 expand_value_return (val);
3609 else
3610 expand_null_return ();
3612 else if (retval_rhs != 0
3613 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3614 && (REG_P (result_rtl)
3615 || (GET_CODE (result_rtl) == PARALLEL)))
3617 /* Compute the return value into a temporary (usually a pseudo reg). */
3619 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3620 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3621 val = force_not_mem (val);
3622 expand_value_return (val);
3624 else
3626 /* No hard reg used; calculate value into hard return reg. */
3627 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3628 expand_value_return (result_rtl);
3632 /* Expand a clobber of LHS. If LHS is stored in a multi-part
3633 register, tell the rtl optimizers that its value is no longer
3634 needed. */
3636 static void
3637 expand_clobber (tree lhs)
3639 if (DECL_P (lhs))
3641 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3642 if (decl_rtl && REG_P (decl_rtl))
3644 machine_mode decl_mode = GET_MODE (decl_rtl);
3645 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3646 REGMODE_NATURAL_SIZE (decl_mode)))
3647 emit_clobber (decl_rtl);
3652 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3653 STMT that doesn't require special handling for outgoing edges. That
3654 is, no tailcalls and no GIMPLE_COND. */
3656 static void
3657 expand_gimple_stmt_1 (gimple *stmt)
3659 tree op0;
3661 set_curr_insn_location (gimple_location (stmt));
3663 switch (gimple_code (stmt))
3665 case GIMPLE_GOTO:
3666 op0 = gimple_goto_dest (stmt);
3667 if (TREE_CODE (op0) == LABEL_DECL)
3668 expand_goto (op0);
3669 else
3670 expand_computed_goto (op0);
3671 break;
3672 case GIMPLE_LABEL:
3673 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3674 break;
3675 case GIMPLE_NOP:
3676 case GIMPLE_PREDICT:
3677 break;
3678 case GIMPLE_SWITCH:
3680 gswitch *swtch = as_a <gswitch *> (stmt);
3681 if (gimple_switch_num_labels (swtch) == 1)
3682 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3683 else
3684 expand_case (swtch);
3686 break;
3687 case GIMPLE_ASM:
3688 expand_asm_stmt (as_a <gasm *> (stmt));
3689 break;
3690 case GIMPLE_CALL:
3691 expand_call_stmt (as_a <gcall *> (stmt));
3692 break;
3694 case GIMPLE_RETURN:
3696 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3698 if (op0 && op0 != error_mark_node)
3700 tree result = DECL_RESULT (current_function_decl);
3702 /* If we are not returning the current function's RESULT_DECL,
3703 build an assignment to it. */
3704 if (op0 != result)
3706 /* I believe that a function's RESULT_DECL is unique. */
3707 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3709 /* ??? We'd like to use simply expand_assignment here,
3710 but this fails if the value is of BLKmode but the return
3711 decl is a register. expand_return has special handling
3712 for this combination, which eventually should move
3713 to common code. See comments there. Until then, let's
3714 build a modify expression :-/ */
3715 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3716 result, op0);
3720 if (!op0)
3721 expand_null_return ();
3722 else
3723 expand_return (op0);
3725 break;
3727 case GIMPLE_ASSIGN:
3729 gassign *assign_stmt = as_a <gassign *> (stmt);
3730 tree lhs = gimple_assign_lhs (assign_stmt);
3732 /* Tree expand used to fiddle with |= and &= of two bitfield
3733 COMPONENT_REFs here. This can't happen with gimple, the LHS
3734 of binary assigns must be a gimple reg. */
3736 if (TREE_CODE (lhs) != SSA_NAME
3737 || get_gimple_rhs_class (gimple_expr_code (stmt))
3738 == GIMPLE_SINGLE_RHS)
3740 tree rhs = gimple_assign_rhs1 (assign_stmt);
3741 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3742 == GIMPLE_SINGLE_RHS);
3743 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3744 /* Do not put locations on possibly shared trees. */
3745 && !is_gimple_min_invariant (rhs))
3746 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3747 if (TREE_CLOBBER_P (rhs))
3748 /* This is a clobber to mark the going out of scope for
3749 this LHS. */
3750 expand_clobber (lhs);
3751 else
3752 expand_assignment (lhs, rhs,
3753 gimple_assign_nontemporal_move_p (
3754 assign_stmt));
3756 else
3758 rtx target, temp;
3759 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3760 struct separate_ops ops;
3761 bool promoted = false;
3763 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3764 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3765 promoted = true;
3767 ops.code = gimple_assign_rhs_code (assign_stmt);
3768 ops.type = TREE_TYPE (lhs);
3769 switch (get_gimple_rhs_class (ops.code))
3771 case GIMPLE_TERNARY_RHS:
3772 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3773 /* Fallthru */
3774 case GIMPLE_BINARY_RHS:
3775 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3776 /* Fallthru */
3777 case GIMPLE_UNARY_RHS:
3778 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3779 break;
3780 default:
3781 gcc_unreachable ();
3783 ops.location = gimple_location (stmt);
3785 /* If we want to use a nontemporal store, force the value to
3786 register first. If we store into a promoted register,
3787 don't directly expand to target. */
3788 temp = nontemporal || promoted ? NULL_RTX : target;
3789 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3790 EXPAND_NORMAL);
3792 if (temp == target)
3793 ;
3794 else if (promoted)
3796 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3797 /* If TEMP is a VOIDmode constant, use convert_modes to make
3798 sure that we properly convert it. */
3799 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3801 temp = convert_modes (GET_MODE (target),
3802 TYPE_MODE (ops.type),
3803 temp, unsignedp);
3804 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3805 GET_MODE (target), temp, unsignedp);
3808 convert_move (SUBREG_REG (target), temp, unsignedp);
3810 else if (nontemporal && emit_storent_insn (target, temp))
3811 ;
3812 else
3814 temp = force_operand (temp, target);
3815 if (temp != target)
3816 emit_move_insn (target, temp);
3820 break;
3822 default:
3823 gcc_unreachable ();
3827 /* Expand one gimple statement STMT and return the last RTL instruction
3828 before any of the newly generated ones.
3830 In addition to generating the necessary RTL instructions this also
3831 sets REG_EH_REGION notes if necessary and sets the current source
3832 location for diagnostics. */
3834 static rtx_insn *
3835 expand_gimple_stmt (gimple *stmt)
3837 location_t saved_location = input_location;
3838 rtx_insn *last = get_last_insn ();
3839 int lp_nr;
3841 gcc_assert (cfun);
3843 /* We need to save and restore the current source location so that errors
3844 discovered during expansion are emitted with the right location. But
3845 it would be better if the diagnostic routines used the source location
3846 embedded in the tree nodes rather than globals. */
3847 if (gimple_has_location (stmt))
3848 input_location = gimple_location (stmt);
3850 expand_gimple_stmt_1 (stmt);
3852 /* Free any temporaries used to evaluate this statement. */
3853 free_temp_slots ();
3855 input_location = saved_location;
3857 /* Mark all insns that may trap. */
3858 lp_nr = lookup_stmt_eh_lp (stmt);
3859 if (lp_nr)
3861 rtx_insn *insn;
3862 for (insn = next_real_insn (last); insn;
3863 insn = next_real_insn (insn))
3865 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3866 /* If we want exceptions for non-call insns, any
3867 may_trap_p instruction may throw. */
3868 && GET_CODE (PATTERN (insn)) != CLOBBER
3869 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
3870 && GET_CODE (PATTERN (insn)) != USE
3871 && insn_could_throw_p (insn))
3872 make_reg_eh_region_note (insn, 0, lp_nr);
3876 return last;
3879 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3880 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3881 generated a tail call (something that might be denied by the ABI
3882 rules governing the call; see calls.c).
3884 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3885 can still reach the rest of BB. The case here is __builtin_sqrt,
3886 where the NaN result goes through the external function (with a
3887 tailcall) and the normal result happens via a sqrt instruction. */
3889 static basic_block
3890 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3892 rtx_insn *last2, *last;
3893 edge e;
3894 edge_iterator ei;
3895 profile_probability probability;
3897 last2 = last = expand_gimple_stmt (stmt);
3899 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3900 if (CALL_P (last) && SIBLING_CALL_P (last))
3901 goto found;
3903 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3905 *can_fallthru = true;
3906 return NULL;
3908 found:
3909 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3910 Any instructions emitted here are about to be deleted. */
3911 do_pending_stack_adjust ();
3913 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3914 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3915 EH or abnormal edges, we shouldn't have created a tail call in
3916 the first place. So it seems to me we should just be removing
3917 all edges here, or redirecting the existing fallthru edge to
3918 the exit block. */
3920 probability = profile_probability::never ();
3922 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3924 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3926 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3927 e->dest->count -= e->count ();
3928 probability += e->probability;
3929 remove_edge (e);
3931 else
3932 ei_next (&ei);
3935 /* This is somewhat ugly: the call_expr expander often emits instructions
3936 after the sibcall (to perform the function return). These confuse the
3937 find_many_sub_basic_blocks code, so we need to get rid of them. */
3938 last = NEXT_INSN (last);
3939 gcc_assert (BARRIER_P (last));
3941 *can_fallthru = false;
3942 while (NEXT_INSN (last))
3944 /* For instance the sqrt builtin expander expands an if with a
3945 sibcall in the then-branch and a label for the else-branch. */
3946 if (LABEL_P (NEXT_INSN (last)))
3948 *can_fallthru = true;
3949 break;
3951 delete_insn (NEXT_INSN (last));
3954 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3955 | EDGE_SIBCALL);
3956 e->probability = probability;
3957 BB_END (bb) = last;
3958 update_bb_for_insn (bb);
3960 if (NEXT_INSN (last))
3962 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3964 last = BB_END (bb);
3965 if (BARRIER_P (last))
3966 BB_END (bb) = PREV_INSN (last);
3969 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3971 return bb;
3974 /* Return the difference between the floor and the truncated result of
3975 a signed division by OP1 with remainder MOD. */
3976 static rtx
3977 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3979 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3980 return gen_rtx_IF_THEN_ELSE
3981 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3982 gen_rtx_IF_THEN_ELSE
3983 (mode, gen_rtx_LT (BImode,
3984 gen_rtx_DIV (mode, op1, mod),
3985 const0_rtx),
3986 constm1_rtx, const0_rtx),
3987 const0_rtx);
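/* A worked example of the adjustment above (values chosen for
   illustration): for op0 = -7, op1 = 2 the truncated quotient is -3 with
   remainder MOD = -1, while the floor result is -4.  MOD != 0 and
   op1 / MOD = 2 / -1 = -2 < 0, so the expression yields -1 and
   -3 + -1 = -4.  When MOD is zero or has the same sign as OP1, the
   adjustment is 0.  */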
3990 /* Return the difference between the ceil and the truncated result of
3991 a signed division by OP1 with remainder MOD. */
3992 static rtx
3993 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3995 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3996 return gen_rtx_IF_THEN_ELSE
3997 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3998 gen_rtx_IF_THEN_ELSE
3999 (mode, gen_rtx_GT (BImode,
4000 gen_rtx_DIV (mode, op1, mod),
4001 const0_rtx),
4002 const1_rtx, const0_rtx),
4003 const0_rtx);
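/* Likewise for the ceiling case, an illustrative example: for op0 = 7,
   op1 = 2 the truncated quotient is 3 with remainder MOD = 1, while the
   ceiling result is 4.  MOD != 0 and op1 / MOD = 2 > 0, so the expression
   yields 1 and 3 + 1 = 4.  The adjustment is nonzero only when MOD is
   nonzero and has the same sign as OP1.  */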
4006 /* Return the difference between the ceil and the truncated result of
4007 an unsigned division by OP1 with remainder MOD. */
4008 static rtx
4009 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4011 /* (mod != 0 ? 1 : 0) */
4012 return gen_rtx_IF_THEN_ELSE
4013 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4014 const1_rtx, const0_rtx);
4017 /* Return the difference between the rounded and the truncated result
4018 of a signed division by OP1 with remainder MOD. Halfway cases are
4019 rounded away from zero, rather than to the nearest even number. */
4020 static rtx
4021 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4023 /* (abs (mod) >= abs (op1) - abs (mod)
4024 ? (op1 / mod > 0 ? 1 : -1)
4025 : 0) */
4026 return gen_rtx_IF_THEN_ELSE
4027 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4028 gen_rtx_MINUS (mode,
4029 gen_rtx_ABS (mode, op1),
4030 gen_rtx_ABS (mode, mod))),
4031 gen_rtx_IF_THEN_ELSE
4032 (mode, gen_rtx_GT (BImode,
4033 gen_rtx_DIV (mode, op1, mod),
4034 const0_rtx),
4035 const1_rtx, constm1_rtx),
4036 const0_rtx);
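/* Two illustrative cases for the rounding adjustment above: for op0 = 8,
   op1 = 5 the truncated quotient is 1 with MOD = 3; |MOD| = 3 >=
   |OP1| - |MOD| = 2 and op1 / MOD = 1 > 0, so the adjustment is 1 and the
   result is 2 = round (1.6).  For the halfway case op0 = 5, op1 = 2 we get
   MOD = 1 and 1 >= 2 - 1, so the adjustment is again 1, rounding 2.5 away
   from zero to 3.  */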
4039 /* Return the difference between the rounded and the truncated result
4040 of an unsigned division by OP1 with remainder MOD. Halfway cases
4041 are rounded away from zero, rather than to the nearest even
4042 number. */
4043 static rtx
4044 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4046 /* (mod >= op1 - mod ? 1 : 0) */
4047 return gen_rtx_IF_THEN_ELSE
4048 (mode, gen_rtx_GE (BImode, mod,
4049 gen_rtx_MINUS (mode, op1, mod)),
4050 const1_rtx, const0_rtx);
4053 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4054 any rtl. */
4056 static rtx
4057 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4058 addr_space_t as)
4060 #ifndef POINTERS_EXTEND_UNSIGNED
4061 gcc_assert (mode == Pmode
4062 || mode == targetm.addr_space.address_mode (as));
4063 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4064 #else
4065 rtx temp;
4067 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4069 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4070 return x;
4072 /* X must have some form of address mode already. */
4073 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4074 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4075 x = lowpart_subreg (mode, x, xmode);
4076 else if (POINTERS_EXTEND_UNSIGNED > 0)
4077 x = gen_rtx_ZERO_EXTEND (mode, x);
4078 else if (!POINTERS_EXTEND_UNSIGNED)
4079 x = gen_rtx_SIGN_EXTEND (mode, x);
4080 else
4082 switch (GET_CODE (x))
4084 case SUBREG:
4085 if ((SUBREG_PROMOTED_VAR_P (x)
4086 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4087 || (GET_CODE (SUBREG_REG (x)) == PLUS
4088 && REG_P (XEXP (SUBREG_REG (x), 0))
4089 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4090 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4091 && GET_MODE (SUBREG_REG (x)) == mode)
4092 return SUBREG_REG (x);
4093 break;
4094 case LABEL_REF:
4095 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4096 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4097 return temp;
4098 case SYMBOL_REF:
4099 temp = shallow_copy_rtx (x);
4100 PUT_MODE (temp, mode);
4101 return temp;
4102 case CONST:
4103 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4104 if (temp)
4105 temp = gen_rtx_CONST (mode, temp);
4106 return temp;
4107 case PLUS:
4108 case MINUS:
4109 if (CONST_INT_P (XEXP (x, 1)))
4111 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4112 if (temp)
4113 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4115 break;
4116 default:
4117 break;
4119 /* Don't know how to express ptr_extend as an operation in debug info. */
4120 return NULL;
4122 #endif /* POINTERS_EXTEND_UNSIGNED */
4124 return x;
4127 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4128 by avoid_deep_ter_for_debug. */
4130 static hash_map<tree, tree> *deep_ter_debug_map;
4132 /* Split overly deep TER chains for debug stmts by using debug temporaries. */
4134 static void
4135 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4137 use_operand_p use_p;
4138 ssa_op_iter iter;
4139 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4141 tree use = USE_FROM_PTR (use_p);
4142 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4143 continue;
4144 gimple *g = get_gimple_for_ssa_name (use);
4145 if (g == NULL)
4146 continue;
4147 if (depth > 6 && !stmt_ends_bb_p (g))
4149 if (deep_ter_debug_map == NULL)
4150 deep_ter_debug_map = new hash_map<tree, tree>;
4152 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4153 if (vexpr != NULL)
4154 continue;
4155 vexpr = make_node (DEBUG_EXPR_DECL);
4156 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4157 DECL_ARTIFICIAL (vexpr) = 1;
4158 TREE_TYPE (vexpr) = TREE_TYPE (use);
4159 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4160 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4161 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4162 avoid_deep_ter_for_debug (def_temp, 0);
4164 else
4165 avoid_deep_ter_for_debug (g, depth + 1);
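/* A sketch of the transformation (SSA names below are purely
   illustrative): with a chain of single-use definitions such as
       t1_1 = a_2 + 1;
       t2_3 = t1_1 * b_4;
       ...
       t9_9 = t8_8 - c_5;
   each ti is TERed into its single use, so a debug use of t9_9 would
   otherwise expand into one tree containing the whole chain.  Once the
   recursion exceeds depth 6, a DEBUG_EXPR_DECL is bound to the
   intermediate SSA name right after its definition:
       t8_8 = ...;
       # DEBUG D#1 => t8_8
   and deep_ter_debug_map records t8_8 -> D#1, so expand_debug_expr later
   expands D#1 instead of re-expanding everything below it.  */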
4169 /* Return an RTX equivalent to the value of the parameter DECL. */
4171 static rtx
4172 expand_debug_parm_decl (tree decl)
4174 rtx incoming = DECL_INCOMING_RTL (decl);
4176 if (incoming
4177 && GET_MODE (incoming) != BLKmode
4178 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4179 || (MEM_P (incoming)
4180 && REG_P (XEXP (incoming, 0))
4181 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4183 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4185 #ifdef HAVE_window_save
4186 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4187 If the target machine has an explicit window save instruction, the
4188 actual entry value is the corresponding OUTGOING_REGNO instead. */
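/* For instance, on a register-window target such as SPARC (the usual
   definer of HAVE_window_save), an argument the callee reads from %i0 was
   the caller's %o0 before the window save, so the ENTRY_VALUE is expressed
   in terms of %o0.  (Example target given for illustration only.)  */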
4189 if (REG_P (incoming)
4190 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4191 incoming
4192 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4193 OUTGOING_REGNO (REGNO (incoming)), 0);
4194 else if (MEM_P (incoming))
4196 rtx reg = XEXP (incoming, 0);
4197 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4199 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4200 incoming = replace_equiv_address_nv (incoming, reg);
4202 else
4203 incoming = copy_rtx (incoming);
4205 #endif
4207 ENTRY_VALUE_EXP (rtl) = incoming;
4208 return rtl;
4211 if (incoming
4212 && GET_MODE (incoming) != BLKmode
4213 && !TREE_ADDRESSABLE (decl)
4214 && MEM_P (incoming)
4215 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4216 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4217 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4218 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4219 return copy_rtx (incoming);
4221 return NULL_RTX;
4224 /* Return an RTX equivalent to the value of the tree expression EXP. */
4226 static rtx
4227 expand_debug_expr (tree exp)
4229 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4230 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4231 machine_mode inner_mode = VOIDmode;
4232 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4233 addr_space_t as;
4234 scalar_int_mode op0_mode, op1_mode, addr_mode;
4236 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4238 case tcc_expression:
4239 switch (TREE_CODE (exp))
4241 case COND_EXPR:
4242 case DOT_PROD_EXPR:
4243 case SAD_EXPR:
4244 case WIDEN_MULT_PLUS_EXPR:
4245 case WIDEN_MULT_MINUS_EXPR:
4246 goto ternary;
4248 case TRUTH_ANDIF_EXPR:
4249 case TRUTH_ORIF_EXPR:
4250 case TRUTH_AND_EXPR:
4251 case TRUTH_OR_EXPR:
4252 case TRUTH_XOR_EXPR:
4253 goto binary;
4255 case TRUTH_NOT_EXPR:
4256 goto unary;
4258 default:
4259 break;
4261 break;
4263 ternary:
4264 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4265 if (!op2)
4266 return NULL_RTX;
4267 /* Fall through. */
4269 binary:
4270 case tcc_binary:
4271 if (mode == BLKmode)
4272 return NULL_RTX;
4273 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4274 if (!op1)
4275 return NULL_RTX;
4276 switch (TREE_CODE (exp))
4278 case LSHIFT_EXPR:
4279 case RSHIFT_EXPR:
4280 case LROTATE_EXPR:
4281 case RROTATE_EXPR:
4282 case WIDEN_LSHIFT_EXPR:
4283 /* Ensure second operand isn't wider than the first one. */
4284 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4285 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4286 && (GET_MODE_UNIT_PRECISION (mode)
4287 < GET_MODE_PRECISION (op1_mode)))
4288 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4289 break;
4290 default:
4291 break;
4293 /* Fall through. */
4295 unary:
4296 case tcc_unary:
4297 if (mode == BLKmode)
4298 return NULL_RTX;
4299 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4300 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4301 if (!op0)
4302 return NULL_RTX;
4303 break;
4305 case tcc_comparison:
4306 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4307 goto binary;
4309 case tcc_type:
4310 case tcc_statement:
4311 gcc_unreachable ();
4313 case tcc_constant:
4314 case tcc_exceptional:
4315 case tcc_declaration:
4316 case tcc_reference:
4317 case tcc_vl_exp:
4318 break;
4321 switch (TREE_CODE (exp))
4323 case STRING_CST:
4324 if (!lookup_constant_def (exp))
4326 if (strlen (TREE_STRING_POINTER (exp)) + 1
4327 != (size_t) TREE_STRING_LENGTH (exp))
4328 return NULL_RTX;
4329 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4330 op0 = gen_rtx_MEM (BLKmode, op0);
4331 set_mem_attributes (op0, exp, 0);
4332 return op0;
4334 /* Fall through. */
4336 case INTEGER_CST:
4337 case REAL_CST:
4338 case FIXED_CST:
4339 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4340 return op0;
4342 case POLY_INT_CST:
4343 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4345 case COMPLEX_CST:
4346 gcc_assert (COMPLEX_MODE_P (mode));
4347 op0 = expand_debug_expr (TREE_REALPART (exp));
4348 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4349 return gen_rtx_CONCAT (mode, op0, op1);
4351 case DEBUG_EXPR_DECL:
4352 op0 = DECL_RTL_IF_SET (exp);
4354 if (op0)
4355 return op0;
4357 op0 = gen_rtx_DEBUG_EXPR (mode);
4358 DEBUG_EXPR_TREE_DECL (op0) = exp;
4359 SET_DECL_RTL (exp, op0);
4361 return op0;
4363 case VAR_DECL:
4364 case PARM_DECL:
4365 case FUNCTION_DECL:
4366 case LABEL_DECL:
4367 case CONST_DECL:
4368 case RESULT_DECL:
4369 op0 = DECL_RTL_IF_SET (exp);
4371 /* This decl was probably optimized away. */
4372 if (!op0)
4374 if (!VAR_P (exp)
4375 || DECL_EXTERNAL (exp)
4376 || !TREE_STATIC (exp)
4377 || !DECL_NAME (exp)
4378 || DECL_HARD_REGISTER (exp)
4379 || DECL_IN_CONSTANT_POOL (exp)
4380 || mode == VOIDmode)
4381 return NULL;
4383 op0 = make_decl_rtl_for_debug (exp);
4384 if (!MEM_P (op0)
4385 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4386 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4387 return NULL;
4389 else
4390 op0 = copy_rtx (op0);
4392 if (GET_MODE (op0) == BLKmode
4393 /* If op0 is not BLKmode, but mode is, adjust_mode
4394 below would ICE. While it is likely a FE bug,
4395 try to be robust here. See PR43166. */
4396 || mode == BLKmode
4397 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4399 gcc_assert (MEM_P (op0));
4400 op0 = adjust_address_nv (op0, mode, 0);
4401 return op0;
4404 /* Fall through. */
4406 adjust_mode:
4407 case PAREN_EXPR:
4408 CASE_CONVERT:
4410 inner_mode = GET_MODE (op0);
4412 if (mode == inner_mode)
4413 return op0;
4415 if (inner_mode == VOIDmode)
4417 if (TREE_CODE (exp) == SSA_NAME)
4418 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4419 else
4420 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4421 if (mode == inner_mode)
4422 return op0;
4425 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4427 if (GET_MODE_UNIT_BITSIZE (mode)
4428 == GET_MODE_UNIT_BITSIZE (inner_mode))
4429 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4430 else if (GET_MODE_UNIT_BITSIZE (mode)
4431 < GET_MODE_UNIT_BITSIZE (inner_mode))
4432 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4433 else
4434 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4436 else if (FLOAT_MODE_P (mode))
4438 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4439 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4440 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4441 else
4442 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4444 else if (FLOAT_MODE_P (inner_mode))
4446 if (unsignedp)
4447 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4448 else
4449 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4451 else if (GET_MODE_UNIT_PRECISION (mode)
4452 == GET_MODE_UNIT_PRECISION (inner_mode))
4453 op0 = lowpart_subreg (mode, op0, inner_mode);
4454 else if (GET_MODE_UNIT_PRECISION (mode)
4455 < GET_MODE_UNIT_PRECISION (inner_mode))
4456 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4457 else if (UNARY_CLASS_P (exp)
4458 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4459 : unsignedp)
4460 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4461 else
4462 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4464 return op0;
4467 case MEM_REF:
4468 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4470 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4471 TREE_OPERAND (exp, 0),
4472 TREE_OPERAND (exp, 1));
4473 if (newexp)
4474 return expand_debug_expr (newexp);
4476 /* FALLTHROUGH */
4477 case INDIRECT_REF:
4478 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4479 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4480 if (!op0)
4481 return NULL;
4483 if (TREE_CODE (exp) == MEM_REF)
4485 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4486 || (GET_CODE (op0) == PLUS
4487 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4488 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4489 Instead just use get_inner_reference. */
4490 goto component_ref;
4492 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4493 poly_int64 offset;
4494 if (!op1 || !poly_int_rtx_p (op1, &offset))
4495 return NULL;
4497 op0 = plus_constant (inner_mode, op0, offset);
4500 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4502 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4503 op0, as);
4504 if (op0 == NULL_RTX)
4505 return NULL;
4507 op0 = gen_rtx_MEM (mode, op0);
4508 set_mem_attributes (op0, exp, 0);
4509 if (TREE_CODE (exp) == MEM_REF
4510 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4511 set_mem_expr (op0, NULL_TREE);
4512 set_mem_addr_space (op0, as);
4514 return op0;
4516 case TARGET_MEM_REF:
4517 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4518 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4519 return NULL;
4521 op0 = expand_debug_expr
4522 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4523 if (!op0)
4524 return NULL;
4526 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4527 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4528 op0, as);
4529 if (op0 == NULL_RTX)
4530 return NULL;
4532 op0 = gen_rtx_MEM (mode, op0);
4534 set_mem_attributes (op0, exp, 0);
4535 set_mem_addr_space (op0, as);
4537 return op0;
4539 component_ref:
4540 case ARRAY_REF:
4541 case ARRAY_RANGE_REF:
4542 case COMPONENT_REF:
4543 case BIT_FIELD_REF:
4544 case REALPART_EXPR:
4545 case IMAGPART_EXPR:
4546 case VIEW_CONVERT_EXPR:
4548 machine_mode mode1;
4549 poly_int64 bitsize, bitpos;
4550 tree offset;
4551 int reversep, volatilep = 0;
4552 tree tem
4553 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4554 &unsignedp, &reversep, &volatilep);
4555 rtx orig_op0;
4557 if (known_eq (bitsize, 0))
4558 return NULL;
4560 orig_op0 = op0 = expand_debug_expr (tem);
4562 if (!op0)
4563 return NULL;
4565 if (offset)
4567 machine_mode addrmode, offmode;
4569 if (!MEM_P (op0))
4570 return NULL;
4572 op0 = XEXP (op0, 0);
4573 addrmode = GET_MODE (op0);
4574 if (addrmode == VOIDmode)
4575 addrmode = Pmode;
4577 op1 = expand_debug_expr (offset);
4578 if (!op1)
4579 return NULL;
4581 offmode = GET_MODE (op1);
4582 if (offmode == VOIDmode)
4583 offmode = TYPE_MODE (TREE_TYPE (offset));
4585 if (addrmode != offmode)
4586 op1 = lowpart_subreg (addrmode, op1, offmode);
4588 /* Don't use offset_address here; we don't need a
4589 recognizable address, and we don't want to generate
4590 code. */
4591 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4592 op0, op1));
4595 if (MEM_P (op0))
4597 if (mode1 == VOIDmode)
4599 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4600 return NULL;
4601 /* Bitfield. */
4602 mode1 = smallest_int_mode_for_size (bitsize);
4604 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4605 if (maybe_ne (bytepos, 0))
4607 op0 = adjust_address_nv (op0, mode1, bytepos);
4608 bitpos = num_trailing_bits (bitpos);
4610 else if (known_eq (bitpos, 0)
4611 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4612 op0 = adjust_address_nv (op0, mode, 0);
4613 else if (GET_MODE (op0) != mode1)
4614 op0 = adjust_address_nv (op0, mode1, 0);
4615 else
4616 op0 = copy_rtx (op0);
4617 if (op0 == orig_op0)
4618 op0 = shallow_copy_rtx (op0);
4619 set_mem_attributes (op0, exp, 0);
4622 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4623 return op0;
4625 if (maybe_lt (bitpos, 0))
4626 return NULL;
4628 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4629 return NULL;
4631 poly_int64 bytepos;
4632 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4633 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4635 machine_mode opmode = GET_MODE (op0);
4637 if (opmode == VOIDmode)
4638 opmode = TYPE_MODE (TREE_TYPE (tem));
4640 /* This condition may hold if we're expanding the address
4641 right past the end of an array that turned out not to
4642 be addressable (i.e., the address was only computed in
4643 debug stmts). The gen_subreg below would rightfully
4644 crash, and the address doesn't really exist, so just
4645 drop it. */
4646 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4647 return NULL;
4649 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4650 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4653 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4654 && TYPE_UNSIGNED (TREE_TYPE (exp))
4655 ? SIGN_EXTRACT
4656 : ZERO_EXTRACT, mode,
4657 GET_MODE (op0) != VOIDmode
4658 ? GET_MODE (op0)
4659 : TYPE_MODE (TREE_TYPE (tem)),
4660 op0, gen_int_mode (bitsize, word_mode),
4661 gen_int_mode (bitpos, word_mode));
4664 case ABS_EXPR:
4665 case ABSU_EXPR:
4666 return simplify_gen_unary (ABS, mode, op0, mode);
4668 case NEGATE_EXPR:
4669 return simplify_gen_unary (NEG, mode, op0, mode);
4671 case BIT_NOT_EXPR:
4672 return simplify_gen_unary (NOT, mode, op0, mode);
4674 case FLOAT_EXPR:
4675 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4676 0)))
4677 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4678 inner_mode);
4680 case FIX_TRUNC_EXPR:
4681 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4682 inner_mode);
4684 case POINTER_PLUS_EXPR:
4685 /* For the rare target where pointers are not the same size as
4686 size_t, we need to check for mismatched modes and correct
4687 the addend. */
4688 if (op0 && op1
4689 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4690 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4691 && op0_mode != op1_mode)
4693 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4694 /* If OP0 is a partial mode, then we must truncate, even
4695 if it has the same bitsize as OP1, as GCC's
4696 representation of partial modes is opaque. */
4697 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4698 && (GET_MODE_BITSIZE (op0_mode)
4699 == GET_MODE_BITSIZE (op1_mode))))
4700 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4701 else
4702 /* We always sign-extend, regardless of the signedness of
4703 the operand, because the operand is always unsigned
4704 here even if the original C expression is signed. */
4705 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4707 /* Fall through. */
4708 case PLUS_EXPR:
4709 return simplify_gen_binary (PLUS, mode, op0, op1);
4711 case MINUS_EXPR:
4712 case POINTER_DIFF_EXPR:
4713 return simplify_gen_binary (MINUS, mode, op0, op1);
4715 case MULT_EXPR:
4716 return simplify_gen_binary (MULT, mode, op0, op1);
4718 case RDIV_EXPR:
4719 case TRUNC_DIV_EXPR:
4720 case EXACT_DIV_EXPR:
4721 if (unsignedp)
4722 return simplify_gen_binary (UDIV, mode, op0, op1);
4723 else
4724 return simplify_gen_binary (DIV, mode, op0, op1);
4726 case TRUNC_MOD_EXPR:
4727 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4729 case FLOOR_DIV_EXPR:
4730 if (unsignedp)
4731 return simplify_gen_binary (UDIV, mode, op0, op1);
4732 else
4734 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4735 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4736 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4737 return simplify_gen_binary (PLUS, mode, div, adj);
4740 case FLOOR_MOD_EXPR:
4741 if (unsignedp)
4742 return simplify_gen_binary (UMOD, mode, op0, op1);
4743 else
4745 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4746 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4747 adj = simplify_gen_unary (NEG, mode,
4748 simplify_gen_binary (MULT, mode, adj, op1),
4749 mode);
4750 return simplify_gen_binary (PLUS, mode, mod, adj);
4753 case CEIL_DIV_EXPR:
4754 if (unsignedp)
4756 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4757 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4758 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4759 return simplify_gen_binary (PLUS, mode, div, adj);
4761 else
4763 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4764 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4765 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4766 return simplify_gen_binary (PLUS, mode, div, adj);
4769 case CEIL_MOD_EXPR:
4770 if (unsignedp)
4772 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4773 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4774 adj = simplify_gen_unary (NEG, mode,
4775 simplify_gen_binary (MULT, mode, adj, op1),
4776 mode);
4777 return simplify_gen_binary (PLUS, mode, mod, adj);
4779 else
4781 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4782 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4783 adj = simplify_gen_unary (NEG, mode,
4784 simplify_gen_binary (MULT, mode, adj, op1),
4785 mode);
4786 return simplify_gen_binary (PLUS, mode, mod, adj);
4789 case ROUND_DIV_EXPR:
4790 if (unsignedp)
4792 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4793 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4794 rtx adj = round_udiv_adjust (mode, mod, op1);
4795 return simplify_gen_binary (PLUS, mode, div, adj);
4797 else
4799 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4800 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4801 rtx adj = round_sdiv_adjust (mode, mod, op1);
4802 return simplify_gen_binary (PLUS, mode, div, adj);
4805 case ROUND_MOD_EXPR:
4806 if (unsignedp)
4808 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4809 rtx adj = round_udiv_adjust (mode, mod, op1);
4810 adj = simplify_gen_unary (NEG, mode,
4811 simplify_gen_binary (MULT, mode, adj, op1),
4812 mode);
4813 return simplify_gen_binary (PLUS, mode, mod, adj);
4815 else
4817 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4818 rtx adj = round_sdiv_adjust (mode, mod, op1);
4819 adj = simplify_gen_unary (NEG, mode,
4820 simplify_gen_binary (MULT, mode, adj, op1),
4821 mode);
4822 return simplify_gen_binary (PLUS, mode, mod, adj);
4825 case LSHIFT_EXPR:
4826 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4828 case RSHIFT_EXPR:
4829 if (unsignedp)
4830 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4831 else
4832 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4834 case LROTATE_EXPR:
4835 return simplify_gen_binary (ROTATE, mode, op0, op1);
4837 case RROTATE_EXPR:
4838 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4840 case MIN_EXPR:
4841 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4843 case MAX_EXPR:
4844 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4846 case BIT_AND_EXPR:
4847 case TRUTH_AND_EXPR:
4848 return simplify_gen_binary (AND, mode, op0, op1);
4850 case BIT_IOR_EXPR:
4851 case TRUTH_OR_EXPR:
4852 return simplify_gen_binary (IOR, mode, op0, op1);
4854 case BIT_XOR_EXPR:
4855 case TRUTH_XOR_EXPR:
4856 return simplify_gen_binary (XOR, mode, op0, op1);
4858 case TRUTH_ANDIF_EXPR:
4859 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4861 case TRUTH_ORIF_EXPR:
4862 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4864 case TRUTH_NOT_EXPR:
4865 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4867 case LT_EXPR:
4868 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4869 op0, op1);
4871 case LE_EXPR:
4872 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4873 op0, op1);
4875 case GT_EXPR:
4876 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4877 op0, op1);
4879 case GE_EXPR:
4880 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4881 op0, op1);
4883 case EQ_EXPR:
4884 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4886 case NE_EXPR:
4887 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4889 case UNORDERED_EXPR:
4890 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4892 case ORDERED_EXPR:
4893 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4895 case UNLT_EXPR:
4896 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4898 case UNLE_EXPR:
4899 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4901 case UNGT_EXPR:
4902 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4904 case UNGE_EXPR:
4905 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4907 case UNEQ_EXPR:
4908 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4910 case LTGT_EXPR:
4911 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4913 case COND_EXPR:
4914 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4916 case COMPLEX_EXPR:
4917 gcc_assert (COMPLEX_MODE_P (mode));
4918 if (GET_MODE (op0) == VOIDmode)
4919 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4920 if (GET_MODE (op1) == VOIDmode)
4921 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4922 return gen_rtx_CONCAT (mode, op0, op1);
4924 case CONJ_EXPR:
4925 if (GET_CODE (op0) == CONCAT)
4926 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4927 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4928 XEXP (op0, 1),
4929 GET_MODE_INNER (mode)));
4930 else
4932 scalar_mode imode = GET_MODE_INNER (mode);
4933 rtx re, im;
4935 if (MEM_P (op0))
4937 re = adjust_address_nv (op0, imode, 0);
4938 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4940 else
4942 scalar_int_mode ifmode;
4943 scalar_int_mode ihmode;
4944 rtx halfsize;
4945 if (!int_mode_for_mode (mode).exists (&ifmode)
4946 || !int_mode_for_mode (imode).exists (&ihmode))
4947 return NULL;
4948 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4949 re = op0;
4950 if (mode != ifmode)
4951 re = gen_rtx_SUBREG (ifmode, re, 0);
4952 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4953 if (imode != ihmode)
4954 re = gen_rtx_SUBREG (imode, re, 0);
4955 im = copy_rtx (op0);
4956 if (mode != ifmode)
4957 im = gen_rtx_SUBREG (ifmode, im, 0);
4958 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4959 if (imode != ihmode)
4960 im = gen_rtx_SUBREG (imode, im, 0);
4962 im = gen_rtx_NEG (imode, im);
4963 return gen_rtx_CONCAT (mode, re, im);
4966 case ADDR_EXPR:
4967 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4968 if (!op0 || !MEM_P (op0))
4970 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4971 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4972 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4973 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4974 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4975 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4977 if (handled_component_p (TREE_OPERAND (exp, 0)))
4979 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4980 bool reverse;
4981 tree decl
4982 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4983 &bitsize, &maxsize, &reverse);
4984 if ((VAR_P (decl)
4985 || TREE_CODE (decl) == PARM_DECL
4986 || TREE_CODE (decl) == RESULT_DECL)
4987 && (!TREE_ADDRESSABLE (decl)
4988 || target_for_debug_bind (decl))
4989 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4990 && known_gt (bitsize, 0)
4991 && known_eq (bitsize, maxsize))
4993 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4994 return plus_constant (mode, base, byteoffset);
4998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4999 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5000 == ADDR_EXPR)
5002 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5003 0));
5004 if (op0 != NULL
5005 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5006 || (GET_CODE (op0) == PLUS
5007 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5008 && CONST_INT_P (XEXP (op0, 1)))))
5010 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5011 1));
5012 poly_int64 offset;
5013 if (!op1 || !poly_int_rtx_p (op1, &offset))
5014 return NULL;
5016 return plus_constant (mode, op0, offset);
5020 return NULL;
5023 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5024 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5025 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5027 return op0;
5029 case VECTOR_CST:
5031 unsigned HOST_WIDE_INT i, nelts;
5033 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5034 return NULL;
5036 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5038 for (i = 0; i < nelts; ++i)
5040 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5041 if (!op1)
5042 return NULL;
5043 XVECEXP (op0, 0, i) = op1;
5046 return op0;
5049 case CONSTRUCTOR:
5050 if (TREE_CLOBBER_P (exp))
5051 return NULL;
5052 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5054 unsigned i;
5055 unsigned HOST_WIDE_INT nelts;
5056 tree val;
5058 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5059 goto flag_unsupported;
5061 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5063 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5065 op1 = expand_debug_expr (val);
5066 if (!op1)
5067 return NULL;
5068 XVECEXP (op0, 0, i) = op1;
5071 if (i < nelts)
5073 op1 = expand_debug_expr
5074 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5076 if (!op1)
5077 return NULL;
5079 for (; i < nelts; i++)
5080 XVECEXP (op0, 0, i) = op1;
5083 return op0;
5085 else
5086 goto flag_unsupported;
5088 case CALL_EXPR:
5089 /* ??? Maybe handle some builtins? */
5090 return NULL;
5092 case SSA_NAME:
5094 gimple *g = get_gimple_for_ssa_name (exp);
5095 if (g)
5097 tree t = NULL_TREE;
5098 if (deep_ter_debug_map)
5100 tree *slot = deep_ter_debug_map->get (exp);
5101 if (slot)
5102 t = *slot;
5104 if (t == NULL_TREE)
5105 t = gimple_assign_rhs_to_tree (g);
5106 op0 = expand_debug_expr (t);
5107 if (!op0)
5108 return NULL;
5110 else
5112 /* If this is a reference to an incoming value of a
5113 parameter that is never used in the code, or where the
5114 incoming value itself is never used in the code, use the
5115 PARM_DECL's DECL_RTL if set. */
5116 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5117 && SSA_NAME_VAR (exp)
5118 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5119 && has_zero_uses (exp))
5121 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5122 if (op0)
5123 goto adjust_mode;
5124 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5125 if (op0)
5126 goto adjust_mode;
5129 int part = var_to_partition (SA.map, exp);
5131 if (part == NO_PARTITION)
5132 return NULL;
5134 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5136 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5138 goto adjust_mode;
5141 case ERROR_MARK:
5142 return NULL;
5144 /* Vector stuff. For most of the codes we don't have rtl codes. */
5145 case REALIGN_LOAD_EXPR:
5146 case VEC_COND_EXPR:
5147 case VEC_PACK_FIX_TRUNC_EXPR:
5148 case VEC_PACK_FLOAT_EXPR:
5149 case VEC_PACK_SAT_EXPR:
5150 case VEC_PACK_TRUNC_EXPR:
5151 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5152 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5153 case VEC_UNPACK_FLOAT_HI_EXPR:
5154 case VEC_UNPACK_FLOAT_LO_EXPR:
5155 case VEC_UNPACK_HI_EXPR:
5156 case VEC_UNPACK_LO_EXPR:
5157 case VEC_WIDEN_MULT_HI_EXPR:
5158 case VEC_WIDEN_MULT_LO_EXPR:
5159 case VEC_WIDEN_MULT_EVEN_EXPR:
5160 case VEC_WIDEN_MULT_ODD_EXPR:
5161 case VEC_WIDEN_LSHIFT_HI_EXPR:
5162 case VEC_WIDEN_LSHIFT_LO_EXPR:
5163 case VEC_PERM_EXPR:
5164 case VEC_DUPLICATE_EXPR:
5165 case VEC_SERIES_EXPR:
5166 return NULL;
5168 /* Misc codes. */
5169 case ADDR_SPACE_CONVERT_EXPR:
5170 case FIXED_CONVERT_EXPR:
5171 case OBJ_TYPE_REF:
5172 case WITH_SIZE_EXPR:
5173 case BIT_INSERT_EXPR:
5174 return NULL;
5176 case DOT_PROD_EXPR:
5177 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5178 && SCALAR_INT_MODE_P (mode))
5180 op0
5181 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5182 0)))
5183 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5184 inner_mode);
5185 op1
5186 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5187 1)))
5188 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5189 inner_mode);
5190 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5191 return simplify_gen_binary (PLUS, mode, op0, op2);
5193 return NULL;
5195 case WIDEN_MULT_EXPR:
5196 case WIDEN_MULT_PLUS_EXPR:
5197 case WIDEN_MULT_MINUS_EXPR:
5198 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5199 && SCALAR_INT_MODE_P (mode))
5201 inner_mode = GET_MODE (op0);
5202 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5203 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5204 else
5205 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5206 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5207 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5208 else
5209 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5210 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5211 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5212 return op0;
5213 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5214 return simplify_gen_binary (PLUS, mode, op0, op2);
5215 else
5216 return simplify_gen_binary (MINUS, mode, op2, op0);
5218 return NULL;
5220 case MULT_HIGHPART_EXPR:
5221 /* ??? Similar to the above. */
5222 return NULL;
5224 case WIDEN_SUM_EXPR:
5225 case WIDEN_LSHIFT_EXPR:
5226 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5227 && SCALAR_INT_MODE_P (mode))
5229 op0
5230 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5231 0)))
5232 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5233 inner_mode);
5234 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5235 ? ASHIFT : PLUS, mode, op0, op1);
5237 return NULL;
5239 default:
5240 flag_unsupported:
5241 if (flag_checking)
5243 debug_tree (exp);
5244 gcc_unreachable ();
5246 return NULL;
5250 /* Return an RTX equivalent to the source bind value of the tree expression
5251 EXP. */
5253 static rtx
5254 expand_debug_source_expr (tree exp)
5256 rtx op0 = NULL_RTX;
5257 machine_mode mode = VOIDmode, inner_mode;
5259 switch (TREE_CODE (exp))
5261 case VAR_DECL:
5262 if (DECL_ABSTRACT_ORIGIN (exp))
5263 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5264 break;
5265 case PARM_DECL:
5267 mode = DECL_MODE (exp);
5268 op0 = expand_debug_parm_decl (exp);
5269 if (op0)
5270 break;
5271 /* See if this isn't an argument that has been completely
5272 optimized out. */
5273 if (!DECL_RTL_SET_P (exp)
5274 && !DECL_INCOMING_RTL (exp)
5275 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5277 tree aexp = DECL_ORIGIN (exp);
5278 if (DECL_CONTEXT (aexp)
5279 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5281 vec<tree, va_gc> **debug_args;
5282 unsigned int ix;
5283 tree ddecl;
5284 debug_args = decl_debug_args_lookup (current_function_decl);
5285 if (debug_args != NULL)
5287 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5288 ix += 2)
5289 if (ddecl == aexp)
5290 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5294 break;
5296 default:
5297 break;
5300 if (op0 == NULL_RTX)
5301 return NULL_RTX;
5303 inner_mode = GET_MODE (op0);
5304 if (mode == inner_mode)
5305 return op0;
5307 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5309 if (GET_MODE_UNIT_BITSIZE (mode)
5310 == GET_MODE_UNIT_BITSIZE (inner_mode))
5311 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5312 else if (GET_MODE_UNIT_BITSIZE (mode)
5313 < GET_MODE_UNIT_BITSIZE (inner_mode))
5314 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5315 else
5316 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5318 else if (FLOAT_MODE_P (mode))
5319 gcc_unreachable ();
5320 else if (FLOAT_MODE_P (inner_mode))
5322 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5323 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5324 else
5325 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5327 else if (GET_MODE_UNIT_PRECISION (mode)
5328 == GET_MODE_UNIT_PRECISION (inner_mode))
5329 op0 = lowpart_subreg (mode, op0, inner_mode);
5330 else if (GET_MODE_UNIT_PRECISION (mode)
5331 < GET_MODE_UNIT_PRECISION (inner_mode))
5332 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5333 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5334 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5335 else
5336 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5338 return op0;
5341 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5342 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5343 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5345 static void
5346 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5348 rtx exp = *exp_p;
5350 if (exp == NULL_RTX)
5351 return;
5353 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5354 return;
5356 if (depth == 4)
5358 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5359 rtx dval = make_debug_expr_from_rtl (exp);
5361 /* Emit a debug bind insn before INSN. */
5362 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5363 DEBUG_EXPR_TREE_DECL (dval), exp,
5364 VAR_INIT_STATUS_INITIALIZED);
5366 emit_debug_insn_before (bind, insn);
5367 *exp_p = dval;
5368 return;
5371 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5372 int i, j;
5373 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5374 switch (*format_ptr++)
5376 case 'e':
5377 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5378 break;
5380 case 'E':
5381 case 'V':
5382 for (j = 0; j < XVECLEN (exp, i); j++)
5383 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5384 break;
5386 default:
5387 break;
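/* For illustration, with the depth limit of 4 above, a location such as
     (plus (mult (plus (mult (plus (reg A) (reg B)) (reg C)) (reg D)) (reg E)) (reg F))
   has its innermost (plus (reg A) (reg B)) at depth 4; that subexpression
   is replaced by a fresh DEBUG_EXPR, and a VAR_LOCATION debug insn binding
   the DEBUG_EXPR to it is emitted just before INSN.  (Registers A..F are
   placeholders.)  */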
5391 /* Expand the _LOCs in debug insns. We run this after expanding all
5392 regular insns, so that any variables referenced in the function
5393 will have their DECL_RTLs set. */
5395 static void
5396 expand_debug_locations (void)
5398 rtx_insn *insn;
5399 rtx_insn *last = get_last_insn ();
5400 int save_strict_alias = flag_strict_aliasing;
5402 /* New alias sets while setting up memory attributes cause
5403 -fcompare-debug failures, even though they don't bring about any
5404 codegen changes. */
5405 flag_strict_aliasing = 0;
5407 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5408 if (DEBUG_BIND_INSN_P (insn))
5410 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5411 rtx val;
5412 rtx_insn *prev_insn, *insn2;
5413 machine_mode mode;
5415 if (value == NULL_TREE)
5416 val = NULL_RTX;
5417 else
5419 if (INSN_VAR_LOCATION_STATUS (insn)
5420 == VAR_INIT_STATUS_UNINITIALIZED)
5421 val = expand_debug_source_expr (value);
5422 /* The avoid_deep_ter_for_debug function inserts
5423 debug bind stmts after SSA_NAME definition, with the
5424 SSA_NAME as the whole bind location. Temporarily disable
5425 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5426 being defined in this DEBUG_INSN. */
5427 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5429 tree *slot = deep_ter_debug_map->get (value);
5430 if (slot)
5432 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5433 *slot = NULL_TREE;
5434 else
5435 slot = NULL;
5437 val = expand_debug_expr (value);
5438 if (slot)
5439 *slot = INSN_VAR_LOCATION_DECL (insn);
5441 else
5442 val = expand_debug_expr (value);
5443 gcc_assert (last == get_last_insn ());
5446 if (!val)
5447 val = gen_rtx_UNKNOWN_VAR_LOC ();
5448 else
5450 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5452 gcc_assert (mode == GET_MODE (val)
5453 || (GET_MODE (val) == VOIDmode
5454 && (CONST_SCALAR_INT_P (val)
5455 || GET_CODE (val) == CONST_FIXED
5456 || GET_CODE (val) == LABEL_REF)));
5459 INSN_VAR_LOCATION_LOC (insn) = val;
5460 prev_insn = PREV_INSN (insn);
5461 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5462 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5465 flag_strict_aliasing = save_strict_alias;
5468 /* Swap the operands of commutative operations so that the more
5469 expensive operand is expanded first. */
5471 static void
5472 reorder_operands (basic_block bb)
5474 unsigned int *lattice; /* Hold cost of each statement. */
5475 unsigned int i = 0, n = 0;
5476 gimple_stmt_iterator gsi;
5477 gimple_seq stmts;
5478 gimple *stmt;
5479 bool swap;
5480 tree op0, op1;
5481 ssa_op_iter iter;
5482 use_operand_p use_p;
5483 gimple *def0, *def1;
5485 /* Compute cost of each statement using estimate_num_insns. */
5486 stmts = bb_seq (bb);
5487 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5489 stmt = gsi_stmt (gsi);
5490 if (!is_gimple_debug (stmt))
5491 gimple_set_uid (stmt, n++);
5493 lattice = XNEWVEC (unsigned int, n);
5494 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5496 unsigned cost;
5497 stmt = gsi_stmt (gsi);
5498 if (is_gimple_debug (stmt))
5499 continue;
5500 cost = estimate_num_insns (stmt, &eni_size_weights);
5501 lattice[i] = cost;
5502 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5504 tree use = USE_FROM_PTR (use_p);
5505 gimple *def_stmt;
5506 if (TREE_CODE (use) != SSA_NAME)
5507 continue;
5508 def_stmt = get_gimple_for_ssa_name (use);
5509 if (!def_stmt)
5510 continue;
5511 lattice[i] += lattice[gimple_uid (def_stmt)];
5513 i++;
5514 if (!is_gimple_assign (stmt)
5515 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5516 continue;
5517 op0 = gimple_op (stmt, 1);
5518 op1 = gimple_op (stmt, 2);
5519 if (TREE_CODE (op0) != SSA_NAME
5520 || TREE_CODE (op1) != SSA_NAME)
5521 continue;
5522 /* Swap operands if the second one is more expensive. */
5523 def0 = get_gimple_for_ssa_name (op0);
5524 def1 = get_gimple_for_ssa_name (op1);
5525 if (!def1)
5526 continue;
5527 swap = false;
5528 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5529 swap = true;
5530 if (swap)
5532 if (dump_file && (dump_flags & TDF_DETAILS))
5534 fprintf (dump_file, "Swap operands in stmt:\n");
5535 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5536 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5537 def0 ? lattice[gimple_uid (def0)] : 0,
5538 lattice[gimple_uid (def1)]);
5540 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5541 gimple_assign_rhs2_ptr (stmt));
5544 XDELETE (lattice);
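/* A sketch of the effect (names and costs are illustrative): given
       t1_1 = x_2 * y_3;      <- TERed, accumulated cost say 4
       t2_4 = z_5 + 1;        <- TERed, accumulated cost say 1
       r_6  = t2_4 + t1_1;
   the second operand's definition chain is the more expensive one, so the
   operands of r_6 are swapped to r_6 = t1_1 + t2_4 and the costly
   subexpression is expanded first.  The costs come from estimate_num_insns
   plus the accumulated cost of the TERed definitions feeding each operand,
   so this is only a heuristic.  */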
5547 /* Expand basic block BB from GIMPLE trees to RTL. */
5549 static basic_block
5550 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5552 gimple_stmt_iterator gsi;
5553 gimple_seq stmts;
5554 gimple *stmt = NULL;
5555 rtx_note *note = NULL;
5556 rtx_insn *last;
5557 edge e;
5558 edge_iterator ei;
5560 if (dump_file)
5561 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5562 bb->index);
5564 /* Note that since we are now transitioning from GIMPLE to RTL, we
5565 cannot use the gsi_*_bb() routines because they expect the basic
5566 block to be in GIMPLE, instead of RTL. Therefore, we need to
5567 access the BB sequence directly. */
5568 if (optimize)
5569 reorder_operands (bb);
5570 stmts = bb_seq (bb);
5571 bb->il.gimple.seq = NULL;
5572 bb->il.gimple.phi_nodes = NULL;
5573 rtl_profile_for_bb (bb);
5574 init_rtl_bb_info (bb);
5575 bb->flags |= BB_RTL;
5577 /* Remove the RETURN_EXPR if we may fall through to the exit
5578 instead. */
5579 gsi = gsi_last (stmts);
5580 if (!gsi_end_p (gsi)
5581 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5583 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5585 gcc_assert (single_succ_p (bb));
5586 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5588 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5589 && !gimple_return_retval (ret_stmt))
5591 gsi_remove (&gsi, false);
5592 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5596 gsi = gsi_start (stmts);
5597 if (!gsi_end_p (gsi))
5599 stmt = gsi_stmt (gsi);
5600 if (gimple_code (stmt) != GIMPLE_LABEL)
5601 stmt = NULL;
5604 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5606 if (stmt || elt)
5608 gcc_checking_assert (!note);
5609 last = get_last_insn ();
5611 if (stmt)
5613 expand_gimple_stmt (stmt);
5614 gsi_next (&gsi);
5617 if (elt)
5618 emit_label (*elt);
5620 BB_HEAD (bb) = NEXT_INSN (last);
5621 if (NOTE_P (BB_HEAD (bb)))
5622 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5623 gcc_assert (LABEL_P (BB_HEAD (bb)));
5624 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5626 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5628 else
5629 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5631 if (note)
5632 NOTE_BASIC_BLOCK (note) = bb;
5634 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5636 basic_block new_bb;
5638 stmt = gsi_stmt (gsi);
5640 /* If this statement is a non-debug one, and we generate debug
5641 insns, then this one might be the last real use of a TERed
5642 SSA_NAME, but where there are still some debug uses further
5643 down. Expanding the current SSA name in such further debug
5644 uses by its RHS might lead to wrong debug info, as coalescing
5645 might make the operands of such RHS be placed into the same
5646 pseudo as something else. Like so:
5647 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5648 use(a_1);
5649 a_2 = ...
5650 #DEBUG ... => a_1
5651 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5652 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5653 the write to a_2 would actually have clobbered the place which
5654 formerly held a_0.
5656 So, instead of that, we recognize the situation, and generate
5657 debug temporaries at the last real use of TERed SSA names:
5658 a_1 = a_0 + 1;
5659 #DEBUG #D1 => a_1
5660 use(a_1);
5661 a_2 = ...
5662 #DEBUG ... => #D1
5664 if (MAY_HAVE_DEBUG_BIND_INSNS
5665 && SA.values
5666 && !is_gimple_debug (stmt))
5668 ssa_op_iter iter;
5669 tree op;
5670 gimple *def;
5672 location_t sloc = curr_insn_location ();
5674 /* Look for SSA names that have their last use here (TERed
5675 names always have only one real use). */
5676 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5677 if ((def = get_gimple_for_ssa_name (op)))
5679 imm_use_iterator imm_iter;
5680 use_operand_p use_p;
5681 bool have_debug_uses = false;
5683 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5685 if (gimple_debug_bind_p (USE_STMT (use_p)))
5687 have_debug_uses = true;
5688 break;
5692 if (have_debug_uses)
5694 /* OP is a TERed SSA name, DEF is its defining
5695 statement, and OP is used in further debug
5696 instructions. Generate a debug temporary, and
5697 replace all uses of OP in debug insns with that
5698 temporary. */
5699 gimple *debugstmt;
5700 tree value = gimple_assign_rhs_to_tree (def);
5701 tree vexpr = make_node (DEBUG_EXPR_DECL);
5702 rtx val;
5703 machine_mode mode;
5705 set_curr_insn_location (gimple_location (def));
5707 DECL_ARTIFICIAL (vexpr) = 1;
5708 TREE_TYPE (vexpr) = TREE_TYPE (value);
5709 if (DECL_P (value))
5710 mode = DECL_MODE (value);
5711 else
5712 mode = TYPE_MODE (TREE_TYPE (value));
5713 SET_DECL_MODE (vexpr, mode);
5715 val = gen_rtx_VAR_LOCATION
5716 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5718 emit_debug_insn (val);
5720 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5722 if (!gimple_debug_bind_p (debugstmt))
5723 continue;
5725 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5726 SET_USE (use_p, vexpr);
5728 update_stmt (debugstmt);
5732 set_curr_insn_location (sloc);
5735 currently_expanding_gimple_stmt = stmt;
5737 /* Expand this statement, then evaluate the resulting RTL and
5738 fixup the CFG accordingly. */
5739 if (gimple_code (stmt) == GIMPLE_COND)
5741 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5742 if (new_bb)
5743 return new_bb;
5745 else if (is_gimple_debug (stmt))
5747 location_t sloc = curr_insn_location ();
5748 gimple_stmt_iterator nsi = gsi;
5750 for (;;)
5752 tree var;
5753 tree value = NULL_TREE;
5754 rtx val = NULL_RTX;
5755 machine_mode mode;
5757 if (!gimple_debug_nonbind_marker_p (stmt))
5759 if (gimple_debug_bind_p (stmt))
5761 var = gimple_debug_bind_get_var (stmt);
5763 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5764 && TREE_CODE (var) != LABEL_DECL
5765 && !target_for_debug_bind (var))
5766 goto delink_debug_stmt;
5768 if (DECL_P (var))
5769 mode = DECL_MODE (var);
5770 else
5771 mode = TYPE_MODE (TREE_TYPE (var));
5773 if (gimple_debug_bind_has_value_p (stmt))
5774 value = gimple_debug_bind_get_value (stmt);
5776 val = gen_rtx_VAR_LOCATION
5777 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5779 else if (gimple_debug_source_bind_p (stmt))
5781 var = gimple_debug_source_bind_get_var (stmt);
5783 value = gimple_debug_source_bind_get_value (stmt);
5785 mode = DECL_MODE (var);
5787 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5788 VAR_INIT_STATUS_UNINITIALIZED);
5790 else
5791 gcc_unreachable ();
5793 /* If this function was first compiled with markers
5794 enabled, but they're now disabled (e.g. LTO), drop
5795 them on the floor. */
5796 else if (gimple_debug_nonbind_marker_p (stmt)
5797 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5798 goto delink_debug_stmt;
5799 else if (gimple_debug_begin_stmt_p (stmt))
5800 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5801 else if (gimple_debug_inline_entry_p (stmt))
5803 tree block = gimple_block (stmt);
5805 if (block)
5806 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5807 else
5808 goto delink_debug_stmt;
5810 else
5811 gcc_unreachable ();
5813 last = get_last_insn ();
5815 set_curr_insn_location (gimple_location (stmt));
5817 emit_debug_insn (val);
5819 if (dump_file && (dump_flags & TDF_DETAILS))
5821 /* We can't dump the insn with a TREE where an RTX
5822 is expected. */
5823 if (GET_CODE (val) == VAR_LOCATION)
5825 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5826 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5828 maybe_dump_rtl_for_gimple_stmt (stmt, last);
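/* Put the TREE value back; expand_debug_locations will replace it
   with real RTL later on. */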
5829 if (GET_CODE (val) == VAR_LOCATION)
5830 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5833 delink_debug_stmt:
5834 /* In order not to generate too many debug temporaries,
5835 we delink all uses of debug statements we already expanded.
5836 Therefore debug statements between definition and real
5837 use of TERed SSA names will continue to use the SSA name,
5838 and not be replaced with debug temps. */
5839 delink_stmt_imm_use (stmt);
5841 gsi = nsi;
5842 gsi_next (&nsi);
5843 if (gsi_end_p (nsi))
5844 break;
5845 stmt = gsi_stmt (nsi);
5846 if (!is_gimple_debug (stmt))
5847 break;
5850 set_curr_insn_location (sloc);
5852 else
5854 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5855 if (call_stmt
5856 && gimple_call_tail_p (call_stmt)
5857 && disable_tail_calls)
5858 gimple_call_set_tail (call_stmt, false);
5860 if (call_stmt && gimple_call_tail_p (call_stmt))
5862 bool can_fallthru;
5863 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5864 if (new_bb)
5866 if (can_fallthru)
5867 bb = new_bb;
5868 else
5869 return new_bb;
5872 else
5874 def_operand_p def_p;
5875 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5877 if (def_p != NULL)
5879 /* Ignore this stmt if it is in the list of
5880 replaceable expressions. */
5881 if (SA.values
5882 && bitmap_bit_p (SA.values,
5883 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5884 continue;
5886 last = expand_gimple_stmt (stmt);
5887 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5892 currently_expanding_gimple_stmt = NULL;
5894 /* Expand implicit goto and convert goto_locus. */
5895 FOR_EACH_EDGE (e, ei, bb->succs)
5897 if (e->goto_locus != UNKNOWN_LOCATION)
5898 set_curr_insn_location (e->goto_locus);
5899 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5901 emit_jump (label_rtx_for_bb (e->dest));
5902 e->flags &= ~EDGE_FALLTHRU;
5906 /* Expanded RTL can create a jump in the last instruction of a block.
5907 This might later be assumed to be a jump to the successor and break edge insertion.
5908 We need to insert a dummy move to prevent this. PR41440. */
5909 if (single_succ_p (bb)
5910 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5911 && (last = get_last_insn ())
5912 && (JUMP_P (last)
5913 || (DEBUG_INSN_P (last)
5914 && JUMP_P (prev_nondebug_insn (last)))))
5916 rtx dummy = gen_reg_rtx (SImode);
5917 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
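/* The self-move of a fresh pseudo is a no-op; its only purpose is to
   make BB_END a non-jump insn, and being dead it can be removed by
   later passes. */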
5920 do_pending_stack_adjust ();
5922 /* Find the block tail. The last insn in the block is the insn
5923 before a barrier and/or table jump insn. */
5924 last = get_last_insn ();
5925 if (BARRIER_P (last))
5926 last = PREV_INSN (last);
5927 if (JUMP_TABLE_DATA_P (last))
5928 last = PREV_INSN (PREV_INSN (last));
5929 if (BARRIER_P (last))
5930 last = PREV_INSN (last);
5931 BB_END (bb) = last;
5933 update_bb_for_insn (bb);
5935 return bb;
5939 /* Create a basic block for initialization code. */
5941 static basic_block
5942 construct_init_block (void)
5944 basic_block init_block, first_block;
5945 edge e = NULL;
5946 int flags;
5948 /* Multiple entry points not supported yet. */
5949 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5950 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5951 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5952 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5953 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5955 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5957 /* When the entry edge points to the first basic block, we don't need a jump;
5958 otherwise we have to jump to the proper target. */
5959 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5961 tree label = gimple_block_label (e->dest);
5963 emit_jump (jump_target_rtx (label));
5964 flags = 0;
5966 else
5967 flags = EDGE_FALLTHRU;
5969 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5970 get_last_insn (),
5971 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5972 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5973 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5974 if (e)
5976 first_block = e->dest;
5977 redirect_edge_succ (e, init_block);
5978 e = make_single_succ_edge (init_block, first_block, flags);
5980 else
5981 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5982 EDGE_FALLTHRU);
5984 update_bb_for_insn (init_block);
5985 return init_block;
5988 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5989 found in the block tree. */
5991 static void
5992 set_block_levels (tree block, int level)
5994 while (block)
5996 BLOCK_NUMBER (block) = level;
5997 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5998 block = BLOCK_CHAIN (block);
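/* For example, for a function body of the form
     { int a; { int b; } { int c; { int d; } } }
   the outermost block gets level 0, the blocks of b and c level 1,
   and the block of d level 2. */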
6002 /* Create a block containing landing pads and similar stuff. */
6004 static void
6005 construct_exit_block (void)
6007 rtx_insn *head = get_last_insn ();
6008 rtx_insn *end;
6009 basic_block exit_block;
6010 edge e, e2;
6011 unsigned ix;
6012 edge_iterator ei;
6013 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6014 rtx_insn *orig_end = BB_END (prev_bb);
6016 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6018 /* Make sure the locus is set to the end of the function, so that
6019 epilogue line numbers and warnings are set properly. */
6020 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6021 input_location = cfun->function_end_locus;
6023 /* Generate rtl for function exit. */
6024 expand_function_end ();
6026 end = get_last_insn ();
6027 if (head == end)
6028 return;
6029 /* While emitting the function end we could have moved the end of the last basic
6030 block. */
6031 BB_END (prev_bb) = orig_end;
6032 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6033 head = NEXT_INSN (head);
6034 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6035 bb count bookkeeping will be confused. Any instructions before that
6036 label are emitted for the case where PREV_BB falls through into the
6037 exit block, so append those instructions to prev_bb in that case. */
6038 if (NEXT_INSN (head) != return_label)
6040 while (NEXT_INSN (head) != return_label)
6042 if (!NOTE_P (NEXT_INSN (head)))
6043 BB_END (prev_bb) = NEXT_INSN (head);
6044 head = NEXT_INSN (head);
6047 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6048 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6049 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6051 ix = 0;
6052 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6054 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6055 if (!(e->flags & EDGE_ABNORMAL))
6056 redirect_edge_succ (e, exit_block);
6057 else
6058 ix++;
6061 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6062 EDGE_FALLTHRU);
6063 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6064 if (e2 != e)
6066 exit_block->count -= e2->count ();
6068 update_bb_for_insn (exit_block);
6071 /* Helper function for discover_nonconstant_array_refs.
6072 Look for ARRAY_REF nodes with non-constant indexes and mark the
6073 base decls addressable. */
6075 static tree
6076 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6077 void *data ATTRIBUTE_UNUSED)
6079 tree t = *tp;
6081 if (IS_TYPE_OR_DECL_P (t))
6082 *walk_subtrees = 0;
6083 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6085 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6086 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6087 && (!TREE_OPERAND (t, 2)
6088 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6089 || (TREE_CODE (t) == COMPONENT_REF
6090 && (!TREE_OPERAND (t,2)
6091 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6092 || TREE_CODE (t) == BIT_FIELD_REF
6093 || TREE_CODE (t) == REALPART_EXPR
6094 || TREE_CODE (t) == IMAGPART_EXPR
6095 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6096 || CONVERT_EXPR_P (t))
6097 t = TREE_OPERAND (t, 0);
6099 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6101 t = get_base_address (t);
6102 if (t && DECL_P (t)
6103 && DECL_MODE (t) != BLKmode)
6104 TREE_ADDRESSABLE (t) = 1;
6107 *walk_subtrees = 0;
6110 return NULL_TREE;
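/* This follows the usual walk_tree callback convention: a non-NULL
   return value would terminate the walk, and *walk_subtrees == 0
   tells the walker not to descend into the current node's operands. */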
6113 /* RTL expansion is not able to compile array references with variable
6114 offsets for arrays stored in a single register. Discover such
6115 expressions and mark variables as addressable to avoid this
6116 scenario. */
6118 static void
6119 discover_nonconstant_array_refs (void)
6121 basic_block bb;
6122 gimple_stmt_iterator gsi;
6124 FOR_EACH_BB_FN (bb, cfun)
6125 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6127 gimple *stmt = gsi_stmt (gsi);
6128 if (!is_gimple_debug (stmt))
6129 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
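/* For instance, given something like
     short v[2];
     ... = v[i];
   with a variable index I, V could otherwise be promoted to a single
   register (when its DECL_MODE is not BLKmode), which expansion cannot
   index dynamically; marking V addressable keeps it in memory. */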
6133 /* This function sets crtl->args.internal_arg_pointer to a virtual
6134 register if DRAP is needed. The local register allocator will replace
6135 virtual_incoming_args_rtx with the virtual register. */
6137 static void
6138 expand_stack_alignment (void)
6140 rtx drap_rtx;
6141 unsigned int preferred_stack_boundary;
6143 if (! SUPPORTS_STACK_ALIGNMENT)
6144 return;
6146 if (cfun->calls_alloca
6147 || cfun->has_nonlocal_label
6148 || crtl->has_nonlocal_goto)
6149 crtl->need_drap = true;
6151 /* Call update_stack_boundary here again to update incoming stack
6152 boundary. It may set incoming stack alignment to a different
6153 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6154 use the minimum incoming stack alignment to check if it is OK
6155 to perform sibcall optimization since sibcall optimization will
6156 only align the outgoing stack to incoming stack boundary. */
6157 if (targetm.calls.update_stack_boundary)
6158 targetm.calls.update_stack_boundary ();
6160 /* The incoming stack frame has to be aligned at least at
6161 parm_stack_boundary. */
6162 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6164 /* Update crtl->stack_alignment_estimated and use it later to align
6165 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6166 exceptions since callgraph doesn't collect incoming stack alignment
6167 in this case. */
6168 if (cfun->can_throw_non_call_exceptions
6169 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6170 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6171 else
6172 preferred_stack_boundary = crtl->preferred_stack_boundary;
6173 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6174 crtl->stack_alignment_estimated = preferred_stack_boundary;
6175 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6176 crtl->stack_alignment_needed = preferred_stack_boundary;
6178 gcc_assert (crtl->stack_alignment_needed
6179 <= crtl->stack_alignment_estimated);
6181 crtl->stack_realign_needed
6182 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6183 crtl->stack_realign_tried = crtl->stack_realign_needed;
6185 crtl->stack_realign_processed = true;
6187 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6188 alignment. */
6189 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6190 drap_rtx = targetm.calls.get_drap_rtx ();
6192 /* stack_realign_drap and drap_rtx must match. */
6193 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6195 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6196 if (drap_rtx != NULL)
6198 crtl->args.internal_arg_pointer = drap_rtx;
6200 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6201 needed. */
6202 fixup_tail_calls ();
6207 static void
6208 expand_main_function (void)
6210 #if (defined(INVOKE__main) \
6211 || (!defined(HAS_INIT_SECTION) \
6212 && !defined(INIT_SECTION_ASM_OP) \
6213 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6214 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6215 #endif
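/* On targets where the condition above holds, the expanded code for
   main therefore begins with a library call to the function named by
   NAME__MAIN ("__main" by default), which runs global constructors
   before any user code. */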
6219 /* Expand code to initialize the stack_protect_guard. This is invoked at
6220 the beginning of a function to be protected. */
6222 static void
6223 stack_protect_prologue (void)
6225 tree guard_decl = targetm.stack_protect_guard ();
6226 rtx x, y;
6228 crtl->stack_protect_guard_decl = guard_decl;
6229 x = expand_normal (crtl->stack_protect_guard);
6231 if (targetm.have_stack_protect_combined_set () && guard_decl)
6233 gcc_assert (DECL_P (guard_decl));
6234 y = DECL_RTL (guard_decl);
6236 /* Allow the target to compute the address of Y and copy it to X without
6237 leaking Y into a register. This combined address + copy pattern
6238 allows the target to prevent spilling of any intermediate results by
6239 splitting it after the register allocator. */
6240 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6242 emit_insn (insn);
6243 return;
6247 if (guard_decl)
6248 y = expand_normal (guard_decl);
6249 else
6250 y = const0_rtx;
6252 /* Allow the target to copy from Y to X without leaking Y into a
6253 register. */
6254 if (targetm.have_stack_protect_set ())
6255 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6257 emit_insn (insn);
6258 return;
6261 /* Otherwise do a straight move. */
6262 emit_move_insn (x, y);
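/* Whichever path was taken above, the net effect is one copy of the
   guard value Y into the frame's canary slot X; the dedicated target
   patterns exist so the guard value does not linger in a reusable
   register, and the plain move is only the fallback. */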
6265 /* Translate the intermediate representation contained in the CFG
6266 from GIMPLE trees to RTL.
6268 We do conversion per basic block and preserve/update the tree CFG.
6269 This implies we have to do some magic as the CFG can simultaneously
6270 consist of basic blocks containing RTL and GIMPLE trees. This can
6271 confuse the CFG hooks, so be careful to not manipulate CFG during
6272 the expansion. */
6274 namespace {
6276 const pass_data pass_data_expand =
6278 RTL_PASS, /* type */
6279 "expand", /* name */
6280 OPTGROUP_NONE, /* optinfo_flags */
6281 TV_EXPAND, /* tv_id */
6282 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6283 | PROP_gimple_lcx
6284 | PROP_gimple_lvec
6285 | PROP_gimple_lva), /* properties_required */
6286 PROP_rtl, /* properties_provided */
6287 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6288 0, /* todo_flags_start */
6289 0, /* todo_flags_finish */
6292 class pass_expand : public rtl_opt_pass
6294 public:
6295 pass_expand (gcc::context *ctxt)
6296 : rtl_opt_pass (pass_data_expand, ctxt)
6299 /* opt_pass methods: */
6300 virtual unsigned int execute (function *);
6302 }; // class pass_expand
6304 unsigned int
6305 pass_expand::execute (function *fun)
6307 basic_block bb, init_block;
6308 edge_iterator ei;
6309 edge e;
6310 rtx_insn *var_seq, *var_ret_seq;
6311 unsigned i;
6313 timevar_push (TV_OUT_OF_SSA);
6314 rewrite_out_of_ssa (&SA);
6315 timevar_pop (TV_OUT_OF_SSA);
6316 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6318 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6320 gimple_stmt_iterator gsi;
6321 FOR_EACH_BB_FN (bb, cfun)
6322 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6323 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6324 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6327 /* Make sure all values used by the optimization passes have sane
6328 defaults. */
6329 reg_renumber = 0;
6331 /* Some backends want to know that we are expanding to RTL. */
6332 currently_expanding_to_rtl = 1;
6333 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6334 free_dominance_info (CDI_DOMINATORS);
6336 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6338 insn_locations_init ();
6339 if (!DECL_IS_BUILTIN (current_function_decl))
6341 /* Eventually, all FEs should explicitly set function_start_locus. */
6342 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6343 set_curr_insn_location
6344 (DECL_SOURCE_LOCATION (current_function_decl));
6345 else
6346 set_curr_insn_location (fun->function_start_locus);
6348 else
6349 set_curr_insn_location (UNKNOWN_LOCATION);
6350 prologue_location = curr_insn_location ();
6352 #ifdef INSN_SCHEDULING
6353 init_sched_attrs ();
6354 #endif
6356 /* Make sure first insn is a note even if we don't want linenums.
6357 This makes sure the first insn will never be deleted.
6358 Also, final expects a note to appear there. */
6359 emit_note (NOTE_INSN_DELETED);
6361 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6362 discover_nonconstant_array_refs ();
6364 targetm.expand_to_rtl_hook ();
6365 crtl->init_stack_alignment ();
6366 fun->cfg->max_jumptable_ents = 0;
6368 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6369 of the function section at expansion time to predict the distance of calls. */
6370 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6372 /* Expand the variables recorded during gimple lowering. */
6373 timevar_push (TV_VAR_EXPAND);
6374 start_sequence ();
6376 var_ret_seq = expand_used_vars ();
6378 var_seq = get_insns ();
6379 end_sequence ();
6380 timevar_pop (TV_VAR_EXPAND);
6382 /* Honor stack protection warnings. */
6383 if (warn_stack_protect)
6385 if (fun->calls_alloca)
6386 warning (OPT_Wstack_protector,
6387 "stack protector not protecting local variables: "
6388 "variable length buffer");
6389 if (has_short_buffer && !crtl->stack_protect_guard)
6390 warning (OPT_Wstack_protector,
6391 "stack protector not protecting function: "
6392 "all local arrays are less than %d bytes long",
6393 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6396 /* Set up parameters and prepare for return, for the function. */
6397 expand_function_start (current_function_decl);
6399 /* If we emitted any instructions for setting up the variables,
6400 emit them before the FUNCTION_START note. */
6401 if (var_seq)
6403 emit_insn_before (var_seq, parm_birth_insn);
6405 /* In expand_function_end we'll insert the alloca save/restore
6406 before parm_birth_insn. We've just inserted an alloca call.
6407 Adjust the pointer to match. */
6408 parm_birth_insn = var_seq;
6411 /* Now propagate the RTL assignment of each partition to the
6412 underlying var of each SSA_NAME. */
6413 tree name;
6415 FOR_EACH_SSA_NAME (i, name, cfun)
6417 /* We might have generated new SSA names in
6418 update_alias_info_with_stack_vars. They will have a NULL
6419 defining statement, and won't be part of the partitioning,
6420 so ignore those. */
6421 if (!SSA_NAME_DEF_STMT (name))
6422 continue;
6424 adjust_one_expanded_partition_var (name);
6427 /* Clean up RTL of variables that straddle across multiple
6428 partitions, and check that the rtl of any PARM_DECLs that are not
6429 cleaned up is that of their default defs. */
6430 FOR_EACH_SSA_NAME (i, name, cfun)
6432 int part;
6434 /* We might have generated new SSA names in
6435 update_alias_info_with_stack_vars. They will have a NULL
6436 defining statement, and won't be part of the partitioning,
6437 so ignore those. */
6438 if (!SSA_NAME_DEF_STMT (name))
6439 continue;
6440 part = var_to_partition (SA.map, name);
6441 if (part == NO_PARTITION)
6442 continue;
6444 /* If this decl was marked as living in multiple places, reset
6445 this now to NULL. */
6446 tree var = SSA_NAME_VAR (name);
6447 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6448 SET_DECL_RTL (var, NULL);
6449 /* Check that the pseudos chosen by assign_parms are those of
6450 the corresponding default defs. */
6451 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6452 && (TREE_CODE (var) == PARM_DECL
6453 || TREE_CODE (var) == RESULT_DECL))
6455 rtx in = DECL_RTL_IF_SET (var);
6456 gcc_assert (in);
6457 rtx out = SA.partition_to_pseudo[part];
6458 gcc_assert (in == out);
6460 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6461 those expected by debug backends for each parm and for
6462 the result. This is particularly important for stabs,
6463 whose register elimination from parm's DECL_RTL may cause
6464 -fcompare-debug differences as SET_DECL_RTL changes reg's
6465 attrs. So, make sure the RTL already has the parm as the
6466 EXPR, so that it won't change. */
6467 SET_DECL_RTL (var, NULL_RTX);
6468 if (MEM_P (in))
6469 set_mem_attributes (in, var, true);
6470 SET_DECL_RTL (var, in);
6474 /* If this function is `main', emit a call to `__main'
6475 to run global initializers, etc. */
6476 if (DECL_NAME (current_function_decl)
6477 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6478 && DECL_FILE_SCOPE_P (current_function_decl))
6479 expand_main_function ();
6481 /* Initialize the stack_protect_guard field. This must happen after the
6482 call to __main (if any) so that the external decl is initialized. */
6483 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6484 stack_protect_prologue ();
6486 expand_phi_nodes (&SA);
6488 /* Release any stale SSA redirection data. */
6489 redirect_edge_var_map_empty ();
6491 /* Register rtl specific functions for cfg. */
6492 rtl_register_cfg_hooks ();
6494 init_block = construct_init_block ();
6496 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6497 remaining edges later. */
6498 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6499 e->flags &= ~EDGE_EXECUTABLE;
6501 /* If the function has too many markers, drop them while expanding. */
6502 if (cfun->debug_marker_count
6503 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6504 cfun->debug_nonbind_markers = false;
6506 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6507 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6508 next_bb)
6509 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
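/* expand_gimple_basic_block may split BB (e.g. for a GIMPLE_COND or a
   tail call) and returns the block from which expansion should
   continue, so its result is fed back into the iteration. */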
6511 if (MAY_HAVE_DEBUG_BIND_INSNS)
6512 expand_debug_locations ();
6514 if (deep_ter_debug_map)
6516 delete deep_ter_debug_map;
6517 deep_ter_debug_map = NULL;
6520 /* Free stuff we no longer need after GIMPLE optimizations. */
6521 free_dominance_info (CDI_DOMINATORS);
6522 free_dominance_info (CDI_POST_DOMINATORS);
6523 delete_tree_cfg_annotations (fun);
6525 timevar_push (TV_OUT_OF_SSA);
6526 finish_out_of_ssa (&SA);
6527 timevar_pop (TV_OUT_OF_SSA);
6529 timevar_push (TV_POST_EXPAND);
6530 /* We are no longer in SSA form. */
6531 fun->gimple_df->in_ssa_p = false;
6532 loops_state_clear (LOOP_CLOSED_SSA);
6534 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6535 conservatively to true until they are all profile-aware. */
6536 delete lab_rtx_for_bb;
6537 free_histograms (fun);
6539 construct_exit_block ();
6540 insn_locations_finalize ();
6542 if (var_ret_seq)
6544 rtx_insn *after = return_label;
6545 rtx_insn *next = NEXT_INSN (after);
6546 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6547 after = next;
6548 emit_insn_after (var_ret_seq, after);
6551 /* Zap the tree EH table. */
6552 set_eh_throw_stmt_table (fun, NULL);
6554 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6555 to split edges, which edge insertions might do. */
6556 rebuild_jump_labels (get_insns ());
6558 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6559 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6561 edge e;
6562 edge_iterator ei;
6563 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6565 if (e->insns.r)
6567 rebuild_jump_labels_chain (e->insns.r);
6568 /* Put insns after parm birth, but before
6569 NOTE_INSN_FUNCTION_BEG. */
6570 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6571 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6573 rtx_insn *insns = e->insns.r;
6574 e->insns.r = NULL;
6575 if (NOTE_P (parm_birth_insn)
6576 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6577 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6578 else
6579 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6581 else
6582 commit_one_edge_insertion (e);
6584 else
6585 ei_next (&ei);
6589 /* We're done expanding trees to RTL. */
6590 currently_expanding_to_rtl = 0;
6592 flush_mark_addressable_queue ();
6594 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6595 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6597 edge e;
6598 edge_iterator ei;
6599 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6601 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6602 e->flags &= ~EDGE_EXECUTABLE;
6604 /* At the moment not all abnormal edges match the RTL
6605 representation. It is safe to remove them here as
6606 find_many_sub_basic_blocks will rediscover them.
6607 In the future we should get this fixed properly. */
6608 if ((e->flags & EDGE_ABNORMAL)
6609 && !(e->flags & EDGE_SIBCALL))
6610 remove_edge (e);
6611 else
6612 ei_next (&ei);
6616 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6617 bitmap_ones (blocks);
6618 find_many_sub_basic_blocks (blocks);
6619 purge_all_dead_edges ();
6621 /* After initial rtl generation, call back to finish generating
6622 exception support code. We need to do this before cleaning up
6623 the CFG as the code does not expect dead landing pads. */
6624 if (fun->eh->region_tree != NULL)
6625 finish_eh_generation ();
6627 /* Call expand_stack_alignment after finishing all
6628 updates to crtl->preferred_stack_boundary. */
6629 expand_stack_alignment ();
6631 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6632 function. */
6633 if (crtl->tail_call_emit)
6634 fixup_tail_calls ();
6636 /* BB subdivision may have created basic blocks that are only reachable
6637 from unlikely bbs but not marked as such in the profile. */
6638 if (optimize)
6639 propagate_unlikely_bbs_forward ();
6641 /* Remove unreachable blocks, otherwise we cannot compute dominators,
6642 which are needed for loop state verification. As a side-effect
6643 this also compacts blocks.
6644 ??? We cannot remove trivially dead insns here as for example
6645 the DRAP reg on i?86 is not magically live at this point.
6646 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6647 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6649 checking_verify_flow_info ();
6651 /* Initialize pseudos allocated for hard registers. */
6652 emit_initial_value_sets ();
6654 /* And finally unshare all RTL. */
6655 unshare_all_rtl ();
6657 /* There's no need to defer outputting this function any more; we
6658 know we want to output it. */
6659 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6661 /* Now that we're done expanding trees to RTL, we shouldn't have any
6662 more CONCATs anywhere. */
6663 generating_concat_p = 0;
6665 if (dump_file)
6667 fprintf (dump_file,
6668 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6669 /* And the pass manager will dump RTL for us. */
6672 /* If we're emitting a nested function, make sure its parent gets
6673 emitted as well. Doing otherwise confuses debug info. */
6675 tree parent;
6676 for (parent = DECL_CONTEXT (current_function_decl);
6677 parent != NULL_TREE;
6678 parent = get_containing_scope (parent))
6679 if (TREE_CODE (parent) == FUNCTION_DECL)
6680 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6683 TREE_ASM_WRITTEN (current_function_decl) = 1;
6685 /* After expanding, the return labels are no longer needed. */
6686 return_label = NULL;
6687 naked_return_label = NULL;
6689 /* After expanding, the tm_restart map is no longer needed. */
6690 if (fun->gimple_df->tm_restart)
6691 fun->gimple_df->tm_restart = NULL;
6693 /* Tag the blocks with a depth number so that change_scope can find
6694 the common parent easily. */
6695 set_block_levels (DECL_INITIAL (fun->decl), 0);
6696 default_rtl_profile ();
6698 /* For -dx discard loops now, otherwise IL verify in clean_state will
6699 ICE. */
6700 if (rtl_dump_and_exit)
6702 cfun->curr_properties &= ~PROP_loops;
6703 loop_optimizer_finalize ();
6706 timevar_pop (TV_POST_EXPAND);
6708 return 0;
6711 } // anon namespace
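/* Factory hook through which the pass manager (see passes.def)
   instantiates the expand pass for a given compiler context. */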
6713 rtl_opt_pass *
6714 make_pass_expand (gcc::context *ctxt)
6716 return new pass_expand (ctxt);