gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING. */
70 #include "stringpool.h"
71 #include "attribs.h"
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "output.h"
75 #include "builtins.h"
77 /* Some systems use __main in a way incompatible with its use in gcc, in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
85 /* This variable holds information helping the rewriting of SSA trees
86 into RTL. */
87 struct ssaexpand SA;
89 /* This variable holds the currently expanded gimple statement for purposes
 90    of communicating the profile info to the builtin expanders.  */
91 gimple *currently_expanding_gimple_stmt;
93 static rtx expand_debug_expr (tree);
95 static bool defer_stack_allocation (tree, bool);
97 static void record_alignment_for_reg_var (unsigned int);
99 /* Return an expression tree corresponding to the RHS of GIMPLE
100 statement STMT. */
102 tree
103 gimple_assign_rhs_to_tree (gimple *stmt)
105 tree t;
106 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
108 case GIMPLE_TERNARY_RHS:
109 t = build3 (gimple_assign_rhs_code (stmt),
110 TREE_TYPE (gimple_assign_lhs (stmt)),
111 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
112 gimple_assign_rhs3 (stmt));
113 break;
114 case GIMPLE_BINARY_RHS:
115 t = build2 (gimple_assign_rhs_code (stmt),
116 TREE_TYPE (gimple_assign_lhs (stmt)),
117 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
118 break;
119 case GIMPLE_UNARY_RHS:
120 t = build1 (gimple_assign_rhs_code (stmt),
121 TREE_TYPE (gimple_assign_lhs (stmt)),
122 gimple_assign_rhs1 (stmt));
123 break;
124 case GIMPLE_SINGLE_RHS:
126 t = gimple_assign_rhs1 (stmt);
127 /* Avoid modifying this tree in place below. */
128 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
129 && gimple_location (stmt) != EXPR_LOCATION (t))
130 || (gimple_block (stmt) && currently_expanding_to_rtl
131 && EXPR_P (t)))
132 t = copy_node (t);
133 break;
135 default:
136 gcc_unreachable ();
139 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140 SET_EXPR_LOCATION (t, gimple_location (stmt));
142 return t;
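/* Illustrative sketch, not part of the original source: for a
   GIMPLE_BINARY_RHS such as the hypothetical statement "x_1 = a_2 + b_3",
   the function above effectively does

     t = build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   and then copies the statement's location onto T.  */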
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
148 #endif
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
154 out of the same user variable being in multiple partitions (this is
155 less likely for compiler-introduced temps). */
157 static tree
158 leader_merge (tree cur, tree next)
160 if (cur == NULL || cur == next)
161 return next;
163 if (DECL_P (cur) && DECL_IGNORED_P (cur))
164 return cur;
166 if (DECL_P (next) && DECL_IGNORED_P (next))
167 return next;
169 return cur;
172 /* Associate declaration T with storage space X.  If T is not an
173 SSA name this is exactly SET_DECL_RTL, otherwise make the
174 partition of T associated with X. */
175 static inline void
176 set_rtl (tree t, rtx x)
178 gcc_checking_assert (!x
179 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 || (use_register_for_decl (t)
181 ? (REG_P (x)
182 || (GET_CODE (x) == CONCAT
183 && (REG_P (XEXP (x, 0))
184 || SUBREG_P (XEXP (x, 0)))
185 && (REG_P (XEXP (x, 1))
186 || SUBREG_P (XEXP (x, 1))))
187			       /* We need to accept PARALLELs for RESULT_DECLs
188 because of vector types with BLKmode returned
189 in multiple registers, but they are supposed
190 to be uncoalesced. */
191 || (GET_CODE (x) == PARALLEL
192 && SSAVAR (t)
193 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
194 && (GET_MODE (x) == BLKmode
195 || !flag_tree_coalesce_vars)))
196 : (MEM_P (x) || x == pc_rtx
197 || (GET_CODE (x) == CONCAT
198 && MEM_P (XEXP (x, 0))
199 && MEM_P (XEXP (x, 1))))));
200 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201 RESULT_DECLs has the expected mode. For memory, we accept
202 unpromoted modes, since that's what we're likely to get. For
203 PARM_DECLs and RESULT_DECLs, we'll have been called by
204 set_parm_rtl, which will give us the default def, so we don't
205 have to compute it ourselves. For RESULT_DECLs, we accept mode
206 mismatches too, as long as we have BLKmode or are not coalescing
207 across variables, so that we don't reject BLKmode PARALLELs or
208 unpromoted REGs. */
209 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
210 || (SSAVAR (t)
211 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 && (promote_ssa_mode (t, NULL) == BLKmode
213 || !flag_tree_coalesce_vars))
214 || !use_register_for_decl (t)
215 || GET_MODE (x) == promote_ssa_mode (t, NULL));
217 if (x)
219 bool skip = false;
220 tree cur = NULL_TREE;
221 rtx xm = x;
223 retry:
224 if (MEM_P (xm))
225 cur = MEM_EXPR (xm);
226 else if (REG_P (xm))
227 cur = REG_EXPR (xm);
228 else if (SUBREG_P (xm))
230 gcc_assert (subreg_lowpart_p (xm));
231 xm = SUBREG_REG (xm);
232 goto retry;
234 else if (GET_CODE (xm) == CONCAT)
236 xm = XEXP (xm, 0);
237 goto retry;
239 else if (GET_CODE (xm) == PARALLEL)
241 xm = XVECEXP (xm, 0, 0);
242 gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 xm = XEXP (xm, 0);
244 goto retry;
246 else if (xm == pc_rtx)
247 skip = true;
248 else
249 gcc_unreachable ();
251 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
253 if (cur != next)
255 if (MEM_P (x))
256 set_mem_attributes (x,
257 next && TREE_CODE (next) == SSA_NAME
258 ? TREE_TYPE (next)
259 : next, true);
260 else
261 set_reg_attrs_for_decl_rtl (next, x);
265 if (TREE_CODE (t) == SSA_NAME)
267 int part = var_to_partition (SA.map, t);
268 if (part != NO_PARTITION)
270 if (SA.partition_to_pseudo[part])
271 gcc_assert (SA.partition_to_pseudo[part] == x);
272 else if (x != pc_rtx)
273 SA.partition_to_pseudo[part] = x;
275 /* For the benefit of debug information at -O0 (where
276 vartracking doesn't run) record the place also in the base
277 DECL. For PARMs and RESULTs, do so only when setting the
278 default def. */
279 if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 && (VAR_P (SSA_NAME_VAR (t))
281 || SSA_NAME_IS_DEFAULT_DEF (t)))
283 tree var = SSA_NAME_VAR (t);
284 /* If we don't yet have something recorded, just record it now. */
285 if (!DECL_RTL_SET_P (var))
286 SET_DECL_RTL (var, x);
287 /* If we have it set already to "multiple places" don't
288 change this. */
289 else if (DECL_RTL (var) == pc_rtx)
291 /* If we have something recorded and it's not the same place
292 as we want to record now, we have multiple partitions for the
293 same base variable, with different places. We can't just
294	     randomly choose one, hence we have to say that we don't know.
295 This only happens with optimization, and there var-tracking
296 will figure out the right thing. */
297 else if (DECL_RTL (var) != x)
298 SET_DECL_RTL (var, pc_rtx);
301 else
302 SET_DECL_RTL (t, x);
305 /* This structure holds data relevant to one variable that will be
306 placed in a stack slot. */
307 class stack_var
309 public:
310 /* The Variable. */
311 tree decl;
313 /* Initially, the size of the variable. Later, the size of the partition,
314      if this variable becomes its partition's representative.  */
315 poly_uint64 size;
317 /* The *byte* alignment required for this variable. Or as, with the
318 size, the alignment for this partition. */
319 unsigned int alignb;
321 /* The partition representative. */
322 size_t representative;
324 /* The next stack variable in the partition, or EOC. */
325 size_t next;
327 /* The numbers of conflicting stack variables. */
328 bitmap conflicts;
331 #define EOC ((size_t)-1)
333 /* We have an array of such objects while deciding allocation. */
334 static class stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
339 /* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341 static bitmap_obstack stack_var_bitmap_obstack;
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344 is non-decreasing. */
345 static size_t *stack_vars_sorted;
347 /* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350 static int frame_phase;
352 /* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354 static bool has_protected_decls;
356 /* Used during expand_used_vars.  Remember if we saw a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358 static bool has_short_buffer;
360 /* Compute the byte alignment to use for DECL. Ignore alignment
361    we can't satisfy with the expected alignment of the stack boundary.  */
363 static unsigned int
364 align_local_variable (tree decl, bool really_expand)
366 unsigned int align;
368 if (TREE_CODE (decl) == SSA_NAME)
369 align = TYPE_ALIGN (TREE_TYPE (decl));
370 else
372 align = LOCAL_DECL_ALIGNMENT (decl);
373 /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
374 That is done before IPA and could bump alignment based on host
375 backend even for offloaded code which wants different
376 LOCAL_DECL_ALIGNMENT. */
377 if (really_expand)
378 SET_DECL_ALIGN (decl, align);
380 return align / BITS_PER_UNIT;
383 /* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
384    round down otherwise.  Return the aligned BASE value.  */
386 static inline unsigned HOST_WIDE_INT
387 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
389 return align_up ? (base + align - 1) & -align : base & -align;
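/* Worked example (illustration only, not part of the original source):
   align_base (37, 16, true)  == (37 + 15) & -16 == 48, while
   align_base (37, 16, false) == 37 & -16        == 32.  */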
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393 Return the frame offset. */
395 static poly_int64
396 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
398 poly_int64 offset, new_frame_offset;
400 if (FRAME_GROWS_DOWNWARD)
402 new_frame_offset
403 = aligned_lower_bound (frame_offset - frame_phase - size,
404 align) + frame_phase;
405 offset = new_frame_offset;
407 else
409 new_frame_offset
410 = aligned_upper_bound (frame_offset - frame_phase,
411 align) + frame_phase;
412 offset = new_frame_offset;
413 new_frame_offset += size;
415 frame_offset = new_frame_offset;
417 if (frame_offset_overflow (frame_offset, cfun->decl))
418 frame_offset = offset = 0;
420 return offset;
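/* Illustrative numbers, not part of the original source, assuming
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and a current frame_offset of -16:
   alloc_stack_frame_space (12, 8) computes
   aligned_lower_bound (-16 - 12, 8) == -32, sets frame_offset to -32 and
   returns -32 as the new slot's offset.  */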
423 /* Accumulate DECL into STACK_VARS. */
425 static void
426 add_stack_var (tree decl, bool really_expand)
428 class stack_var *v;
430 if (stack_vars_num >= stack_vars_alloc)
432 if (stack_vars_alloc)
433 stack_vars_alloc = stack_vars_alloc * 3 / 2;
434 else
435 stack_vars_alloc = 32;
436 stack_vars
437 = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
439 if (!decl_to_stack_part)
440 decl_to_stack_part = new hash_map<tree, size_t>;
442 v = &stack_vars[stack_vars_num];
443 decl_to_stack_part->put (decl, stack_vars_num);
445 v->decl = decl;
446 tree size = TREE_CODE (decl) == SSA_NAME
447 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
448 : DECL_SIZE_UNIT (decl);
449 v->size = tree_to_poly_uint64 (size);
450 /* Ensure that all variables have size, so that &a != &b for any two
451 variables that are simultaneously live. */
452 if (known_eq (v->size, 0U))
453 v->size = 1;
454 v->alignb = align_local_variable (decl, really_expand);
455 /* An alignment of zero can mightily confuse us later. */
456 gcc_assert (v->alignb != 0);
458 /* All variables are initially in their own partition. */
459 v->representative = stack_vars_num;
460 v->next = EOC;
462 /* All variables initially conflict with no other. */
463 v->conflicts = NULL;
465 /* Ensure that this decl doesn't get put onto the list twice. */
466 set_rtl (decl, pc_rtx);
468 stack_vars_num++;
471 /* Make the decls associated with luids X and Y conflict.  */
473 static void
474 add_stack_var_conflict (size_t x, size_t y)
476 class stack_var *a = &stack_vars[x];
477 class stack_var *b = &stack_vars[y];
478 if (x == y)
479 return;
480 if (!a->conflicts)
481 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
482 if (!b->conflicts)
483 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
484 bitmap_set_bit (a->conflicts, y);
485 bitmap_set_bit (b->conflicts, x);
488 /* Check whether the decls associated with luids X and Y conflict.  */
490 static bool
491 stack_var_conflict_p (size_t x, size_t y)
493 class stack_var *a = &stack_vars[x];
494 class stack_var *b = &stack_vars[y];
495 if (x == y)
496 return false;
497 /* Partitions containing an SSA name result from gimple registers
498 with things like unsupported modes. They are top-level and
499 hence conflict with everything else. */
500 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
501 return true;
503 if (!a->conflicts || !b->conflicts)
504 return false;
505 return bitmap_bit_p (a->conflicts, y);
508 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
509 enter its partition number into bitmap DATA. */
511 static bool
512 visit_op (gimple *, tree op, tree, void *data)
514 bitmap active = (bitmap)data;
515 op = get_base_address (op);
516 if (op
517 && DECL_P (op)
518 && DECL_RTL_IF_SET (op) == pc_rtx)
520 size_t *v = decl_to_stack_part->get (op);
521 if (v)
522 bitmap_set_bit (active, *v);
524 return false;
527 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
528 record conflicts between it and all currently active other partitions
529 from bitmap DATA. */
531 static bool
532 visit_conflict (gimple *, tree op, tree, void *data)
534 bitmap active = (bitmap)data;
535 op = get_base_address (op);
536 if (op
537 && DECL_P (op)
538 && DECL_RTL_IF_SET (op) == pc_rtx)
540 size_t *v = decl_to_stack_part->get (op);
541 if (v && bitmap_set_bit (active, *v))
543 size_t num = *v;
544 bitmap_iterator bi;
545 unsigned i;
546 gcc_assert (num < stack_vars_num);
547 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
548 add_stack_var_conflict (num, i);
551 return false;
554 /* Helper routine for add_scope_conflicts, calculating the active partitions
555 at the end of BB, leaving the result in WORK. We're called to generate
556 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
557 liveness. */
559 static void
560 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
562 edge e;
563 edge_iterator ei;
564 gimple_stmt_iterator gsi;
565 walk_stmt_load_store_addr_fn visit;
567 bitmap_clear (work);
568 FOR_EACH_EDGE (e, ei, bb->preds)
569 bitmap_ior_into (work, (bitmap)e->src->aux);
571 visit = visit_op;
573 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
575 gimple *stmt = gsi_stmt (gsi);
576 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
578 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
580 gimple *stmt = gsi_stmt (gsi);
582 if (gimple_clobber_p (stmt))
584 tree lhs = gimple_assign_lhs (stmt);
585 size_t *v;
586 /* Nested function lowering might introduce LHSs
587 that are COMPONENT_REFs. */
588 if (!VAR_P (lhs))
589 continue;
590 if (DECL_RTL_IF_SET (lhs) == pc_rtx
591 && (v = decl_to_stack_part->get (lhs)))
592 bitmap_clear_bit (work, *v);
594 else if (!is_gimple_debug (stmt))
596 if (for_conflict
597 && visit == visit_op)
599 /* If this is the first real instruction in this BB we need
600 to add conflicts for everything live at this point now.
601 Unlike classical liveness for named objects we can't
602 rely on seeing a def/use of the names we're interested in.
603 There might merely be indirect loads/stores. We'd not add any
604 conflicts for such partitions. */
605 bitmap_iterator bi;
606 unsigned i;
607 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
609 class stack_var *a = &stack_vars[i];
610 if (!a->conflicts)
611 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
612 bitmap_ior_into (a->conflicts, work);
614 visit = visit_conflict;
616 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
621 /* Generate stack partition conflicts between all partitions that are
622 simultaneously live. */
624 static void
625 add_scope_conflicts (void)
627 basic_block bb;
628 bool changed;
629 bitmap work = BITMAP_ALLOC (NULL);
630 int *rpo;
631 int n_bbs;
633 /* We approximate the live range of a stack variable by taking the first
634 mention of its name as starting point(s), and by the end-of-scope
635 death clobber added by gimplify as ending point(s) of the range.
636      This overapproximates in the case where we, for instance, moved an
637      address-taken operation upward without also moving a dereference to it
638      upwards.  But it is conservatively correct, as a variable can never hold
639      values before its name is mentioned at least once.
641 We then do a mostly classical bitmap liveness algorithm. */
643 FOR_ALL_BB_FN (bb, cfun)
644 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
646 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
647 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
649 changed = true;
650 while (changed)
652 int i;
653 changed = false;
654 for (i = 0; i < n_bbs; i++)
656 bitmap active;
657 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
658 active = (bitmap)bb->aux;
659 add_scope_conflicts_1 (bb, work, false);
660 if (bitmap_ior_into (active, work))
661 changed = true;
665 FOR_EACH_BB_FN (bb, cfun)
666 add_scope_conflicts_1 (bb, work, true);
668 free (rpo);
669 BITMAP_FREE (work);
670 FOR_ALL_BB_FN (bb, cfun)
671 BITMAP_FREE (bb->aux);
674 /* A subroutine of partition_stack_vars. A comparison function for qsort,
675 sorting an array of indices by the properties of the object. */
677 static int
678 stack_var_cmp (const void *a, const void *b)
680 size_t ia = *(const size_t *)a;
681 size_t ib = *(const size_t *)b;
682 unsigned int aligna = stack_vars[ia].alignb;
683 unsigned int alignb = stack_vars[ib].alignb;
684 poly_int64 sizea = stack_vars[ia].size;
685 poly_int64 sizeb = stack_vars[ib].size;
686 tree decla = stack_vars[ia].decl;
687 tree declb = stack_vars[ib].decl;
688 bool largea, largeb;
689 unsigned int uida, uidb;
691 /* Primary compare on "large" alignment. Large comes first. */
692 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
693 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
694 if (largea != largeb)
695 return (int)largeb - (int)largea;
697   /* Secondary compare on size, decreasing.  */
698 int diff = compare_sizes_for_sort (sizeb, sizea);
699 if (diff != 0)
700 return diff;
702 /* Tertiary compare on true alignment, decreasing. */
703 if (aligna < alignb)
704 return -1;
705 if (aligna > alignb)
706 return 1;
708 /* Final compare on ID for sort stability, increasing.
709 Two SSA names are compared by their version, SSA names come before
710 non-SSA names, and two normal decls are compared by their DECL_UID. */
711 if (TREE_CODE (decla) == SSA_NAME)
713 if (TREE_CODE (declb) == SSA_NAME)
714 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
715 else
716 return -1;
718 else if (TREE_CODE (declb) == SSA_NAME)
719 return 1;
720 else
721 uida = DECL_UID (decla), uidb = DECL_UID (declb);
722 if (uida < uidb)
723 return 1;
724 if (uida > uidb)
725 return -1;
726 return 0;
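/* Example ordering (hypothetical decls, not from the original source):
   given small-alignment stack vars A (size 64, alignb 16), B (size 64,
   alignb 8) and C (size 32, alignb 8), stack_var_cmp sorts them as
   A, B, C: equal sizes fall back to decreasing alignment, and the larger
   size 64 precedes size 32.  */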
729 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
730 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
732 /* If the points-to solution *PT points to variables that are in a partition
733 together with other variables add all partition members to the pointed-to
734 variables bitmap. */
736 static void
737 add_partitioned_vars_to_ptset (struct pt_solution *pt,
738 part_hashmap *decls_to_partitions,
739 hash_set<bitmap> *visited, bitmap temp)
741 bitmap_iterator bi;
742 unsigned i;
743 bitmap *part;
745 if (pt->anything
746 || pt->vars == NULL
747 /* The pointed-to vars bitmap is shared, it is enough to
748 visit it once. */
749 || visited->add (pt->vars))
750 return;
752 bitmap_clear (temp);
754 /* By using a temporary bitmap to store all members of the partitions
755      we have to add, we make sure to visit each of the partitions only
756 once. */
757 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
758 if ((!temp
759 || !bitmap_bit_p (temp, i))
760 && (part = decls_to_partitions->get (i)))
761 bitmap_ior_into (temp, *part);
762 if (!bitmap_empty_p (temp))
763 bitmap_ior_into (pt->vars, temp);
766 /* Update points-to sets based on partition info, so we can use them on RTL.
767 The bitmaps representing stack partitions will be saved until expand,
768 where partitioned decls used as bases in memory expressions will be
769 rewritten. */
771 static void
772 update_alias_info_with_stack_vars (void)
774 part_hashmap *decls_to_partitions = NULL;
775 size_t i, j;
776 tree var = NULL_TREE;
778 for (i = 0; i < stack_vars_num; i++)
780 bitmap part = NULL;
781 tree name;
782 struct ptr_info_def *pi;
784       /* Not interested in partitions with a single variable.  */
785 if (stack_vars[i].representative != i
786 || stack_vars[i].next == EOC)
787 continue;
789 if (!decls_to_partitions)
791 decls_to_partitions = new part_hashmap;
792 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
795 /* Create an SSA_NAME that points to the partition for use
796 as base during alias-oracle queries on RTL for bases that
797 have been partitioned. */
798 if (var == NULL_TREE)
799 var = create_tmp_var (ptr_type_node);
800 name = make_ssa_name (var);
802 /* Create bitmaps representing partitions. They will be used for
803 points-to sets later, so use GGC alloc. */
804 part = BITMAP_GGC_ALLOC ();
805 for (j = i; j != EOC; j = stack_vars[j].next)
807 tree decl = stack_vars[j].decl;
808 unsigned int uid = DECL_PT_UID (decl);
809 bitmap_set_bit (part, uid);
810 decls_to_partitions->put (uid, part);
811 cfun->gimple_df->decls_to_pointers->put (decl, name);
812 if (TREE_ADDRESSABLE (decl))
813 TREE_ADDRESSABLE (name) = 1;
816 /* Make the SSA name point to all partition members. */
817 pi = get_ptr_info (name);
818 pt_solution_set (&pi->pt, part, false);
821 /* Make all points-to sets that contain one member of a partition
822 contain all members of the partition. */
823 if (decls_to_partitions)
825 unsigned i;
826 tree name;
827 hash_set<bitmap> visited;
828 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
830 FOR_EACH_SSA_NAME (i, name, cfun)
832 struct ptr_info_def *pi;
834 if (POINTER_TYPE_P (TREE_TYPE (name))
835 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
836 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
837 &visited, temp);
840 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
841 decls_to_partitions, &visited, temp);
843 delete decls_to_partitions;
844 BITMAP_FREE (temp);
848 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
849 partitioning algorithm. Partitions A and B are known to be non-conflicting.
850 Merge them into a single partition A. */
852 static void
853 union_stack_vars (size_t a, size_t b)
855 class stack_var *vb = &stack_vars[b];
856 bitmap_iterator bi;
857 unsigned u;
859 gcc_assert (stack_vars[b].next == EOC);
860 /* Add B to A's partition. */
861 stack_vars[b].next = stack_vars[a].next;
862 stack_vars[b].representative = a;
863 stack_vars[a].next = b;
865 /* Make sure A is big enough to hold B. */
866 stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
868 /* Update the required alignment of partition A to account for B. */
869 if (stack_vars[a].alignb < stack_vars[b].alignb)
870 stack_vars[a].alignb = stack_vars[b].alignb;
872 /* Update the interference graph and merge the conflicts. */
873 if (vb->conflicts)
875 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
876 add_stack_var_conflict (a, stack_vars[u].representative);
877 BITMAP_FREE (vb->conflicts);
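/* A small worked example, not part of the original source: if partition A
   has size 24 and alignb 8 while partition B has size 40 and alignb 16,
   union_stack_vars (A, B) links B into A's chain, makes A the
   representative, and leaves A with size 40 and alignb 16, the maxima of
   the two.  B's conflicts are folded into A's.  */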
881 /* A subroutine of expand_used_vars. Binpack the variables into
882 partitions constrained by the interference graph. The overall
883 algorithm used is as follows:
885 Sort the objects by size in descending order.
886 For each object A {
887 S = size(A)
888 O = 0
889 loop {
890 Look for the largest non-conflicting object B with size <= S.
891 UNION (A, B)
896 static void
897 partition_stack_vars (void)
899 size_t si, sj, n = stack_vars_num;
901 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
902 for (si = 0; si < n; ++si)
903 stack_vars_sorted[si] = si;
905 if (n == 1)
906 return;
908 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
910 for (si = 0; si < n; ++si)
912 size_t i = stack_vars_sorted[si];
913 unsigned int ialign = stack_vars[i].alignb;
914 poly_int64 isize = stack_vars[i].size;
916 /* Ignore objects that aren't partition representatives. If we
917 see a var that is not a partition representative, it must
918 have been merged earlier. */
919 if (stack_vars[i].representative != i)
920 continue;
922 for (sj = si + 1; sj < n; ++sj)
924 size_t j = stack_vars_sorted[sj];
925 unsigned int jalign = stack_vars[j].alignb;
926 poly_int64 jsize = stack_vars[j].size;
928 /* Ignore objects that aren't partition representatives. */
929 if (stack_vars[j].representative != j)
930 continue;
932 /* Do not mix objects of "small" (supported) alignment
933 and "large" (unsupported) alignment. */
934 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
935 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
936 break;
938 /* For Address Sanitizer do not mix objects with different
939 sizes, as the shorter vars wouldn't be adequately protected.
940 Don't do that for "large" (unsupported) alignment objects,
941 those aren't protected anyway. */
942 if (asan_sanitize_stack_p ()
943 && maybe_ne (isize, jsize)
944 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
945 break;
947 /* Ignore conflicting objects. */
948 if (stack_var_conflict_p (i, j))
949 continue;
951 /* UNION the objects, placing J at OFFSET. */
952 union_stack_vars (i, j);
956 update_alias_info_with_stack_vars ();
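/* Sketch of the effect (hypothetical variables, not from the original
   source): three non-conflicting small-alignment vars of sizes 32, 16 and 8
   are visited in decreasing size order and all merged into the partition of
   the 32-byte var, which then needs only 32 bytes of frame space; a var
   that conflicts with the 32-byte one instead stays in its own partition.  */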
959 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
961 static void
962 dump_stack_var_partition (void)
964 size_t si, i, j, n = stack_vars_num;
966 for (si = 0; si < n; ++si)
968 i = stack_vars_sorted[si];
970 /* Skip variables that aren't partition representatives, for now. */
971 if (stack_vars[i].representative != i)
972 continue;
974 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
975 print_dec (stack_vars[i].size, dump_file);
976 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
978 for (j = i; j != EOC; j = stack_vars[j].next)
980 fputc ('\t', dump_file);
981 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
983 fputc ('\n', dump_file);
987 /* Assign rtl to DECL at BASE + OFFSET. */
989 static void
990 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
991 poly_int64 offset)
993 unsigned align;
994 rtx x;
996 /* If this fails, we've overflowed the stack frame. Error nicely? */
997 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
999 x = plus_constant (Pmode, base, offset);
1000 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1001 ? TYPE_MODE (TREE_TYPE (decl))
1002 : DECL_MODE (SSAVAR (decl)), x);
1004 if (TREE_CODE (decl) != SSA_NAME)
1006       /* Set the alignment we actually gave this decl if it isn't an SSA name.
1007          If it is, we generate stack slots only accidentally, so it isn't as
1008          important; we'll simply use the alignment that is already set.
1009 if (base == virtual_stack_vars_rtx)
1010 offset -= frame_phase;
1011 align = known_alignment (offset);
1012 align *= BITS_PER_UNIT;
1013 if (align == 0 || align > base_align)
1014 align = base_align;
1016 /* One would think that we could assert that we're not decreasing
1017 alignment here, but (at least) the i386 port does exactly this
1018 via the MINIMUM_ALIGNMENT hook. */
1020 SET_DECL_ALIGN (decl, align);
1021 DECL_USER_ALIGN (decl) = 0;
1024 set_rtl (decl, x);
1027 class stack_vars_data
1029 public:
1030 /* Vector of offset pairs, always end of some padding followed
1031 by start of the padding that needs Address Sanitizer protection.
1032      The vector is in reversed order, highest offset pairs come first.  */
1033 auto_vec<HOST_WIDE_INT> asan_vec;
1035 /* Vector of partition representative decls in between the paddings. */
1036 auto_vec<tree> asan_decl_vec;
1038 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1039 rtx asan_base;
1041 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1042 unsigned int asan_alignb;
1045 /* A subroutine of expand_used_vars. Give each partition representative
1046 a unique location within the stack frame. Update each partition member
1047 with that location. */
1049 static void
1050 expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
1052 size_t si, i, j, n = stack_vars_num;
1053 poly_uint64 large_size = 0, large_alloc = 0;
1054 rtx large_base = NULL;
1055 unsigned large_align = 0;
1056 bool large_allocation_done = false;
1057 tree decl;
1059 /* Determine if there are any variables requiring "large" alignment.
1060 Since these are dynamically allocated, we only process these if
1061      no predicate is involved.  */
1062 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1063 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1065 /* Find the total size of these variables. */
1066 for (si = 0; si < n; ++si)
1068 unsigned alignb;
1070 i = stack_vars_sorted[si];
1071 alignb = stack_vars[i].alignb;
1073 /* All "large" alignment decls come before all "small" alignment
1074 decls, but "large" alignment decls are not sorted based on
1075 their alignment. Increase large_align to track the largest
1076 required alignment. */
1077 if ((alignb * BITS_PER_UNIT) > large_align)
1078 large_align = alignb * BITS_PER_UNIT;
1080 /* Stop when we get to the first decl with "small" alignment. */
1081 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1082 break;
1084 /* Skip variables that aren't partition representatives. */
1085 if (stack_vars[i].representative != i)
1086 continue;
1088 /* Skip variables that have already had rtl assigned. See also
1089 add_stack_var where we perpetrate this pc_rtx hack. */
1090 decl = stack_vars[i].decl;
1091 if (TREE_CODE (decl) == SSA_NAME
1092 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1093 : DECL_RTL (decl) != pc_rtx)
1094 continue;
1096 large_size = aligned_upper_bound (large_size, alignb);
1097 large_size += stack_vars[i].size;
1101 for (si = 0; si < n; ++si)
1103 rtx base;
1104 unsigned base_align, alignb;
1105 poly_int64 offset;
1107 i = stack_vars_sorted[si];
1109 /* Skip variables that aren't partition representatives, for now. */
1110 if (stack_vars[i].representative != i)
1111 continue;
1113 /* Skip variables that have already had rtl assigned. See also
1114 add_stack_var where we perpetrate this pc_rtx hack. */
1115 decl = stack_vars[i].decl;
1116 if (TREE_CODE (decl) == SSA_NAME
1117 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1118 : DECL_RTL (decl) != pc_rtx)
1119 continue;
1121 /* Check the predicate to see whether this variable should be
1122 allocated in this pass. */
1123 if (pred && !pred (i))
1124 continue;
1126 alignb = stack_vars[i].alignb;
1127 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1129 base = virtual_stack_vars_rtx;
1130 /* ASAN description strings don't yet have a syntax for expressing
1131 polynomial offsets. */
1132 HOST_WIDE_INT prev_offset;
1133 if (asan_sanitize_stack_p ()
1134 && pred
1135 && frame_offset.is_constant (&prev_offset)
1136 && stack_vars[i].size.is_constant ())
1138 if (data->asan_vec.is_empty ())
1140 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1141 prev_offset = frame_offset.to_constant ();
1143 prev_offset = align_base (prev_offset,
1144 ASAN_MIN_RED_ZONE_SIZE,
1145 !FRAME_GROWS_DOWNWARD);
1146 tree repr_decl = NULL_TREE;
1147 unsigned HOST_WIDE_INT size
1148 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1149 if (data->asan_vec.is_empty ())
1150 size = MAX (size, ASAN_RED_ZONE_SIZE);
1152 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1153 ASAN_MIN_RED_ZONE_SIZE);
1154 offset = alloc_stack_frame_space (size, alignment);
1156 data->asan_vec.safe_push (prev_offset);
1157 /* Allocating a constant amount of space from a constant
1158 starting offset must give a constant result. */
1159 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1160 .to_constant ());
1161 /* Find best representative of the partition.
1162	     Prefer those with DECL_NAME, and better yet those
1163	     satisfying the asan_protect_stack_decl predicate.  */
1164 for (j = i; j != EOC; j = stack_vars[j].next)
1165 if (asan_protect_stack_decl (stack_vars[j].decl)
1166 && DECL_NAME (stack_vars[j].decl))
1168 repr_decl = stack_vars[j].decl;
1169 break;
1171 else if (repr_decl == NULL_TREE
1172 && DECL_P (stack_vars[j].decl)
1173 && DECL_NAME (stack_vars[j].decl))
1174 repr_decl = stack_vars[j].decl;
1175 if (repr_decl == NULL_TREE)
1176 repr_decl = stack_vars[i].decl;
1177 data->asan_decl_vec.safe_push (repr_decl);
1179	  /* Make sure a representative is unpoisoned if another
1180 variable in the partition is handled by
1181 use-after-scope sanitization. */
1182 if (asan_handled_variables != NULL
1183 && !asan_handled_variables->contains (repr_decl))
1185 for (j = i; j != EOC; j = stack_vars[j].next)
1186 if (asan_handled_variables->contains (stack_vars[j].decl))
1187 break;
1188 if (j != EOC)
1189 asan_handled_variables->add (repr_decl);
1192 data->asan_alignb = MAX (data->asan_alignb, alignb);
1193 if (data->asan_base == NULL)
1194 data->asan_base = gen_reg_rtx (Pmode);
1195 base = data->asan_base;
1197 if (!STRICT_ALIGNMENT)
1198 base_align = crtl->max_used_stack_slot_alignment;
1199 else
1200 base_align = MAX (crtl->max_used_stack_slot_alignment,
1201 GET_MODE_ALIGNMENT (SImode)
1202 << ASAN_SHADOW_SHIFT);
1204 else
1206 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1207 base_align = crtl->max_used_stack_slot_alignment;
1210 else
1212 /* Large alignment is only processed in the last pass. */
1213 if (pred)
1214 continue;
1216 /* If there were any variables requiring "large" alignment, allocate
1217 space. */
1218 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1220 poly_int64 loffset;
1221 rtx large_allocsize;
1223 large_allocsize = gen_int_mode (large_size, Pmode);
1224 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1225 loffset = alloc_stack_frame_space
1226 (rtx_to_poly_int64 (large_allocsize),
1227 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1228 large_base = get_dynamic_stack_base (loffset, large_align);
1229 large_allocation_done = true;
1231 gcc_assert (large_base != NULL);
1233 large_alloc = aligned_upper_bound (large_alloc, alignb);
1234 offset = large_alloc;
1235 large_alloc += stack_vars[i].size;
1237 base = large_base;
1238 base_align = large_align;
1241 /* Create rtl for each variable based on their location within the
1242 partition. */
1243 for (j = i; j != EOC; j = stack_vars[j].next)
1245 expand_one_stack_var_at (stack_vars[j].decl,
1246 base, base_align,
1247 offset);
1251 gcc_assert (known_eq (large_alloc, large_size));
1254 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1255 static poly_uint64
1256 account_stack_vars (void)
1258 size_t si, j, i, n = stack_vars_num;
1259 poly_uint64 size = 0;
1261 for (si = 0; si < n; ++si)
1263 i = stack_vars_sorted[si];
1265 /* Skip variables that aren't partition representatives, for now. */
1266 if (stack_vars[i].representative != i)
1267 continue;
1269 size += stack_vars[i].size;
1270 for (j = i; j != EOC; j = stack_vars[j].next)
1271 set_rtl (stack_vars[j].decl, NULL);
1273 return size;
1276 /* Record the RTL assignment X for the default def of PARM. */
1278 extern void
1279 set_parm_rtl (tree parm, rtx x)
1281 gcc_assert (TREE_CODE (parm) == PARM_DECL
1282 || TREE_CODE (parm) == RESULT_DECL);
1284 if (x && !MEM_P (x))
1286 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1287 TYPE_MODE (TREE_TYPE (parm)),
1288 TYPE_ALIGN (TREE_TYPE (parm)));
1290       /* If the variable alignment is very large we'll dynamically
1291 allocate it, which means that in-frame portion is just a
1292 pointer. ??? We've got a pseudo for sure here, do we
1293 actually dynamically allocate its spilling area if needed?
1294 ??? Isn't it a problem when Pmode alignment also exceeds
1295 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1296 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1297 align = GET_MODE_ALIGNMENT (Pmode);
1299 record_alignment_for_reg_var (align);
1302 tree ssa = ssa_default_def (cfun, parm);
1303 if (!ssa)
1304 return set_rtl (parm, x);
1306 int part = var_to_partition (SA.map, ssa);
1307 gcc_assert (part != NO_PARTITION);
1309 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1310 gcc_assert (changed);
1312 set_rtl (ssa, x);
1313 gcc_assert (DECL_RTL (parm) == x);
1316 /* A subroutine of expand_one_var. Called to immediately assign rtl
1317 to a variable to be allocated in the stack frame. */
1319 static void
1320 expand_one_stack_var_1 (tree var)
1322 poly_uint64 size;
1323 poly_int64 offset;
1324 unsigned byte_align;
1326 if (TREE_CODE (var) == SSA_NAME)
1328 tree type = TREE_TYPE (var);
1329 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1330 byte_align = TYPE_ALIGN_UNIT (type);
1332 else
1334 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1335 byte_align = align_local_variable (var, true);
1338 /* We handle highly aligned variables in expand_stack_vars. */
1339 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1341 offset = alloc_stack_frame_space (size, byte_align);
1343 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1344 crtl->max_used_stack_slot_alignment, offset);
1347 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1348 already assigned some MEM. */
1350 static void
1351 expand_one_stack_var (tree var)
1353 if (TREE_CODE (var) == SSA_NAME)
1355 int part = var_to_partition (SA.map, var);
1356 if (part != NO_PARTITION)
1358 rtx x = SA.partition_to_pseudo[part];
1359 gcc_assert (x);
1360 gcc_assert (MEM_P (x));
1361 return;
1365 return expand_one_stack_var_1 (var);
1368 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1369 that will reside in a hard register. */
1371 static void
1372 expand_one_hard_reg_var (tree var)
1374 rest_of_decl_compilation (var, 0, 0);
1377 /* Record the alignment requirements of some variable assigned to a
1378 pseudo. */
1380 static void
1381 record_alignment_for_reg_var (unsigned int align)
1383 if (SUPPORTS_STACK_ALIGNMENT
1384 && crtl->stack_alignment_estimated < align)
1386 /* stack_alignment_estimated shouldn't change after stack
1387 realign decision made */
1388 gcc_assert (!crtl->stack_realign_processed);
1389 crtl->stack_alignment_estimated = align;
1392 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1393 So here we only make sure stack_alignment_needed >= align. */
1394 if (crtl->stack_alignment_needed < align)
1395 crtl->stack_alignment_needed = align;
1396 if (crtl->max_used_stack_slot_alignment < align)
1397 crtl->max_used_stack_slot_alignment = align;
1400 /* Create RTL for an SSA partition. */
1402 static void
1403 expand_one_ssa_partition (tree var)
1405 int part = var_to_partition (SA.map, var);
1406 gcc_assert (part != NO_PARTITION);
1408 if (SA.partition_to_pseudo[part])
1409 return;
1411 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1412 TYPE_MODE (TREE_TYPE (var)),
1413 TYPE_ALIGN (TREE_TYPE (var)));
1415   /* If the variable alignment is very large we'll dynamically allocate
1416 it, which means that in-frame portion is just a pointer. */
1417 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1418 align = GET_MODE_ALIGNMENT (Pmode);
1420 record_alignment_for_reg_var (align);
1422 if (!use_register_for_decl (var))
1424 if (defer_stack_allocation (var, true))
1425 add_stack_var (var, true);
1426 else
1427 expand_one_stack_var_1 (var);
1428 return;
1431 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1432 rtx x = gen_reg_rtx (reg_mode);
1434 set_rtl (var, x);
1436 /* For a promoted variable, X will not be used directly but wrapped in a
1437 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1438 will assume that its upper bits can be inferred from its lower bits.
1439 Therefore, if X isn't initialized on every path from the entry, then
1440 we must do it manually in order to fulfill the above assumption. */
1441 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1442 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1443 emit_move_insn (x, CONST0_RTX (reg_mode));
1446 /* Record the association between the RTL generated for partition PART
1447 and the underlying variable of the SSA_NAME VAR. */
1449 static void
1450 adjust_one_expanded_partition_var (tree var)
1452 if (!var)
1453 return;
1455 tree decl = SSA_NAME_VAR (var);
1457 int part = var_to_partition (SA.map, var);
1458 if (part == NO_PARTITION)
1459 return;
1461 rtx x = SA.partition_to_pseudo[part];
1463 gcc_assert (x);
1465 set_rtl (var, x);
1467 if (!REG_P (x))
1468 return;
1470 /* Note if the object is a user variable. */
1471 if (decl && !DECL_ARTIFICIAL (decl))
1472 mark_user_reg (x);
1474 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1475 mark_reg_pointer (x, get_pointer_alignment (var));
1478 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1479 that will reside in a pseudo register. */
1481 static void
1482 expand_one_register_var (tree var)
1484 if (TREE_CODE (var) == SSA_NAME)
1486 int part = var_to_partition (SA.map, var);
1487 if (part != NO_PARTITION)
1489 rtx x = SA.partition_to_pseudo[part];
1490 gcc_assert (x);
1491 gcc_assert (REG_P (x));
1492 return;
1494 gcc_unreachable ();
1497 tree decl = var;
1498 tree type = TREE_TYPE (decl);
1499 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1500 rtx x = gen_reg_rtx (reg_mode);
1502 set_rtl (var, x);
1504 /* Note if the object is a user variable. */
1505 if (!DECL_ARTIFICIAL (decl))
1506 mark_user_reg (x);
1508 if (POINTER_TYPE_P (type))
1509 mark_reg_pointer (x, get_pointer_alignment (var));
1512 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1513 has some associated error, e.g. its type is error-mark. We just need
1514 to pick something that won't crash the rest of the compiler. */
1516 static void
1517 expand_one_error_var (tree var)
1519 machine_mode mode = DECL_MODE (var);
1520 rtx x;
1522 if (mode == BLKmode)
1523 x = gen_rtx_MEM (BLKmode, const0_rtx);
1524 else if (mode == VOIDmode)
1525 x = const0_rtx;
1526 else
1527 x = gen_reg_rtx (mode);
1529 SET_DECL_RTL (var, x);
1532 /* A subroutine of expand_one_var. VAR is a variable that will be
1533 allocated to the local stack frame. Return true if we wish to
1534 add VAR to STACK_VARS so that it will be coalesced with other
1535 variables. Return false to allocate VAR immediately.
1537 This function is used to reduce the number of variables considered
1538 for coalescing, which reduces the size of the quadratic problem. */
1540 static bool
1541 defer_stack_allocation (tree var, bool toplevel)
1543 tree size_unit = TREE_CODE (var) == SSA_NAME
1544 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1545 : DECL_SIZE_UNIT (var);
1546 poly_uint64 size;
1548 /* Whether the variable is small enough for immediate allocation not to be
1549 a problem with regard to the frame size. */
1550 bool smallish
1551 = (poly_int_tree_p (size_unit, &size)
1552 && (estimated_poly_value (size)
1553 < param_min_size_for_stack_sharing));
1555 /* If stack protection is enabled, *all* stack variables must be deferred,
1556 so that we can re-order the strings to the top of the frame.
1557 Similarly for Address Sanitizer. */
1558 if (flag_stack_protect || asan_sanitize_stack_p ())
1559 return true;
1561 unsigned int align = TREE_CODE (var) == SSA_NAME
1562 ? TYPE_ALIGN (TREE_TYPE (var))
1563 : DECL_ALIGN (var);
1565 /* We handle "large" alignment via dynamic allocation. We want to handle
1566 this extra complication in only one place, so defer them. */
1567 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1568 return true;
1570 bool ignored = TREE_CODE (var) == SSA_NAME
1571 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1572 : DECL_IGNORED_P (var);
1574 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1575 might be detached from their block and appear at toplevel when we reach
1576 here. We want to coalesce them with variables from other blocks when
1577 the immediate contribution to the frame size would be noticeable. */
1578 if (toplevel && optimize > 0 && ignored && !smallish)
1579 return true;
1581 /* Variables declared in the outermost scope automatically conflict
1582 with every other variable. The only reason to want to defer them
1583 at all is that, after sorting, we can more efficiently pack
1584 small variables in the stack frame. Continue to defer at -O2. */
1585 if (toplevel && optimize < 2)
1586 return false;
1588 /* Without optimization, *most* variables are allocated from the
1589 stack, which makes the quadratic problem large exactly when we
1590 want compilation to proceed as quickly as possible. On the
1591 other hand, we don't want the function's stack frame size to
1592 get completely out of hand. So we avoid adding scalars and
1593 "small" aggregates to the list at all. */
1594 if (optimize == 0 && smallish)
1595 return false;
1597 return true;
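/* Behaviour sketch, not part of the original source: with -fstack-protector
   or ASan stack sanitization every variable is deferred; a variable whose
   alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT is always deferred so the
   dynamic-allocation path handles it; and at -O0 a variable smaller than
   param_min_size_for_stack_sharing is allocated immediately.  */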
1600 /* A subroutine of expand_used_vars. Expand one variable according to
1601 its flavor. Variables to be placed on the stack are not actually
1602 expanded yet, merely recorded.
1603 When REALLY_EXPAND is false, only add stack values to be allocated.
1604    Return the stack usage this variable is supposed to take.
1607 static poly_uint64
1608 expand_one_var (tree var, bool toplevel, bool really_expand)
1610 unsigned int align = BITS_PER_UNIT;
1611 tree origvar = var;
1613 var = SSAVAR (var);
1615 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1617 if (is_global_var (var))
1618 return 0;
1620 /* Because we don't know if VAR will be in register or on stack,
1621 we conservatively assume it will be on stack even if VAR is
1622 eventually put into register after RA pass. For non-automatic
1623 variables, which won't be on stack, we collect alignment of
1624 type and ignore user specified alignment. Similarly for
1625 SSA_NAMEs for which use_register_for_decl returns true. */
1626 if (TREE_STATIC (var)
1627 || DECL_EXTERNAL (var)
1628 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1629 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1630 TYPE_MODE (TREE_TYPE (var)),
1631 TYPE_ALIGN (TREE_TYPE (var)));
1632 else if (DECL_HAS_VALUE_EXPR_P (var)
1633 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1634 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1635 or variables which were assigned a stack slot already by
1636 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1637 changed from the offset chosen to it. */
1638 align = crtl->stack_alignment_estimated;
1639 else
1640 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1642       /* If the variable alignment is very large we'll dynamically allocate
1643 it, which means that in-frame portion is just a pointer. */
1644 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1645 align = GET_MODE_ALIGNMENT (Pmode);
1648 record_alignment_for_reg_var (align);
1650 poly_uint64 size;
1651 if (TREE_CODE (origvar) == SSA_NAME)
1653 gcc_assert (!VAR_P (var)
1654 || (!DECL_EXTERNAL (var)
1655 && !DECL_HAS_VALUE_EXPR_P (var)
1656 && !TREE_STATIC (var)
1657 && TREE_TYPE (var) != error_mark_node
1658 && !DECL_HARD_REGISTER (var)
1659 && really_expand));
1661 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1663 else if (DECL_EXTERNAL (var))
1665 else if (DECL_HAS_VALUE_EXPR_P (var))
1667 else if (TREE_STATIC (var))
1669 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1671 else if (TREE_TYPE (var) == error_mark_node)
1673 if (really_expand)
1674 expand_one_error_var (var);
1676 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1678 if (really_expand)
1680 expand_one_hard_reg_var (var);
1681 if (!DECL_HARD_REGISTER (var))
1682 /* Invalid register specification. */
1683 expand_one_error_var (var);
1686 else if (use_register_for_decl (var))
1688 if (really_expand)
1689 expand_one_register_var (origvar);
1691 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1692 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1694 /* Reject variables which cover more than half of the address-space. */
1695 if (really_expand)
1697 if (DECL_NONLOCAL_FRAME (var))
1698 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1699 "total size of local objects is too large");
1700 else
1701 error_at (DECL_SOURCE_LOCATION (var),
1702 "size of variable %q+D is too large", var);
1703 expand_one_error_var (var);
1706 else if (defer_stack_allocation (var, toplevel))
1707 add_stack_var (origvar, really_expand);
1708 else
1710 if (really_expand)
1712 if (lookup_attribute ("naked",
1713 DECL_ATTRIBUTES (current_function_decl)))
1714 error ("cannot allocate stack for variable %q+D, naked function",
1715 var);
1717 expand_one_stack_var (origvar);
1719 return size;
1721 return 0;
1724 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1725 expanding variables. Those variables that can be put into registers
1726 are allocated pseudos; those that can't are put on the stack.
1728 TOPLEVEL is true if this is the outermost BLOCK. */
1730 static void
1731 expand_used_vars_for_block (tree block, bool toplevel)
1733 tree t;
1735 /* Expand all variables at this level. */
1736 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1737 if (TREE_USED (t)
1738 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1739 || !DECL_NONSHAREABLE (t)))
1740 expand_one_var (t, toplevel, true);
1742 /* Expand all variables at containing levels. */
1743 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1744 expand_used_vars_for_block (t, false);
1747 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1748 and clear TREE_USED on all local variables. */
1750 static void
1751 clear_tree_used (tree block)
1753 tree t;
1755 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1756 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1757 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1758 || !DECL_NONSHAREABLE (t))
1759 TREE_USED (t) = 0;
1761 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1762 clear_tree_used (t);
1765 enum {
1766 SPCT_FLAG_DEFAULT = 1,
1767 SPCT_FLAG_ALL = 2,
1768 SPCT_FLAG_STRONG = 3,
1769 SPCT_FLAG_EXPLICIT = 4
1772 /* Examine TYPE and determine a bit mask of the following features. */
1774 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1775 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1776 #define SPCT_HAS_ARRAY 4
1777 #define SPCT_HAS_AGGREGATE 8
1779 static unsigned int
1780 stack_protect_classify_type (tree type)
1782 unsigned int ret = 0;
1783 tree t;
1785 switch (TREE_CODE (type))
1787 case ARRAY_TYPE:
1788 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1789 if (t == char_type_node
1790 || t == signed_char_type_node
1791 || t == unsigned_char_type_node)
1793 unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
1794 unsigned HOST_WIDE_INT len;
1796 if (!TYPE_SIZE_UNIT (type)
1797 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1798 len = max;
1799 else
1800 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1802 if (len < max)
1803 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1804 else
1805 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1807 else
1808 ret = SPCT_HAS_ARRAY;
1809 break;
1811 case UNION_TYPE:
1812 case QUAL_UNION_TYPE:
1813 case RECORD_TYPE:
1814 ret = SPCT_HAS_AGGREGATE;
1815 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1816 if (TREE_CODE (t) == FIELD_DECL)
1817 ret |= stack_protect_classify_type (TREE_TYPE (t));
1818 break;
1820 default:
1821 break;
1824 return ret;
1827 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1828 part of the local stack frame. Remember if we ever return nonzero for
1829 any variable in this function. The return value is the phase number in
1830 which the variable should be allocated. */
1832 static int
1833 stack_protect_decl_phase (tree decl)
1835 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1836 int ret = 0;
1838 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1839 has_short_buffer = true;
1841 if (flag_stack_protect == SPCT_FLAG_ALL
1842 || flag_stack_protect == SPCT_FLAG_STRONG
1843 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1844 && lookup_attribute ("stack_protect",
1845 DECL_ATTRIBUTES (current_function_decl))))
1847 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1848 && !(bits & SPCT_HAS_AGGREGATE))
1849 ret = 1;
1850 else if (bits & SPCT_HAS_ARRAY)
1851 ret = 2;
1853 else
1854 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1856 if (ret)
1857 has_protected_decls = true;
1859 return ret;
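/* Illustration, not part of the original source, assuming
   -fstack-protector-strong: a local "char buf[4]" (with the default
   ssp-buffer-size of 8) classifies as a small char array and gets phase 1,
   "int arr[10]" is a non-char array and gets phase 2, and a plain "int x"
   gets phase 0, i.e. it is not segregated.  */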
1862 /* Two helper routines that check for phase 1 and phase 2. These are used
1863 as callbacks for expand_stack_vars. */
1865 static bool
1866 stack_protect_decl_phase_1 (size_t i)
1868 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1871 static bool
1872 stack_protect_decl_phase_2 (size_t i)
1874 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1877 /* And a helper function that checks for the asan phase (with stack protector
1878 it is phase 3). This is used as callback for expand_stack_vars.
1879 Returns true if any of the vars in the partition need to be protected. */
1881 static bool
1882 asan_decl_phase_3 (size_t i)
1884 while (i != EOC)
1886 if (asan_protect_stack_decl (stack_vars[i].decl))
1887 return true;
1888 i = stack_vars[i].next;
1890 return false;
1893 /* Ensure that variables in different stack protection phases conflict
1894 so that they are not merged and share the same stack slot.
1895 Return true if there are any address taken variables. */
1897 static bool
1898 add_stack_protection_conflicts (void)
1900 size_t i, j, n = stack_vars_num;
1901 unsigned char *phase;
1902 bool ret = false;
1904 phase = XNEWVEC (unsigned char, n);
1905 for (i = 0; i < n; ++i)
1907 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1908 if (TREE_ADDRESSABLE (stack_vars[i].decl))
1909 ret = true;
1912 for (i = 0; i < n; ++i)
1914 unsigned char ph_i = phase[i];
1915 for (j = i + 1; j < n; ++j)
1916 if (ph_i != phase[j])
1917 add_stack_var_conflict (i, j);
1920 XDELETEVEC (phase);
1921 return ret;
1924 /* Create a decl for the guard at the top of the stack frame. */
1926 static void
1927 create_stack_guard (void)
1929 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1930 VAR_DECL, NULL, ptr_type_node);
1931 TREE_THIS_VOLATILE (guard) = 1;
1932 TREE_USED (guard) = 1;
1933 expand_one_stack_var (guard);
1934 crtl->stack_protect_guard = guard;
1937 /* Prepare for expanding variables. */
1938 static void
1939 init_vars_expansion (void)
1941 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1942 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1944 /* A map from decl to stack partition. */
1945 decl_to_stack_part = new hash_map<tree, size_t>;
1947 /* Initialize local stack smashing state. */
1948 has_protected_decls = false;
1949 has_short_buffer = false;
1952 /* Free up stack variable graph data. */
1953 static void
1954 fini_vars_expansion (void)
1956 bitmap_obstack_release (&stack_var_bitmap_obstack);
1957 if (stack_vars)
1958 XDELETEVEC (stack_vars);
1959 if (stack_vars_sorted)
1960 XDELETEVEC (stack_vars_sorted);
1961 stack_vars = NULL;
1962 stack_vars_sorted = NULL;
1963 stack_vars_alloc = stack_vars_num = 0;
1964 delete decl_to_stack_part;
1965 decl_to_stack_part = NULL;
1968 /* Make a fair guess for the size of the stack frame of the function
1969 in NODE. This doesn't have to be exact; the result is only used in
1970 the inline heuristics. So we don't want to run the full stack var
1971 packing algorithm (which is quadratic in the number of stack vars).
1972 Instead, we calculate the total size of all stack vars. This turns
1973 out to be a pretty fair estimate -- packing of stack vars doesn't
1974 happen very often. */
1976 HOST_WIDE_INT
1977 estimated_stack_frame_size (struct cgraph_node *node)
1979 poly_int64 size = 0;
1980 size_t i;
1981 tree var;
1982 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1984 push_cfun (fn);
1986 init_vars_expansion ();
1988 FOR_EACH_LOCAL_DECL (fn, i, var)
1989 if (auto_var_in_fn_p (var, fn->decl))
1990 size += expand_one_var (var, true, false);
1992 if (stack_vars_num > 0)
1994 /* Fake sorting the stack vars for account_stack_vars (). */
1995 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1996 for (i = 0; i < stack_vars_num; ++i)
1997 stack_vars_sorted[i] = i;
1998 size += account_stack_vars ();
2001 fini_vars_expansion ();
2002 pop_cfun ();
2003 return estimated_poly_value (size);
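/* Illustrative sketch (exposition only): the estimate is simply the sum of
   the individual variable sizes plus per-variable alignment, with no attempt
   at slot sharing, so for the callee below it comes to roughly 100*4 + 16
   bytes.  */
#if 0
static int
esfs_example (int i)
{
  int a[100];
  char b[16];
  a[i % 100] = i;
  b[i % 16] = (char) i;
  return a[0] + b[0];
}
#endif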
2006 /* Check if the current function has calls that use a return slot. */
2008 static bool
2009 stack_protect_return_slot_p ()
2011 basic_block bb;
2013 FOR_ALL_BB_FN (bb, cfun)
2014 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 !gsi_end_p (gsi); gsi_next (&gsi))
2017 gimple *stmt = gsi_stmt (gsi);
2018 /* This assumes that calls to internal-only functions never
2019 use a return slot. */
2020 if (is_gimple_call (stmt)
2021 && !gimple_call_internal_p (stmt)
2022 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 gimple_call_fndecl (stmt)))
2024 return true;
2026 return false;
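/* Illustrative sketch (exposition only): a call whose aggregate return value
   is passed in memory needs a return slot in the caller's frame, so a caller
   like the one below makes this predicate return true and, with
   -fstack-protector-strong, gets a stack guard.  The 128-byte struct is only
   an assumption chosen to force a memory return on common ABIs.  */
#if 0
struct big_result { char bytes[128]; };
extern struct big_result produce_result (void);

void
return_slot_example (void)
{
  struct big_result r = produce_result ();
  (void) r.bytes[0];
}
#endif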
2029 /* Expand all variables used in the function. */
2031 static rtx_insn *
2032 expand_used_vars (void)
2034 tree var, outer_block = DECL_INITIAL (current_function_decl);
2035 auto_vec<tree> maybe_local_decls;
2036 rtx_insn *var_end_seq = NULL;
2037 unsigned i;
2038 unsigned len;
2039 bool gen_stack_protect_signal = false;
2041 /* Compute the phase of the stack frame for this function. */
2043 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2044 int off = targetm.starting_frame_offset () % align;
2045 frame_phase = off ? align - off : 0;
2048 /* Set TREE_USED on all variables in the local_decls. */
2049 FOR_EACH_LOCAL_DECL (cfun, i, var)
2050 TREE_USED (var) = 1;
2051 /* Clear TREE_USED on all variables associated with a block scope. */
2052 clear_tree_used (DECL_INITIAL (current_function_decl));
2054 init_vars_expansion ();
2056 if (targetm.use_pseudo_pic_reg ())
2057 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2059 for (i = 0; i < SA.map->num_partitions; i++)
2061 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 continue;
2064 tree var = partition_to_var (SA.map, i);
2066 gcc_assert (!virtual_operand_p (var));
2068 expand_one_ssa_partition (var);
2071 if (flag_stack_protect == SPCT_FLAG_STRONG)
2072 gen_stack_protect_signal = stack_protect_return_slot_p ();
2074 /* At this point all variables on the local_decls with TREE_USED
2075 set are not associated with any block scope. Lay them out. */
2077 len = vec_safe_length (cfun->local_decls);
2078 FOR_EACH_LOCAL_DECL (cfun, i, var)
2080 bool expand_now = false;
2082 /* Expanded above already. */
2083 if (is_gimple_reg (var))
2085 TREE_USED (var) = 0;
2086 goto next;
2088 /* We didn't set a block for static or extern because it's hard
2089 to tell the difference between a global variable (re)declared
2090 in a local scope, and one that's really declared there to
2091 begin with. And it doesn't really matter much, since we're
2092 not giving them stack space. Expand them now. */
2093 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2094 expand_now = true;
2096 /* Expand variables not associated with any block now. Those created by
2097 the optimizers could be live anywhere in the function. Those that
2098 could possibly have been scoped originally and detached from their
2099 block will have their allocation deferred so we coalesce them with
2100 others when optimization is enabled. */
2101 else if (TREE_USED (var))
2102 expand_now = true;
2104 /* Finally, mark all variables on the list as used. We'll use
2105 this in a moment when we expand those associated with scopes. */
2106 TREE_USED (var) = 1;
2108 if (expand_now)
2109 expand_one_var (var, true, true);
2111 next:
2112 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2114 rtx rtl = DECL_RTL_IF_SET (var);
2116 /* Keep artificial non-ignored vars in cfun->local_decls
2117 chain until instantiate_decls. */
2118 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2119 add_local_decl (cfun, var);
2120 else if (rtl == NULL_RTX)
2121 /* If rtl isn't set yet, which can happen e.g. with
2122 -fstack-protector, retry before returning from this
2123 function. */
2124 maybe_local_decls.safe_push (var);
2128 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2130 +-----------------+-----------------+
2131 | ...processed... | ...duplicates...|
2132 +-----------------+-----------------+
2133                   ^
2134                   +-- LEN points here.
2136 We just want the duplicates, as those are the artificial
2137 non-ignored vars that we want to keep until instantiate_decls.
2138 Move them down and truncate the array. */
2139 if (!vec_safe_is_empty (cfun->local_decls))
2140 cfun->local_decls->block_remove (0, len);
2142 /* At this point, all variables within the block tree with TREE_USED
2143 set are actually used by the optimized function. Lay them out. */
2144 expand_used_vars_for_block (outer_block, true);
2146 if (stack_vars_num > 0)
2148 bool has_addressable_vars = false;
2150 add_scope_conflicts ();
2152 /* If stack protection is enabled, we don't share space between
2153 vulnerable data and non-vulnerable data. */
2154 if (flag_stack_protect != 0
2155 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2156 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2157 && lookup_attribute ("stack_protect",
2158 DECL_ATTRIBUTES (current_function_decl)))))
2159 has_addressable_vars = add_stack_protection_conflicts ();
2161 if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
2162 gen_stack_protect_signal = true;
2164 /* Now that we have collected all stack variables, and have computed a
2165 minimal interference graph, attempt to save some stack space. */
2166 partition_stack_vars ();
2167 if (dump_file)
2168 dump_stack_var_partition ();
2171 switch (flag_stack_protect)
2173 case SPCT_FLAG_ALL:
2174 create_stack_guard ();
2175 break;
2177 case SPCT_FLAG_STRONG:
2178 if (gen_stack_protect_signal
2179 || cfun->calls_alloca
2180 || has_protected_decls
2181 || lookup_attribute ("stack_protect",
2182 DECL_ATTRIBUTES (current_function_decl)))
2183 create_stack_guard ();
2184 break;
2186 case SPCT_FLAG_DEFAULT:
2187 if (cfun->calls_alloca
2188 || has_protected_decls
2189 || lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))
2191 create_stack_guard ();
2192 break;
2194 case SPCT_FLAG_EXPLICIT:
2195 if (lookup_attribute ("stack_protect",
2196 DECL_ATTRIBUTES (current_function_decl)))
2197 create_stack_guard ();
2198 break;
2200 default:
2201 break;
2204 /* Assign rtl to each variable based on these partitions. */
2205 if (stack_vars_num > 0)
2207 class stack_vars_data data;
2209 data.asan_base = NULL_RTX;
2210 data.asan_alignb = 0;
2212 /* Reorder decls to be protected by iterating over the variables
2213 array multiple times, and allocating out of each phase in turn. */
2214 /* ??? We could probably integrate this into the qsort we did
2215 earlier, such that we naturally see these variables first,
2216 and thus naturally allocate things in the right order. */
2217 if (has_protected_decls)
2219 /* Phase 1 contains only character arrays. */
2220 expand_stack_vars (stack_protect_decl_phase_1, &data);
2222 /* Phase 2 contains other kinds of arrays. */
2223 if (flag_stack_protect == SPCT_FLAG_ALL
2224 || flag_stack_protect == SPCT_FLAG_STRONG
2225 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2226 && lookup_attribute ("stack_protect",
2227 DECL_ATTRIBUTES (current_function_decl))))
2228 expand_stack_vars (stack_protect_decl_phase_2, &data);
2231 if (asan_sanitize_stack_p ())
2232 /* Phase 3, any partitions that need asan protection
2233 in addition to phase 1 and 2. */
2234 expand_stack_vars (asan_decl_phase_3, &data);
2236 /* ASAN description strings don't yet have a syntax for expressing
2237 polynomial offsets. */
2238 HOST_WIDE_INT prev_offset;
2239 if (!data.asan_vec.is_empty ()
2240 && frame_offset.is_constant (&prev_offset))
2242 HOST_WIDE_INT offset, sz, redzonesz;
2243 redzonesz = ASAN_RED_ZONE_SIZE;
2244 sz = data.asan_vec[0] - prev_offset;
2245 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2246 && data.asan_alignb <= 4096
2247 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2248 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2249 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2250 /* Allocating a constant amount of space from a constant
2251 starting offset must give a constant result. */
2252 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2253 .to_constant ());
2254 data.asan_vec.safe_push (prev_offset);
2255 data.asan_vec.safe_push (offset);
2256 /* Leave space for alignment if STRICT_ALIGNMENT. */
2257 if (STRICT_ALIGNMENT)
2258 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2259 << ASAN_SHADOW_SHIFT)
2260 / BITS_PER_UNIT, 1);
2262 var_end_seq
2263 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2264 data.asan_base,
2265 data.asan_alignb,
2266 data.asan_vec.address (),
2267 data.asan_decl_vec.address (),
2268 data.asan_vec.length ());
2271 expand_stack_vars (NULL, &data);
2274 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2275 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2276 virtual_stack_vars_rtx,
2277 var_end_seq);
2279 fini_vars_expansion ();
2281 /* If there were any artificial non-ignored vars without rtl
2282 found earlier, see if deferred stack allocation hasn't assigned
2283 rtl to them. */
2284 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2286 rtx rtl = DECL_RTL_IF_SET (var);
2288 /* Keep artificial non-ignored vars in cfun->local_decls
2289 chain until instantiate_decls. */
2290 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2291 add_local_decl (cfun, var);
2294 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2295 if (STACK_ALIGNMENT_NEEDED)
2297 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2298 if (FRAME_GROWS_DOWNWARD)
2299 frame_offset = aligned_lower_bound (frame_offset, align);
2300 else
2301 frame_offset = aligned_upper_bound (frame_offset, align);
2304 return var_end_seq;
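/* Illustrative sketch (exposition only): for a function such as the one
   below, compiled with -fstack-protector-strong, the ordering established
   above allocates the guard first, then the char buffer (phase 1), then the
   int array (phase 2), and only then the remaining locals.  On the usual
   downward-growing stack a linear overrun of the buffers therefore runs into
   the guard before it can reach unrelated locals or saved state; the exact
   offsets are target dependent.  */
#if 0
void
layout_example (const char *src)
{
  int important_flag = 0;
  int codes[8] = { 0 };
  char buf[32];
  __builtin_strcpy (buf, src);        /* Potential overflow.  */
  (void) important_flag;
  (void) codes[0];
}
#endif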
2308 /* If we need to produce a detailed dump, print the tree representation
2309 for STMT to the dump file. SINCE is the last RTX after which the RTL
2310 generated for STMT should have been appended. */
2312 static void
2313 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2315 if (dump_file && (dump_flags & TDF_DETAILS))
2317 fprintf (dump_file, "\n;; ");
2318 print_gimple_stmt (dump_file, stmt, 0,
2319 TDF_SLIM | (dump_flags & TDF_LINENO));
2320 fprintf (dump_file, "\n");
2322 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2326 /* Maps the blocks that do not contain tree labels to rtx labels. */
2328 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2330 /* Returns the label_rtx expression for a label starting basic block BB. */
2332 static rtx_code_label *
2333 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2335 gimple_stmt_iterator gsi;
2336 tree lab;
2338 if (bb->flags & BB_RTL)
2339 return block_label (bb);
2341 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2342 if (elt)
2343 return *elt;
2345 /* Find the tree label if it is present. */
2347 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2349 glabel *lab_stmt;
2351 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2352 if (!lab_stmt)
2353 break;
2355 lab = gimple_label_label (lab_stmt);
2356 if (DECL_NONLOCAL (lab))
2357 break;
2359 return jump_target_rtx (lab);
2362 rtx_code_label *l = gen_label_rtx ();
2363 lab_rtx_for_bb->put (bb, l);
2364 return l;
2368 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2369 of a basic block where we just expanded the conditional at the end,
2370 possibly clean up the CFG and instruction sequence. LAST is the
2371 last instruction before the just emitted jump sequence. */
2373 static void
2374 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2376 /* Special case: when jumpif decides that the condition is
2377 trivial it emits an unconditional jump (and the necessary
2378 barrier). But we still have two edges, the fallthru one is
2379 wrong. purge_dead_edges would clean this up later. Unfortunately
2380 we have to insert insns (and split edges) before
2381 find_many_sub_basic_blocks and hence before purge_dead_edges.
2382 But splitting edges might create new blocks which depend on the
2383 fact that if there are two edges there's no barrier. So the
2384 barrier would get lost and verify_flow_info would ICE. Instead
2385 of auditing all edge splitters to care for the barrier (which
2386 normally isn't there in a cleaned CFG), fix it here. */
2387 if (BARRIER_P (get_last_insn ()))
2389 rtx_insn *insn;
2390 remove_edge (e);
2391 /* Now, we have a single successor block, if we have insns to
2392 insert on the remaining edge we potentially will insert
2393 it at the end of this block (if the dest block isn't feasible)
2394 in order to avoid splitting the edge. This insertion will take
2395 place in front of the last jump. But we might have emitted
2396 multiple jumps (conditional and one unconditional) to the
2397 same destination. Inserting in front of the last one then
2398 is a problem. See PR 40021. We fix this by deleting all
2399 jumps except the last unconditional one. */
2400 insn = PREV_INSN (get_last_insn ());
2401 /* Make sure we have an unconditional jump. Otherwise we're
2402 confused. */
2403 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2404 for (insn = PREV_INSN (insn); insn != last;)
2406 insn = PREV_INSN (insn);
2407 if (JUMP_P (NEXT_INSN (insn)))
2409 if (!any_condjump_p (NEXT_INSN (insn)))
2411 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2412 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2414 delete_insn (NEXT_INSN (insn));
2420 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2421 Returns a new basic block if we've terminated the current basic
2422 block and created a new one. */
2424 static basic_block
2425 expand_gimple_cond (basic_block bb, gcond *stmt)
2427 basic_block new_bb, dest;
2428 edge true_edge;
2429 edge false_edge;
2430 rtx_insn *last2, *last;
2431 enum tree_code code;
2432 tree op0, op1;
2434 code = gimple_cond_code (stmt);
2435 op0 = gimple_cond_lhs (stmt);
2436 op1 = gimple_cond_rhs (stmt);
2437 /* We're sometimes presented with such code:
2438 D.123_1 = x < y;
2439 if (D.123_1 != 0)
2441 This would expand to two comparisons which then later might
2442 be cleaned up by combine. But some pattern matchers like if-conversion
2443 work better when there's only one compare, so make up for this
2444 here as special exception if TER would have made the same change. */
2445 if (SA.values
2446 && TREE_CODE (op0) == SSA_NAME
2447 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2448 && TREE_CODE (op1) == INTEGER_CST
2449 && ((gimple_cond_code (stmt) == NE_EXPR
2450 && integer_zerop (op1))
2451 || (gimple_cond_code (stmt) == EQ_EXPR
2452 && integer_onep (op1)))
2453 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2455 gimple *second = SSA_NAME_DEF_STMT (op0);
2456 if (gimple_code (second) == GIMPLE_ASSIGN)
2458 enum tree_code code2 = gimple_assign_rhs_code (second);
2459 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2461 code = code2;
2462 op0 = gimple_assign_rhs1 (second);
2463 op1 = gimple_assign_rhs2 (second);
2465 /* If jumps are cheap and the target does not support conditional
2466 compare, turn some more codes into jumpy sequences. */
2467 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2468 && targetm.gen_ccmp_first == NULL)
2470 if ((code2 == BIT_AND_EXPR
2471 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2472 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2473 || code2 == TRUTH_AND_EXPR)
2475 code = TRUTH_ANDIF_EXPR;
2476 op0 = gimple_assign_rhs1 (second);
2477 op1 = gimple_assign_rhs2 (second);
2479 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2481 code = TRUTH_ORIF_EXPR;
2482 op0 = gimple_assign_rhs1 (second);
2483 op1 = gimple_assign_rhs2 (second);
2489 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2490 into (x - C2) * C3 < C4. */
2491 if ((code == EQ_EXPR || code == NE_EXPR)
2492 && TREE_CODE (op0) == SSA_NAME
2493 && TREE_CODE (op1) == INTEGER_CST)
2494 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2496 last2 = last = get_last_insn ();
2498 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2499 set_curr_insn_location (gimple_location (stmt));
2501 /* These flags have no purpose in RTL land. */
2502 true_edge->flags &= ~EDGE_TRUE_VALUE;
2503 false_edge->flags &= ~EDGE_FALSE_VALUE;
2505 /* We can either have a pure conditional jump with one fallthru edge or
2506 two-way jump that needs to be decomposed into two basic blocks. */
2507 if (false_edge->dest == bb->next_bb)
2509 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2510 true_edge->probability);
2511 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2512 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2513 set_curr_insn_location (true_edge->goto_locus);
2514 false_edge->flags |= EDGE_FALLTHRU;
2515 maybe_cleanup_end_of_block (false_edge, last);
2516 return NULL;
2518 if (true_edge->dest == bb->next_bb)
2520 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2521 false_edge->probability);
2522 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2523 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2524 set_curr_insn_location (false_edge->goto_locus);
2525 true_edge->flags |= EDGE_FALLTHRU;
2526 maybe_cleanup_end_of_block (true_edge, last);
2527 return NULL;
2530 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 true_edge->probability);
2532 last = get_last_insn ();
2533 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2534 set_curr_insn_location (false_edge->goto_locus);
2535 emit_jump (label_rtx_for_bb (false_edge->dest));
2537 BB_END (bb) = last;
2538 if (BARRIER_P (BB_END (bb)))
2539 BB_END (bb) = PREV_INSN (BB_END (bb));
2540 update_bb_for_insn (bb);
2542 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2543 dest = false_edge->dest;
2544 redirect_edge_succ (false_edge, new_bb);
2545 false_edge->flags |= EDGE_FALLTHRU;
2546 new_bb->count = false_edge->count ();
2547 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2548 add_bb_to_loop (new_bb, loop);
2549 if (loop->latch == bb
2550 && loop->header == dest)
2551 loop->latch = new_bb;
2552 make_single_succ_edge (new_bb, dest, 0);
2553 if (BARRIER_P (BB_END (new_bb)))
2554 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2555 update_bb_for_insn (new_bb);
2557 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2559 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2561 set_curr_insn_location (true_edge->goto_locus);
2562 true_edge->goto_locus = curr_insn_location ();
2565 return new_bb;
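/* Illustrative sketch (exposition only) of the two source-level patterns the
   rewrites above are aimed at.  The first "if" arrives from gimple as
   "_1 = x < y;  if (_1 != 0)" and is collapsed back into a single
   compare-and-branch.  The second is a candidate for maybe_optimize_mod_cmp;
   for example, for 32-bit unsigned x the test "x % 3 == 0" can be rewritten
   as "x * 0xAAAAAAABu <= 0x55555555u", where 0xAAAAAAAB is the multiplicative
   inverse of 3 modulo 2^32 (the exact constants GCC picks depend on the type
   and the divisor).  */
#if 0
int
cond_expand_example (unsigned x, unsigned y)
{
  if (x < y)          /* One jump, not a compare feeding a second compare.  */
    return 1;
  if (x % 3 == 0)     /* Mod/cmp rewritten into multiply-and-compare.  */
    return 2;
  return 0;
}
#endif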
2568 /* Mark all calls that can have a transaction restart. */
2570 static void
2571 mark_transaction_restart_calls (gimple *stmt)
2573 struct tm_restart_node dummy;
2574 tm_restart_node **slot;
2576 if (!cfun->gimple_df->tm_restart)
2577 return;
2579 dummy.stmt = stmt;
2580 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2581 if (slot)
2583 struct tm_restart_node *n = *slot;
2584 tree list = n->label_or_list;
2585 rtx_insn *insn;
2587 for (insn = next_real_insn (get_last_insn ());
2588 !CALL_P (insn);
2589 insn = next_real_insn (insn))
2590 continue;
2592 if (TREE_CODE (list) == LABEL_DECL)
2593 add_reg_note (insn, REG_TM, label_rtx (list));
2594 else
2595 for (; list ; list = TREE_CHAIN (list))
2596 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601 statement STMT. */
2603 static void
2604 expand_call_stmt (gcall *stmt)
2606 tree exp, decl, lhs;
2607 bool builtin_p;
2608 size_t i;
2610 if (gimple_call_internal_p (stmt))
2612 expand_internal_call (stmt);
2613 return;
2616 /* If this is a call to a built-in function and it has no effect other
2617 than setting the lhs, try to implement it using an internal function
2618 instead. */
2619 decl = gimple_call_fndecl (stmt);
2620 if (gimple_call_lhs (stmt)
2621 && !gimple_has_side_effects (stmt)
2622 && (optimize || (decl && called_as_built_in (decl))))
2624 internal_fn ifn = replacement_internal_fn (stmt);
2625 if (ifn != IFN_LAST)
2627 expand_internal_call (ifn, stmt);
2628 return;
2632 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2634 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2635 builtin_p = decl && fndecl_built_in_p (decl);
2637 /* If this is not a builtin function, the function type through which the
2638 call is made may be different from the type of the function. */
2639 if (!builtin_p)
2640 CALL_EXPR_FN (exp)
2641 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2642 CALL_EXPR_FN (exp));
2644 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2645 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2647 for (i = 0; i < gimple_call_num_args (stmt); i++)
2649 tree arg = gimple_call_arg (stmt, i);
2650 gimple *def;
2651 /* TER addresses into arguments of builtin functions so we have a
2652 chance to infer more correct alignment information. See PR39954. */
2653 if (builtin_p
2654 && TREE_CODE (arg) == SSA_NAME
2655 && (def = get_gimple_for_ssa_name (arg))
2656 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2657 arg = gimple_assign_rhs1 (def);
2658 CALL_EXPR_ARG (exp, i) = arg;
2661 if (gimple_has_side_effects (stmt))
2662 TREE_SIDE_EFFECTS (exp) = 1;
2664 if (gimple_call_nothrow_p (stmt))
2665 TREE_NOTHROW (exp) = 1;
2667 if (gimple_no_warning_p (stmt))
2668 TREE_NO_WARNING (exp) = 1;
2670 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2671 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2672 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2673 if (decl
2674 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2675 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2676 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2677 else
2678 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2679 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2680 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2681 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2683 /* Ensure RTL is created for debug args. */
2684 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2686 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2687 unsigned int ix;
2688 tree dtemp;
2690 if (debug_args)
2691 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2693 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2694 expand_debug_expr (dtemp);
2698 rtx_insn *before_call = get_last_insn ();
2699 lhs = gimple_call_lhs (stmt);
2700 if (lhs)
2701 expand_assignment (lhs, exp, false);
2702 else
2703 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2705 /* If the gimple call is an indirect call and has 'nocf_check'
2706 attribute find a generated CALL insn to mark it as no
2707 control-flow verification is needed. */
2708 if (gimple_call_nocf_check_p (stmt)
2709 && !gimple_call_fndecl (stmt))
2711 rtx_insn *last = get_last_insn ();
2712 while (!CALL_P (last)
2713 && last != before_call)
2714 last = PREV_INSN (last);
2716 if (last != before_call)
2717 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2720 mark_transaction_restart_calls (stmt);
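/* Illustrative sketch (exposition only): a built-in call whose only effect is
   to set its lhs can be routed through an internal function above instead of
   being emitted as a library call.  Assuming the target provides a popcount
   pattern, the call below expands via the corresponding internal function
   rather than as a call to a library helper.  */
#if 0
int
popcount_example (unsigned x)
{
  return __builtin_popcount (x);
}
#endif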
2724 /* Generate RTL for an asm statement (explicit assembler code).
2725 STRING is a STRING_CST node containing the assembler code text,
2726 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2727 insn is volatile; don't optimize it. */
2729 static void
2730 expand_asm_loc (tree string, int vol, location_t locus)
2732 rtx body;
2734 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2735 ggc_strdup (TREE_STRING_POINTER (string)),
2736 locus);
2738 MEM_VOLATILE_P (body) = vol;
2740 /* Non-empty basic ASM implicitly clobbers memory. */
2741 if (TREE_STRING_LENGTH (string) != 0)
2743 rtx asm_op, clob;
2744 unsigned i, nclobbers;
2745 auto_vec<rtx> input_rvec, output_rvec;
2746 auto_vec<const char *> constraints;
2747 auto_vec<rtx> clobber_rvec;
2748 HARD_REG_SET clobbered_regs;
2749 CLEAR_HARD_REG_SET (clobbered_regs);
2751 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2752 clobber_rvec.safe_push (clob);
2754 if (targetm.md_asm_adjust)
2755 targetm.md_asm_adjust (output_rvec, input_rvec,
2756 constraints, clobber_rvec,
2757 clobbered_regs);
2759 asm_op = body;
2760 nclobbers = clobber_rvec.length ();
2761 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2763 XVECEXP (body, 0, 0) = asm_op;
2764 for (i = 0; i < nclobbers; i++)
2765 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2768 emit_insn (body);
2771 /* Return the number of times character C occurs in string S. */
2772 static int
2773 n_occurrences (int c, const char *s)
2775 int n = 0;
2776 while (*s)
2777 n += (*s++ == c);
2778 return n;
2781 /* A subroutine of expand_asm_operands. Check that all operands have
2782 the same number of alternatives. Return true if so. */
2784 static bool
2785 check_operand_nalternatives (const vec<const char *> &constraints)
2787 unsigned len = constraints.length();
2788 if (len > 0)
2790 int nalternatives = n_occurrences (',', constraints[0]);
2792 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2794 error ("too many alternatives in %<asm%>");
2795 return false;
2798 for (unsigned i = 1; i < len; ++i)
2799 if (n_occurrences (',', constraints[i]) != nalternatives)
2801 error ("operand constraints for %<asm%> differ "
2802 "in number of alternatives");
2803 return false;
2806 return true;
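/* Illustrative sketch (exposition only): every operand's constraint string
   must describe the same number of comma-separated alternatives.  Below, both
   the output ("=r,m") and the input ("ri,r") list two alternatives, so the
   check passes; writing the input as plain "r" would be diagnosed as
   differing in the number of alternatives.  */
#if 0
void
nalternatives_example (int *p, int v)
{
  __asm__ ("" : "=r,m" (*p) : "ri,r" (v));
}
#endif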
2809 /* Check for overlap between registers marked in CLOBBERED_REGS and
2810 anything inappropriate in T. Emit an error and return true if an
2811 overlap is found, false if T is OK. */
2813 static bool
2814 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2816 /* Conflicts between asm-declared register variables and the clobber
2817 list are not allowed. */
2818 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2820 if (overlap)
2822 error ("%<asm%> specifier for variable %qE conflicts with "
2823 "%<asm%> clobber list",
2824 DECL_NAME (overlap));
2826 /* Reset registerness to stop multiple errors from being emitted
2827 for a single variable. */
2828 DECL_REGISTER (overlap) = 0;
2829 return true;
2832 return false;
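/* Illustrative sketch (exposition only, assuming an x86 target): an operand
   declared to live in a specific hard register must not also appear in the
   clobber list; the asm below is rejected with the "conflicts with asm
   clobber list" error emitted above.  */
#if 0
void
clobber_conflict_example (void)
{
  register int r __asm__ ("ebx") = 0;
  __asm__ ("" : "+r" (r) : : "ebx");  /* Error: operand vs. clobber.  */
}
#endif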
2835 /* Check that the given REGNO spanning NREGS is a valid
2836 asm clobber operand. Some HW registers cannot be
2837 saved/restored, hence they should not be clobbered by
2838 asm statements. */
2839 static bool
2840 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2842 bool is_valid = true;
2843 HARD_REG_SET regset;
2845 CLEAR_HARD_REG_SET (regset);
2847 add_range_to_hard_reg_set (&regset, regno, nregs);
2849 /* Clobbering the PIC register is an error. */
2850 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2851 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2853 /* ??? Diagnose during gimplification? */
2854 error ("PIC register clobbered by %qs in %<asm%>", regname);
2855 is_valid = false;
2857 else if (!in_hard_reg_set_p
2858 (accessible_reg_set, reg_raw_mode[regno], regno))
2860 /* ??? Diagnose during gimplification? */
2861 error ("the register %qs cannot be clobbered in %<asm%>"
2862 " for the current target", regname);
2863 is_valid = false;
2866 /* Clobbering the stack pointer register is deprecated. GCC expects
2867 the value of the stack pointer after an asm statement to be the same
2868 as it was before, so no asm can validly clobber the stack pointer in
2869 the usual sense. Adding the stack pointer to the clobber list has
2870 traditionally had some undocumented and somewhat obscure side-effects. */
2871 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
2873 crtl->sp_is_clobbered_by_asm = true;
2874 if (warning (OPT_Wdeprecated, "listing the stack pointer register"
2875 " %qs in a clobber list is deprecated", regname))
2876 inform (input_location, "the value of the stack pointer after"
2877 " an %<asm%> statement must be the same as it was before"
2878 " the statement");
2881 return is_valid;
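/* Illustrative sketch (exposition only, assuming x86-64 register names): an
   asm that lists the stack pointer in its clobbers is still accepted but now
   triggers the -Wdeprecated warning above, while naming the PIC register or a
   register that is not accessible on the current target is a hard error.  */
#if 0
void
sp_clobber_example (void)
{
  __asm__ volatile ("" : : : "rsp");  /* Deprecated: stack pointer clobber.  */
}
#endif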
2884 /* Generate RTL for an asm statement with arguments.
2885 STRING is the instruction template.
2886 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2887 Each output or input has an expression in the TREE_VALUE and
2888 a tree list in TREE_PURPOSE which in turn contains a constraint
2889 name in TREE_VALUE (or NULL_TREE) and a constraint string
2890 in TREE_PURPOSE.
2891 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2892 that is clobbered by this insn.
2894 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2895 should be the fallthru basic block of the asm goto.
2897 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2898 Some elements of OUTPUTS may be replaced with trees representing temporary
2899 values. The caller should copy those temporary values to the originally
2900 specified lvalues.
2902 VOL nonzero means the insn is volatile; don't optimize it. */
2904 static void
2905 expand_asm_stmt (gasm *stmt)
2907 class save_input_location
2909 location_t old;
2911 public:
2912 explicit save_input_location(location_t where)
2914 old = input_location;
2915 input_location = where;
2918 ~save_input_location()
2920 input_location = old;
2924 location_t locus = gimple_location (stmt);
2926 if (gimple_asm_input_p (stmt))
2928 const char *s = gimple_asm_string (stmt);
2929 tree string = build_string (strlen (s), s);
2930 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2931 return;
2934 /* There are some legacy diagnostics in here, and this also avoids a
2935 sixth parameter to targetm.md_asm_adjust. */
2936 save_input_location s_i_l(locus);
2938 unsigned noutputs = gimple_asm_noutputs (stmt);
2939 unsigned ninputs = gimple_asm_ninputs (stmt);
2940 unsigned nlabels = gimple_asm_nlabels (stmt);
2941 unsigned i;
2943 /* ??? Diagnose during gimplification? */
2944 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2946 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2947 return;
2950 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2951 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2952 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2954 /* Copy the gimple vectors into new vectors that we can manipulate. */
2956 output_tvec.safe_grow (noutputs, true);
2957 input_tvec.safe_grow (ninputs, true);
2958 constraints.safe_grow (noutputs + ninputs, true);
2960 for (i = 0; i < noutputs; ++i)
2962 tree t = gimple_asm_output_op (stmt, i);
2963 output_tvec[i] = TREE_VALUE (t);
2964 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2966 for (i = 0; i < ninputs; i++)
2968 tree t = gimple_asm_input_op (stmt, i);
2969 input_tvec[i] = TREE_VALUE (t);
2970 constraints[i + noutputs]
2971 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2974 /* ??? Diagnose during gimplification? */
2975 if (! check_operand_nalternatives (constraints))
2976 return;
2978 /* Count the number of meaningful clobbered registers, ignoring what
2979 we would ignore later. */
2980 auto_vec<rtx> clobber_rvec;
2981 HARD_REG_SET clobbered_regs;
2982 CLEAR_HARD_REG_SET (clobbered_regs);
2984 if (unsigned n = gimple_asm_nclobbers (stmt))
2986 clobber_rvec.reserve (n);
2987 for (i = 0; i < n; i++)
2989 tree t = gimple_asm_clobber_op (stmt, i);
2990 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2991 int nregs, j;
2993 j = decode_reg_name_and_count (regname, &nregs);
2994 if (j < 0)
2996 if (j == -2)
2998 /* ??? Diagnose during gimplification? */
2999 error ("unknown register name %qs in %<asm%>", regname);
3001 else if (j == -4)
3003 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3004 clobber_rvec.safe_push (x);
3006 else
3008 /* Otherwise we should have -1 == empty string
3009 or -3 == cc, which is not a register. */
3010 gcc_assert (j == -1 || j == -3);
3013 else
3014 for (int reg = j; reg < j + nregs; reg++)
3016 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3017 return;
3019 SET_HARD_REG_BIT (clobbered_regs, reg);
3020 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3021 clobber_rvec.safe_push (x);
3026 /* First pass over inputs and outputs checks validity and sets
3027 mark_addressable if needed. */
3028 /* ??? Diagnose during gimplification? */
3030 for (i = 0; i < noutputs; ++i)
3032 tree val = output_tvec[i];
3033 tree type = TREE_TYPE (val);
3034 const char *constraint;
3035 bool is_inout;
3036 bool allows_reg;
3037 bool allows_mem;
3039 /* Try to parse the output constraint. If that fails, there's
3040 no point in going further. */
3041 constraint = constraints[i];
3042 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3043 &allows_mem, &allows_reg, &is_inout))
3044 return;
3046 /* If the output is a hard register, verify it doesn't conflict with
3047 any other operand's possible hard register use. */
3048 if (DECL_P (val)
3049 && REG_P (DECL_RTL (val))
3050 && HARD_REGISTER_P (DECL_RTL (val)))
3052 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3053 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3054 unsigned long match;
3056 /* Verify the other outputs do not use the same hard register. */
3057 for (j = i + 1; j < noutputs; ++j)
3058 if (DECL_P (output_tvec[j])
3059 && REG_P (DECL_RTL (output_tvec[j]))
3060 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3061 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3062 error ("invalid hard register usage between output operands");
3064 /* Verify matching constraint operands use the same hard register
3065 and that the non-matching constraint operands do not use the same
3066 hard register if the output is an early clobber operand. */
3067 for (j = 0; j < ninputs; ++j)
3068 if (DECL_P (input_tvec[j])
3069 && REG_P (DECL_RTL (input_tvec[j]))
3070 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3072 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3073 switch (*constraints[j + noutputs])
3075 case '0': case '1': case '2': case '3': case '4':
3076 case '5': case '6': case '7': case '8': case '9':
3077 match = strtoul (constraints[j + noutputs], NULL, 10);
3078 break;
3079 default:
3080 match = ULONG_MAX;
3081 break;
3083 if (i == match
3084 && output_hregno != input_hregno)
3085 error ("invalid hard register usage between output operand "
3086 "and matching constraint operand");
3087 else if (early_clobber_p
3088 && i != match
3089 && output_hregno == input_hregno)
3090 error ("invalid hard register usage between earlyclobber "
3091 "operand and input operand");
3095 if (! allows_reg
3096 && (allows_mem
3097 || is_inout
3098 || (DECL_P (val)
3099 && REG_P (DECL_RTL (val))
3100 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3101 mark_addressable (val);
3104 for (i = 0; i < ninputs; ++i)
3106 bool allows_reg, allows_mem;
3107 const char *constraint;
3109 constraint = constraints[i + noutputs];
3110 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3111 constraints.address (),
3112 &allows_mem, &allows_reg))
3113 return;
3115 if (! allows_reg && allows_mem)
3116 mark_addressable (input_tvec[i]);
3119 /* Second pass evaluates arguments. */
3121 /* Make sure stack is consistent for asm goto. */
3122 if (nlabels > 0)
3123 do_pending_stack_adjust ();
3124 int old_generating_concat_p = generating_concat_p;
3126 /* Vector of RTX's of evaluated output operands. */
3127 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3128 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3129 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3131 output_rvec.safe_grow (noutputs, true);
3133 for (i = 0; i < noutputs; ++i)
3135 tree val = output_tvec[i];
3136 tree type = TREE_TYPE (val);
3137 bool is_inout, allows_reg, allows_mem, ok;
3138 rtx op;
3140 ok = parse_output_constraint (&constraints[i], i, ninputs,
3141 noutputs, &allows_mem, &allows_reg,
3142 &is_inout);
3143 gcc_assert (ok);
3145 /* If an output operand is not a decl or indirect ref and our constraint
3146 allows a register, make a temporary to act as an intermediate.
3147 Make the asm insn write into that, then we will copy it to
3148 the real output operand. Likewise for promoted variables. */
3150 generating_concat_p = 0;
3152 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3153 || (DECL_P (val)
3154 && (allows_mem || REG_P (DECL_RTL (val)))
3155 && ! (REG_P (DECL_RTL (val))
3156 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3157 || ! allows_reg
3158 || is_inout
3159 || TREE_ADDRESSABLE (type))
3161 op = expand_expr (val, NULL_RTX, VOIDmode,
3162 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3163 if (MEM_P (op))
3164 op = validize_mem (op);
3166 if (! allows_reg && !MEM_P (op))
3167 error ("output number %d not directly addressable", i);
3168 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3169 || GET_CODE (op) == CONCAT)
3171 rtx old_op = op;
3172 op = gen_reg_rtx (GET_MODE (op));
3174 generating_concat_p = old_generating_concat_p;
3176 if (is_inout)
3177 emit_move_insn (op, old_op);
3179 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3180 emit_move_insn (old_op, op);
3181 after_rtl_seq = get_insns ();
3182 after_rtl_end = get_last_insn ();
3183 end_sequence ();
3186 else
3188 op = assign_temp (type, 0, 1);
3189 op = validize_mem (op);
3190 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3191 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3193 generating_concat_p = old_generating_concat_p;
3195 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3196 expand_assignment (val, make_tree (type, op), false);
3197 after_rtl_seq = get_insns ();
3198 after_rtl_end = get_last_insn ();
3199 end_sequence ();
3201 output_rvec[i] = op;
3203 if (is_inout)
3204 inout_opnum.safe_push (i);
3207 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3208 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3210 input_rvec.safe_grow (ninputs, true);
3211 input_mode.safe_grow (ninputs, true);
3213 generating_concat_p = 0;
3215 for (i = 0; i < ninputs; ++i)
3217 tree val = input_tvec[i];
3218 tree type = TREE_TYPE (val);
3219 bool allows_reg, allows_mem, ok;
3220 const char *constraint;
3221 rtx op;
3223 constraint = constraints[i + noutputs];
3224 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3225 constraints.address (),
3226 &allows_mem, &allows_reg);
3227 gcc_assert (ok);
3229 /* EXPAND_INITIALIZER will not generate code for valid initializer
3230 constants, but will still generate code for other types of operand.
3231 This is the behavior we want for constant constraints. */
3232 op = expand_expr (val, NULL_RTX, VOIDmode,
3233 allows_reg ? EXPAND_NORMAL
3234 : allows_mem ? EXPAND_MEMORY
3235 : EXPAND_INITIALIZER);
3237 /* Never pass a CONCAT to an ASM. */
3238 if (GET_CODE (op) == CONCAT)
3239 op = force_reg (GET_MODE (op), op);
3240 else if (MEM_P (op))
3241 op = validize_mem (op);
3243 if (asm_operand_ok (op, constraint, NULL) <= 0)
3245 if (allows_reg && TYPE_MODE (type) != BLKmode)
3246 op = force_reg (TYPE_MODE (type), op);
3247 else if (!allows_mem)
3248 warning (0, "%<asm%> operand %d probably does not match "
3249 "constraints",
3250 i + noutputs);
3251 else if (MEM_P (op))
3253 /* We won't recognize either volatile memory or memory
3254 with a queued address as an available memory_operand
3255 at this point. Ignore it: clearly this *is* a memory. */
3257 else
3258 gcc_unreachable ();
3260 input_rvec[i] = op;
3261 input_mode[i] = TYPE_MODE (type);
3264 /* For in-out operands, copy output rtx to input rtx. */
3265 unsigned ninout = inout_opnum.length();
3266 for (i = 0; i < ninout; i++)
3268 int j = inout_opnum[i];
3269 rtx o = output_rvec[j];
3271 input_rvec.safe_push (o);
3272 input_mode.safe_push (GET_MODE (o));
3274 char buffer[16];
3275 sprintf (buffer, "%d", j);
3276 constraints.safe_push (ggc_strdup (buffer));
3278 ninputs += ninout;
3280 /* Sometimes we wish to automatically clobber registers across an asm.
3281 Case in point is when the i386 backend moved from cc0 to a hard reg --
3282 maintaining source-level compatibility means automatically clobbering
3283 the flags register. */
3284 rtx_insn *after_md_seq = NULL;
3285 if (targetm.md_asm_adjust)
3286 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3287 constraints, clobber_rvec,
3288 clobbered_regs);
3290 /* Do not allow the hook to change the output and input count,
3291 lest it mess up the operand numbering. */
3292 gcc_assert (output_rvec.length() == noutputs);
3293 gcc_assert (input_rvec.length() == ninputs);
3294 gcc_assert (constraints.length() == noutputs + ninputs);
3296 /* But it certainly can adjust the clobbers. */
3297 unsigned nclobbers = clobber_rvec.length ();
3299 /* Third pass checks for easy conflicts. */
3300 /* ??? Why are we doing this on trees instead of rtx. */
3302 bool clobber_conflict_found = 0;
3303 for (i = 0; i < noutputs; ++i)
3304 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3305 clobber_conflict_found = 1;
3306 for (i = 0; i < ninputs - ninout; ++i)
3307 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3308 clobber_conflict_found = 1;
3310 /* Make vectors for the expression-rtx, constraint strings,
3311 and named operands. */
3313 rtvec argvec = rtvec_alloc (ninputs);
3314 rtvec constraintvec = rtvec_alloc (ninputs);
3315 rtvec labelvec = rtvec_alloc (nlabels);
3317 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3318 : GET_MODE (output_rvec[0])),
3319 ggc_strdup (gimple_asm_string (stmt)),
3320 "", 0, argvec, constraintvec,
3321 labelvec, locus);
3322 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3324 for (i = 0; i < ninputs; ++i)
3326 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3327 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3328 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3329 constraints[i + noutputs],
3330 locus);
3333 /* Copy labels to the vector. */
3334 rtx_code_label *fallthru_label = NULL;
3335 if (nlabels > 0)
3337 basic_block fallthru_bb = NULL;
3338 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3339 if (fallthru)
3340 fallthru_bb = fallthru->dest;
3342 for (i = 0; i < nlabels; ++i)
3344 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3345 rtx_insn *r;
3346 /* If asm goto has any labels in the fallthru basic block, use
3347 a label that we emit immediately after the asm goto. Expansion
3348 may insert further instructions into the same basic block after
3349 asm goto and if we don't do this, insertion of instructions on
3350 the fallthru edge might misbehave. See PR58670. */
3351 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3353 if (fallthru_label == NULL_RTX)
3354 fallthru_label = gen_label_rtx ();
3355 r = fallthru_label;
3357 else
3358 r = label_rtx (label);
3359 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3363 /* Now, for each output, construct an rtx
3364 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3365 ARGVEC CONSTRAINTS OPNAMES))
3366 If there is more than one, put them inside a PARALLEL. */
3368 if (nlabels > 0 && nclobbers == 0)
3370 gcc_assert (noutputs == 0);
3371 emit_jump_insn (body);
3373 else if (noutputs == 0 && nclobbers == 0)
3375 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3376 emit_insn (body);
3378 else if (noutputs == 1 && nclobbers == 0)
3380 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3381 emit_insn (gen_rtx_SET (output_rvec[0], body));
3383 else
3385 rtx obody = body;
3386 int num = noutputs;
3388 if (num == 0)
3389 num = 1;
3391 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3393 /* For each output operand, store a SET. */
3394 for (i = 0; i < noutputs; ++i)
3396 rtx src, o = output_rvec[i];
3397 if (i == 0)
3399 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3400 src = obody;
3402 else
3404 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3405 ASM_OPERANDS_TEMPLATE (obody),
3406 constraints[i], i, argvec,
3407 constraintvec, labelvec, locus);
3408 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3410 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3413 /* If there are no outputs (but there are some clobbers)
3414 store the bare ASM_OPERANDS into the PARALLEL. */
3415 if (i == 0)
3416 XVECEXP (body, 0, i++) = obody;
3418 /* Store (clobber REG) for each clobbered register specified. */
3419 for (unsigned j = 0; j < nclobbers; ++j)
3421 rtx clobbered_reg = clobber_rvec[j];
3423 /* Do a sanity check for overlap between clobbers and, respectively,
3424 inputs and outputs that hasn't been handled. Such overlap
3425 should have been detected and reported above. */
3426 if (!clobber_conflict_found && REG_P (clobbered_reg))
3428 /* We test the old body (obody) contents to avoid
3429 tripping over the under-construction body. */
3430 for (unsigned k = 0; k < noutputs; ++k)
3431 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3432 internal_error ("%<asm%> clobber conflict with "
3433 "output operand");
3435 for (unsigned k = 0; k < ninputs - ninout; ++k)
3436 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3437 internal_error ("%<asm%> clobber conflict with "
3438 "input operand");
3441 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3444 if (nlabels > 0)
3445 emit_jump_insn (body);
3446 else
3447 emit_insn (body);
3450 generating_concat_p = old_generating_concat_p;
3452 if (fallthru_label)
3453 emit_label (fallthru_label);
3455 if (after_md_seq)
3456 emit_insn (after_md_seq);
3457 if (after_rtl_seq)
3458 emit_insn (after_rtl_seq);
3460 free_temp_slots ();
3461 crtl->has_asm_statement = 1;
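/* Illustrative sketch (exposition only): an asm goto whose label lives in the
   fallthrough block exercises the PR58670 handling above; the branch target
   is replaced by a label emitted immediately after the asm so that later
   insertions on the fallthru edge stay well formed.  */
#if 0
int
asm_goto_example (int x)
{
  asm goto ("" : : "r" (x) : : out);  /* May branch to "out".  */
 out:
  return x;
}
#endif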
3464 /* Emit code to jump to the address
3465 specified by the pointer expression EXP. */
3467 static void
3468 expand_computed_goto (tree exp)
3470 rtx x = expand_normal (exp);
3472 do_pending_stack_adjust ();
3473 emit_indirect_jump (x);
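/* Illustrative sketch (exposition only): the GNU "computed goto" extension is
   what reaches this expander; the address of a label is taken with && and
   jumped to indirectly.  */
#if 0
int
computed_goto_example (int i)
{
  static void *dispatch[] = { &&l0, &&l1 };
  goto *dispatch[i & 1];
 l0:
  return 0;
 l1:
  return 1;
}
#endif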
3476 /* Generate RTL code for a `goto' statement with target label LABEL.
3477 LABEL should be a LABEL_DECL tree node that was or will later be
3478 defined with `expand_label'. */
3480 static void
3481 expand_goto (tree label)
3483 if (flag_checking)
3485 /* Check for a nonlocal goto to a containing function. Should have
3486 gotten translated to __builtin_nonlocal_goto. */
3487 tree context = decl_function_context (label);
3488 gcc_assert (!context || context == current_function_decl);
3491 emit_jump (jump_target_rtx (label));
3494 /* Output a return with no value. */
3496 static void
3497 expand_null_return_1 (void)
3499 clear_pending_stack_adjust ();
3500 do_pending_stack_adjust ();
3501 emit_jump (return_label);
3504 /* Generate RTL to return from the current function, with no value.
3505 (That is, we do not do anything about returning any value.) */
3507 void
3508 expand_null_return (void)
3510 /* If this function was declared to return a value, but we
3511 didn't, clobber the return registers so that they are not
3512 propagated live to the rest of the function. */
3513 clobber_return_register ();
3515 expand_null_return_1 ();
3518 /* Generate RTL to return from the current function, with value VAL. */
3520 static void
3521 expand_value_return (rtx val)
3523 /* Copy the value to the return location unless it's already there. */
3525 tree decl = DECL_RESULT (current_function_decl);
3526 rtx return_reg = DECL_RTL (decl);
3527 if (return_reg != val)
3529 tree funtype = TREE_TYPE (current_function_decl);
3530 tree type = TREE_TYPE (decl);
3531 int unsignedp = TYPE_UNSIGNED (type);
3532 machine_mode old_mode = DECL_MODE (decl);
3533 machine_mode mode;
3534 if (DECL_BY_REFERENCE (decl))
3535 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3536 else
3537 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3539 if (mode != old_mode)
3540 val = convert_modes (mode, old_mode, val, unsignedp);
3542 if (GET_CODE (return_reg) == PARALLEL)
3543 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3544 else
3545 emit_move_insn (return_reg, val);
3548 expand_null_return_1 ();
3551 /* Generate RTL to evaluate the expression RETVAL and return it
3552 from the current function. */
3554 static void
3555 expand_return (tree retval)
3557 rtx result_rtl;
3558 rtx val = 0;
3559 tree retval_rhs;
3561 /* If function wants no value, give it none. */
3562 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3564 expand_normal (retval);
3565 expand_null_return ();
3566 return;
3569 if (retval == error_mark_node)
3571 /* Treat this like a return of no value from a function that
3572 returns a value. */
3573 expand_null_return ();
3574 return;
3576 else if ((TREE_CODE (retval) == MODIFY_EXPR
3577 || TREE_CODE (retval) == INIT_EXPR)
3578 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3579 retval_rhs = TREE_OPERAND (retval, 1);
3580 else
3581 retval_rhs = retval;
3583 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3585 /* If we are returning the RESULT_DECL, then the value has already
3586 been stored into it, so we don't have to do anything special. */
3587 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3588 expand_value_return (result_rtl);
3590 /* If the result is an aggregate that is being returned in one (or more)
3591 registers, load the registers here. */
3593 else if (retval_rhs != 0
3594 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3595 && REG_P (result_rtl))
3597 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3598 if (val)
3600 /* Use the mode of the result value on the return register. */
3601 PUT_MODE (result_rtl, GET_MODE (val));
3602 expand_value_return (val);
3604 else
3605 expand_null_return ();
3607 else if (retval_rhs != 0
3608 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3609 && (REG_P (result_rtl)
3610 || (GET_CODE (result_rtl) == PARALLEL)))
3612 /* Compute the return value into a temporary (usually a pseudo reg). */
3613 val
3614 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3615 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3616 val = force_not_mem (val);
3617 expand_value_return (val);
3619 else
3621 /* No hard reg used; calculate value into hard return reg. */
3622 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3623 expand_value_return (result_rtl);
3627 /* Expand a clobber of LHS. If LHS is stored in a multi-part
3628 register, tell the rtl optimizers that its value is no longer
3629 needed. */
3631 static void
3632 expand_clobber (tree lhs)
3634 if (DECL_P (lhs))
3636 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3637 if (decl_rtl && REG_P (decl_rtl))
3639 machine_mode decl_mode = GET_MODE (decl_rtl);
3640 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3641 REGMODE_NATURAL_SIZE (decl_mode)))
3642 emit_clobber (decl_rtl);
3647 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3648 STMT that doesn't require special handling for outgoing edges. That
3649 is, no tailcalls and no GIMPLE_COND. */
3651 static void
3652 expand_gimple_stmt_1 (gimple *stmt)
3654 tree op0;
3656 set_curr_insn_location (gimple_location (stmt));
3658 switch (gimple_code (stmt))
3660 case GIMPLE_GOTO:
3661 op0 = gimple_goto_dest (stmt);
3662 if (TREE_CODE (op0) == LABEL_DECL)
3663 expand_goto (op0);
3664 else
3665 expand_computed_goto (op0);
3666 break;
3667 case GIMPLE_LABEL:
3668 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3669 break;
3670 case GIMPLE_NOP:
3671 case GIMPLE_PREDICT:
3672 break;
3673 case GIMPLE_SWITCH:
3675 gswitch *swtch = as_a <gswitch *> (stmt);
3676 if (gimple_switch_num_labels (swtch) == 1)
3677 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3678 else
3679 expand_case (swtch);
3681 break;
3682 case GIMPLE_ASM:
3683 expand_asm_stmt (as_a <gasm *> (stmt));
3684 break;
3685 case GIMPLE_CALL:
3686 expand_call_stmt (as_a <gcall *> (stmt));
3687 break;
3689 case GIMPLE_RETURN:
3691 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3693 /* If a return doesn't have a location, it very likely represents
3694 multiple user returns so we cannot let it inherit the location
3695 of the last statement of the previous basic block in RTL. */
3696 if (!gimple_has_location (stmt))
3697 set_curr_insn_location (cfun->function_end_locus);
3699 if (op0 && op0 != error_mark_node)
3701 tree result = DECL_RESULT (current_function_decl);
3703 /* If we are not returning the current function's RESULT_DECL,
3704 build an assignment to it. */
3705 if (op0 != result)
3707 /* I believe that a function's RESULT_DECL is unique. */
3708 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3710 /* ??? We'd like to use simply expand_assignment here,
3711 but this fails if the value is of BLKmode but the return
3712 decl is a register. expand_return has special handling
3713 for this combination, which eventually should move
3714 to common code. See comments there. Until then, let's
3715 build a modify expression :-/ */
3716 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3717 result, op0);
3721 if (!op0)
3722 expand_null_return ();
3723 else
3724 expand_return (op0);
3726 break;
3728 case GIMPLE_ASSIGN:
3730 gassign *assign_stmt = as_a <gassign *> (stmt);
3731 tree lhs = gimple_assign_lhs (assign_stmt);
3733 /* Tree expand used to fiddle with |= and &= of two bitfield
3734 COMPONENT_REFs here. This can't happen with gimple; the LHS
3735 of binary assigns must be a gimple reg. */
3737 if (TREE_CODE (lhs) != SSA_NAME
3738 || get_gimple_rhs_class (gimple_expr_code (stmt))
3739 == GIMPLE_SINGLE_RHS)
3741 tree rhs = gimple_assign_rhs1 (assign_stmt);
3742 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3743 == GIMPLE_SINGLE_RHS);
3744 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3745 /* Do not put locations on possibly shared trees. */
3746 && !is_gimple_min_invariant (rhs))
3747 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3748 if (TREE_CLOBBER_P (rhs))
3749 /* This is a clobber to mark the going out of scope for
3750 this LHS. */
3751 expand_clobber (lhs);
3752 else
3753 expand_assignment (lhs, rhs,
3754 gimple_assign_nontemporal_move_p (
3755 assign_stmt));
3757 else
3759 rtx target, temp;
3760 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3761 struct separate_ops ops;
3762 bool promoted = false;
3764 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3765 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3766 promoted = true;
3768 ops.code = gimple_assign_rhs_code (assign_stmt);
3769 ops.type = TREE_TYPE (lhs);
3770 switch (get_gimple_rhs_class (ops.code))
3772 case GIMPLE_TERNARY_RHS:
3773 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3774 /* Fallthru */
3775 case GIMPLE_BINARY_RHS:
3776 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3777 /* Fallthru */
3778 case GIMPLE_UNARY_RHS:
3779 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3780 break;
3781 default:
3782 gcc_unreachable ();
3784 ops.location = gimple_location (stmt);
3786 /* If we want to use a nontemporal store, force the value into
3787 a register first. If we store into a promoted register,
3788 don't expand directly into target. */
3789 temp = nontemporal || promoted ? NULL_RTX : target;
3790 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3791 EXPAND_NORMAL);
3793 if (temp == target)
3795 else if (promoted)
3797 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3798 /* If TEMP is a VOIDmode constant, use convert_modes to make
3799 sure that we properly convert it. */
3800 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3802 temp = convert_modes (GET_MODE (target),
3803 TYPE_MODE (ops.type),
3804 temp, unsignedp);
3805 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3806 GET_MODE (target), temp, unsignedp);
3809 convert_move (SUBREG_REG (target), temp, unsignedp);
3811 else if (nontemporal && emit_storent_insn (target, temp))
3813 else
3815 temp = force_operand (temp, target);
3816 if (temp != target)
3817 emit_move_insn (target, temp);
3821 break;
3823 default:
3824 gcc_unreachable ();
3828 /* Expand one gimple statement STMT and return the last RTL instruction
3829 before any of the newly generated ones.
3831 In addition to generating the necessary RTL instructions this also
3832 sets REG_EH_REGION notes if necessary and sets the current source
3833 location for diagnostics. */
3835 static rtx_insn *
3836 expand_gimple_stmt (gimple *stmt)
3838 location_t saved_location = input_location;
3839 rtx_insn *last = get_last_insn ();
3840 int lp_nr;
3842 gcc_assert (cfun);
3844 /* We need to save and restore the current source location so that errors
3845 discovered during expansion are emitted with the right location. But
3846 it would be better if the diagnostic routines used the source location
3847 embedded in the tree nodes rather than globals. */
3848 if (gimple_has_location (stmt))
3849 input_location = gimple_location (stmt);
3851 expand_gimple_stmt_1 (stmt);
3853 /* Free any temporaries used to evaluate this statement. */
3854 free_temp_slots ();
3856 input_location = saved_location;
3858 /* Mark all insns that may trap. */
3859 lp_nr = lookup_stmt_eh_lp (stmt);
3860 if (lp_nr)
3862 rtx_insn *insn;
3863 for (insn = next_real_insn (last); insn;
3864 insn = next_real_insn (insn))
3866 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3867 /* If we want exceptions for non-call insns, any
3868 may_trap_p instruction may throw. */
3869 && GET_CODE (PATTERN (insn)) != CLOBBER
3870 && GET_CODE (PATTERN (insn)) != USE
3871 && insn_could_throw_p (insn))
3872 make_reg_eh_region_note (insn, 0, lp_nr);
3876 return last;
3879 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3880 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3881 generated a tail call (something that might be denied by the ABI
3882 rules governing the call; see calls.c).
3884 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3885 can still reach the rest of BB. The case here is __builtin_sqrt,
3886 where the NaN result goes through the external function (with a
3887 tailcall) and the normal result happens via a sqrt instruction. */
3889 static basic_block
3890 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3892 rtx_insn *last2, *last;
3893 edge e;
3894 edge_iterator ei;
3895 profile_probability probability;
3897 last2 = last = expand_gimple_stmt (stmt);
3899 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3900 if (CALL_P (last) && SIBLING_CALL_P (last))
3901 goto found;
3903 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3905 *can_fallthru = true;
3906 return NULL;
3908 found:
3909 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3910 Any instructions emitted here are about to be deleted. */
3911 do_pending_stack_adjust ();
3913 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3914 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3915 EH or abnormal edges, we shouldn't have created a tail call in
3916 the first place. So it seems to me we should just be removing
3917 all edges here, or redirecting the existing fallthru edge to
3918 the exit block. */
3920 probability = profile_probability::never ();
3922 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3924 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3926 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3927 e->dest->count -= e->count ();
3928 probability += e->probability;
3929 remove_edge (e);
3931 else
3932 ei_next (&ei);
3935 /* This is somewhat ugly: the call_expr expander often emits instructions
3936 after the sibcall (to perform the function return). These confuse the
3937 find_many_sub_basic_blocks code, so we need to get rid of these. */
3938 last = NEXT_INSN (last);
3939 gcc_assert (BARRIER_P (last));
3941 *can_fallthru = false;
3942 while (NEXT_INSN (last))
3944 /* For instance, the sqrt builtin expander expands an if with a
3945 sibcall in the `then' arm and a label for the `else' arm. */
3946 if (LABEL_P (NEXT_INSN (last)))
3948 *can_fallthru = true;
3949 break;
3951 delete_insn (NEXT_INSN (last));
3954 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3955 | EDGE_SIBCALL);
3956 e->probability = probability;
3957 BB_END (bb) = last;
3958 update_bb_for_insn (bb);
3960 if (NEXT_INSN (last))
3962 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3964 last = BB_END (bb);
3965 if (BARRIER_P (last))
3966 BB_END (bb) = PREV_INSN (last);
3969 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3971 return bb;
3974 /* Return the difference between the floor and the truncated result of
3975 a signed division by OP1 with remainder MOD. */
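/* For example, -7 / 2 truncates to -3 while its floor is -4, so the
   adjustment is -1; if the remainder is zero or the operands have the
   same sign, the adjustment is 0. */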
3976 static rtx
3977 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3979 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3980 return gen_rtx_IF_THEN_ELSE
3981 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3982 gen_rtx_IF_THEN_ELSE
3983 (mode, gen_rtx_LT (BImode,
3984 gen_rtx_DIV (mode, op1, mod),
3985 const0_rtx),
3986 constm1_rtx, const0_rtx),
3987 const0_rtx);
3990 /* Return the difference between the ceil and the truncated result of
3991 a signed division by OP1 with remainder MOD. */
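/* For example, 7 / 2 truncates to 3 while its ceiling is 4, so the
   adjustment is 1; if the remainder is zero or the operands differ in
   sign, the adjustment is 0. */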
3992 static rtx
3993 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3995 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3996 return gen_rtx_IF_THEN_ELSE
3997 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3998 gen_rtx_IF_THEN_ELSE
3999 (mode, gen_rtx_GT (BImode,
4000 gen_rtx_DIV (mode, op1, mod),
4001 const0_rtx),
4002 const1_rtx, const0_rtx),
4003 const0_rtx);
4006 /* Return the difference between the ceil and the truncated result of
4007 an unsigned division by OP1 with remainder MOD. */
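/* For example, 7u / 2u truncates to 3 while its ceiling is 4, so the
   adjustment is 1 whenever the remainder is nonzero and 0 otherwise. */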
4008 static rtx
4009 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4011 /* (mod != 0 ? 1 : 0) */
4012 return gen_rtx_IF_THEN_ELSE
4013 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4014 const1_rtx, const0_rtx);
4017 /* Return the difference between the rounded and the truncated result
4018 of a signed division by OP1 with remainder MOD. Halfway cases are
4019 rounded away from zero, rather than to the nearest even number. */
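/* For example, 7 / 2 = 3.5 rounds (away from zero) to 4 but truncates
   to 3, so the adjustment is 1; -7 / 2 rounds to -4 and truncates to
   -3, giving an adjustment of -1. */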
4020 static rtx
4021 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4023 /* (abs (mod) >= abs (op1) - abs (mod)
4024 ? (op1 / mod > 0 ? 1 : -1)
4025 : 0) */
4026 return gen_rtx_IF_THEN_ELSE
4027 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4028 gen_rtx_MINUS (mode,
4029 gen_rtx_ABS (mode, op1),
4030 gen_rtx_ABS (mode, mod))),
4031 gen_rtx_IF_THEN_ELSE
4032 (mode, gen_rtx_GT (BImode,
4033 gen_rtx_DIV (mode, op1, mod),
4034 const0_rtx),
4035 const1_rtx, constm1_rtx),
4036 const0_rtx);
4039 /* Return the difference between the rounded and the truncated result
4040 of an unsigned division by OP1 with remainder MOD. Halfway cases
4041 are rounded away from zero, rather than to the nearest even
4042 number. */
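/* For example, 7u / 2u = 3.5 rounds up to 4 but truncates to 3, so the
   adjustment is 1; 9u / 4u = 2.25 truncates and rounds to 2, so the
   adjustment is 0. */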
4043 static rtx
4044 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4046 /* (mod >= op1 - mod ? 1 : 0) */
4047 return gen_rtx_IF_THEN_ELSE
4048 (mode, gen_rtx_GE (BImode, mod,
4049 gen_rtx_MINUS (mode, op1, mod)),
4050 const1_rtx, const0_rtx);
4053 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4054 any rtl. */
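/* May return NULL when the required conversion cannot be expressed in
   debug insns (see the ptr_extend case below). */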
4056 static rtx
4057 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4058 addr_space_t as)
4060 #ifndef POINTERS_EXTEND_UNSIGNED
4061 gcc_assert (mode == Pmode
4062 || mode == targetm.addr_space.address_mode (as));
4063 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4064 #else
4065 rtx temp;
4067 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4069 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4070 return x;
4072 /* X must have some form of address mode already. */
4073 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4074 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4075 x = lowpart_subreg (mode, x, xmode);
4076 else if (POINTERS_EXTEND_UNSIGNED > 0)
4077 x = gen_rtx_ZERO_EXTEND (mode, x);
4078 else if (!POINTERS_EXTEND_UNSIGNED)
4079 x = gen_rtx_SIGN_EXTEND (mode, x);
4080 else
4082 switch (GET_CODE (x))
4084 case SUBREG:
4085 if ((SUBREG_PROMOTED_VAR_P (x)
4086 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4087 || (GET_CODE (SUBREG_REG (x)) == PLUS
4088 && REG_P (XEXP (SUBREG_REG (x), 0))
4089 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4090 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4091 && GET_MODE (SUBREG_REG (x)) == mode)
4092 return SUBREG_REG (x);
4093 break;
4094 case LABEL_REF:
4095 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4096 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4097 return temp;
4098 case SYMBOL_REF:
4099 temp = shallow_copy_rtx (x);
4100 PUT_MODE (temp, mode);
4101 return temp;
4102 case CONST:
4103 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4104 if (temp)
4105 temp = gen_rtx_CONST (mode, temp);
4106 return temp;
4107 case PLUS:
4108 case MINUS:
4109 if (CONST_INT_P (XEXP (x, 1)))
4111 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4112 if (temp)
4113 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4115 break;
4116 default:
4117 break;
4119 /* We don't know how to express ptr_extend as an operation in debug info. */
4120 return NULL;
4122 #endif /* POINTERS_EXTEND_UNSIGNED */
4124 return x;
4127 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4128 by avoid_deep_ter_for_debug. */
4130 static hash_map<tree, tree> *deep_ter_debug_map;
4132 /* Split overly deep TER chains for debug stmts by introducing debug temporaries. */
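/* When a chain of TERed definitions would nest more than six levels
   deep, bind a DEBUG_EXPR_DECL to the offending SSA_NAME right after
   its definition, so that later debug expansion can refer to the
   temporary instead of replaying the whole chain. */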
4134 static void
4135 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4137 use_operand_p use_p;
4138 ssa_op_iter iter;
4139 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4141 tree use = USE_FROM_PTR (use_p);
4142 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4143 continue;
4144 gimple *g = get_gimple_for_ssa_name (use);
4145 if (g == NULL)
4146 continue;
4147 if (depth > 6 && !stmt_ends_bb_p (g))
4149 if (deep_ter_debug_map == NULL)
4150 deep_ter_debug_map = new hash_map<tree, tree>;
4152 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4153 if (vexpr != NULL)
4154 continue;
4155 vexpr = make_node (DEBUG_EXPR_DECL);
4156 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4157 DECL_ARTIFICIAL (vexpr) = 1;
4158 TREE_TYPE (vexpr) = TREE_TYPE (use);
4159 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4160 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4161 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4162 avoid_deep_ter_for_debug (def_temp, 0);
4164 else
4165 avoid_deep_ter_for_debug (g, depth + 1);
4169 /* Return an RTX equivalent to the value of the parameter DECL. */
4171 static rtx
4172 expand_debug_parm_decl (tree decl)
4174 rtx incoming = DECL_INCOMING_RTL (decl);
4176 if (incoming
4177 && GET_MODE (incoming) != BLKmode
4178 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4179 || (MEM_P (incoming)
4180 && REG_P (XEXP (incoming, 0))
4181 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4183 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4185 #ifdef HAVE_window_save
4186 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4187 If the target machine has an explicit window save instruction, the
4188 actual entry value is the corresponding OUTGOING_REGNO instead. */
4189 if (REG_P (incoming)
4190 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4191 incoming
4192 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4193 OUTGOING_REGNO (REGNO (incoming)), 0);
4194 else if (MEM_P (incoming))
4196 rtx reg = XEXP (incoming, 0);
4197 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4199 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4200 incoming = replace_equiv_address_nv (incoming, reg);
4202 else
4203 incoming = copy_rtx (incoming);
4205 #endif
4207 ENTRY_VALUE_EXP (rtl) = incoming;
4208 return rtl;
4211 if (incoming
4212 && GET_MODE (incoming) != BLKmode
4213 && !TREE_ADDRESSABLE (decl)
4214 && MEM_P (incoming)
4215 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4216 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4217 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4218 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4219 return copy_rtx (incoming);
4221 return NULL_RTX;
4224 /* Return an RTX equivalent to the value of the tree expression EXP. */
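/* Unlike expand_expr, this must not emit any instructions; it builds a
   side-effect-free RTL expression for use in debug insns, and returns
   NULL when EXP (or some part of it) cannot be represented. */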
4226 static rtx
4227 expand_debug_expr (tree exp)
4229 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4230 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4231 machine_mode inner_mode = VOIDmode;
4232 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4233 addr_space_t as;
4234 scalar_int_mode op0_mode, op1_mode, addr_mode;
4236 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4238 case tcc_expression:
4239 switch (TREE_CODE (exp))
4241 case COND_EXPR:
4242 case DOT_PROD_EXPR:
4243 case SAD_EXPR:
4244 case WIDEN_MULT_PLUS_EXPR:
4245 case WIDEN_MULT_MINUS_EXPR:
4246 goto ternary;
4248 case TRUTH_ANDIF_EXPR:
4249 case TRUTH_ORIF_EXPR:
4250 case TRUTH_AND_EXPR:
4251 case TRUTH_OR_EXPR:
4252 case TRUTH_XOR_EXPR:
4253 goto binary;
4255 case TRUTH_NOT_EXPR:
4256 goto unary;
4258 default:
4259 break;
4261 break;
4263 ternary:
4264 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4265 if (!op2)
4266 return NULL_RTX;
4267 /* Fall through. */
4269 binary:
4270 case tcc_binary:
4271 if (mode == BLKmode)
4272 return NULL_RTX;
4273 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4274 if (!op1)
4275 return NULL_RTX;
4276 switch (TREE_CODE (exp))
4278 case LSHIFT_EXPR:
4279 case RSHIFT_EXPR:
4280 case LROTATE_EXPR:
4281 case RROTATE_EXPR:
4282 case WIDEN_LSHIFT_EXPR:
4283 /* Ensure second operand isn't wider than the first one. */
4284 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4285 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4286 && (GET_MODE_UNIT_PRECISION (mode)
4287 < GET_MODE_PRECISION (op1_mode)))
4288 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4289 break;
4290 default:
4291 break;
4293 /* Fall through. */
4295 unary:
4296 case tcc_unary:
4297 if (mode == BLKmode)
4298 return NULL_RTX;
4299 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4300 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4301 if (!op0)
4302 return NULL_RTX;
4303 break;
4305 case tcc_comparison:
4306 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4307 goto binary;
4309 case tcc_type:
4310 case tcc_statement:
4311 gcc_unreachable ();
4313 case tcc_constant:
4314 case tcc_exceptional:
4315 case tcc_declaration:
4316 case tcc_reference:
4317 case tcc_vl_exp:
4318 break;
4321 switch (TREE_CODE (exp))
4323 case STRING_CST:
4324 if (!lookup_constant_def (exp))
4326 if (strlen (TREE_STRING_POINTER (exp)) + 1
4327 != (size_t) TREE_STRING_LENGTH (exp))
4328 return NULL_RTX;
4329 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4330 op0 = gen_rtx_MEM (BLKmode, op0);
4331 set_mem_attributes (op0, exp, 0);
4332 return op0;
4334 /* Fall through. */
4336 case INTEGER_CST:
4337 case REAL_CST:
4338 case FIXED_CST:
4339 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4340 return op0;
4342 case POLY_INT_CST:
4343 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4345 case COMPLEX_CST:
4346 gcc_assert (COMPLEX_MODE_P (mode));
4347 op0 = expand_debug_expr (TREE_REALPART (exp));
4348 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4349 return gen_rtx_CONCAT (mode, op0, op1);
4351 case DEBUG_EXPR_DECL:
4352 op0 = DECL_RTL_IF_SET (exp);
4354 if (op0)
4355 return op0;
4357 op0 = gen_rtx_DEBUG_EXPR (mode);
4358 DEBUG_EXPR_TREE_DECL (op0) = exp;
4359 SET_DECL_RTL (exp, op0);
4361 return op0;
4363 case VAR_DECL:
4364 case PARM_DECL:
4365 case FUNCTION_DECL:
4366 case LABEL_DECL:
4367 case CONST_DECL:
4368 case RESULT_DECL:
4369 op0 = DECL_RTL_IF_SET (exp);
4371 /* This decl was probably optimized away. */
4372 if (!op0
4373 /* At least label RTXen are sometimes replaced by
4374 NOTE_INSN_DELETED_LABEL. Any notes here are not
4375 handled by copy_rtx. */
4376 || NOTE_P (op0))
4378 if (!VAR_P (exp)
4379 || DECL_EXTERNAL (exp)
4380 || !TREE_STATIC (exp)
4381 || !DECL_NAME (exp)
4382 || DECL_HARD_REGISTER (exp)
4383 || DECL_IN_CONSTANT_POOL (exp)
4384 || mode == VOIDmode)
4385 return NULL;
4387 op0 = make_decl_rtl_for_debug (exp);
4388 if (!MEM_P (op0)
4389 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4390 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4391 return NULL;
4393 else
4394 op0 = copy_rtx (op0);
4396 if (GET_MODE (op0) == BLKmode
4397 /* If op0 is not BLKmode, but mode is, adjust_mode
4398 below would ICE. While it is likely a FE bug,
4399 try to be robust here. See PR43166. */
4400 || mode == BLKmode
4401 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4403 gcc_assert (MEM_P (op0));
4404 op0 = adjust_address_nv (op0, mode, 0);
4405 return op0;
4408 /* Fall through. */
4410 adjust_mode:
4411 case PAREN_EXPR:
4412 CASE_CONVERT:
4414 inner_mode = GET_MODE (op0);
4416 if (mode == inner_mode)
4417 return op0;
4419 if (inner_mode == VOIDmode)
4421 if (TREE_CODE (exp) == SSA_NAME)
4422 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4423 else
4424 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4425 if (mode == inner_mode)
4426 return op0;
4429 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4431 if (GET_MODE_UNIT_BITSIZE (mode)
4432 == GET_MODE_UNIT_BITSIZE (inner_mode))
4433 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4434 else if (GET_MODE_UNIT_BITSIZE (mode)
4435 < GET_MODE_UNIT_BITSIZE (inner_mode))
4436 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4437 else
4438 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4440 else if (FLOAT_MODE_P (mode))
4442 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4443 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4444 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4445 else
4446 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4448 else if (FLOAT_MODE_P (inner_mode))
4450 if (unsignedp)
4451 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4452 else
4453 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4455 else if (GET_MODE_UNIT_PRECISION (mode)
4456 == GET_MODE_UNIT_PRECISION (inner_mode))
4457 op0 = lowpart_subreg (mode, op0, inner_mode);
4458 else if (GET_MODE_UNIT_PRECISION (mode)
4459 < GET_MODE_UNIT_PRECISION (inner_mode))
4460 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4461 else if (UNARY_CLASS_P (exp)
4462 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4463 : unsignedp)
4464 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4465 else
4466 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4468 return op0;
4471 case MEM_REF:
4472 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4474 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4475 TREE_OPERAND (exp, 0),
4476 TREE_OPERAND (exp, 1));
4477 if (newexp)
4478 return expand_debug_expr (newexp);
4480 /* FALLTHROUGH */
4481 case INDIRECT_REF:
4482 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4483 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4484 if (!op0)
4485 return NULL;
4487 if (TREE_CODE (exp) == MEM_REF)
4489 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4490 || (GET_CODE (op0) == PLUS
4491 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4492 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4493 Instead just use get_inner_reference. */
4494 goto component_ref;
4496 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4497 poly_int64 offset;
4498 if (!op1 || !poly_int_rtx_p (op1, &offset))
4499 return NULL;
4501 op0 = plus_constant (inner_mode, op0, offset);
4504 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4506 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4507 op0, as);
4508 if (op0 == NULL_RTX)
4509 return NULL;
4511 op0 = gen_rtx_MEM (mode, op0);
4512 set_mem_attributes (op0, exp, 0);
4513 if (TREE_CODE (exp) == MEM_REF
4514 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4515 set_mem_expr (op0, NULL_TREE);
4516 set_mem_addr_space (op0, as);
4518 return op0;
4520 case TARGET_MEM_REF:
4521 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4522 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4523 return NULL;
4525 op0 = expand_debug_expr
4526 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4527 if (!op0)
4528 return NULL;
4530 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4531 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4532 op0, as);
4533 if (op0 == NULL_RTX)
4534 return NULL;
4536 op0 = gen_rtx_MEM (mode, op0);
4538 set_mem_attributes (op0, exp, 0);
4539 set_mem_addr_space (op0, as);
4541 return op0;
4543 component_ref:
4544 case ARRAY_REF:
4545 case ARRAY_RANGE_REF:
4546 case COMPONENT_REF:
4547 case BIT_FIELD_REF:
4548 case REALPART_EXPR:
4549 case IMAGPART_EXPR:
4550 case VIEW_CONVERT_EXPR:
4552 machine_mode mode1;
4553 poly_int64 bitsize, bitpos;
4554 tree offset;
4555 int reversep, volatilep = 0;
4556 tree tem
4557 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4558 &unsignedp, &reversep, &volatilep);
4559 rtx orig_op0;
4561 if (known_eq (bitsize, 0))
4562 return NULL;
4564 orig_op0 = op0 = expand_debug_expr (tem);
4566 if (!op0)
4567 return NULL;
4569 if (offset)
4571 machine_mode addrmode, offmode;
4573 if (!MEM_P (op0))
4574 return NULL;
4576 op0 = XEXP (op0, 0);
4577 addrmode = GET_MODE (op0);
4578 if (addrmode == VOIDmode)
4579 addrmode = Pmode;
4581 op1 = expand_debug_expr (offset);
4582 if (!op1)
4583 return NULL;
4585 offmode = GET_MODE (op1);
4586 if (offmode == VOIDmode)
4587 offmode = TYPE_MODE (TREE_TYPE (offset));
4589 if (addrmode != offmode)
4590 op1 = lowpart_subreg (addrmode, op1, offmode);
4592 /* Don't use offset_address here; we don't need a
4593 recognizable address, and we don't want to generate
4594 code. */
4595 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4596 op0, op1));
4599 if (MEM_P (op0))
4601 if (mode1 == VOIDmode)
4603 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4604 return NULL;
4605 /* Bitfield. */
4606 mode1 = smallest_int_mode_for_size (bitsize);
4608 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4609 if (maybe_ne (bytepos, 0))
4611 op0 = adjust_address_nv (op0, mode1, bytepos);
4612 bitpos = num_trailing_bits (bitpos);
4614 else if (known_eq (bitpos, 0)
4615 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4616 op0 = adjust_address_nv (op0, mode, 0);
4617 else if (GET_MODE (op0) != mode1)
4618 op0 = adjust_address_nv (op0, mode1, 0);
4619 else
4620 op0 = copy_rtx (op0);
4621 if (op0 == orig_op0)
4622 op0 = shallow_copy_rtx (op0);
4623 if (TREE_CODE (tem) != SSA_NAME)
4624 set_mem_attributes (op0, exp, 0);
4627 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4628 return op0;
4630 if (maybe_lt (bitpos, 0))
4631 return NULL;
4633 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4634 return NULL;
4636 poly_int64 bytepos;
4637 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4638 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4640 machine_mode opmode = GET_MODE (op0);
4642 if (opmode == VOIDmode)
4643 opmode = TYPE_MODE (TREE_TYPE (tem));
4645 /* This condition may hold if we're expanding the address
4646 right past the end of an array that turned out not to
4647 be addressable (i.e., the address was only computed in
4648 debug stmts). The gen_subreg below would rightfully
4649 crash, and the address doesn't really exist, so just
4650 drop it. */
4651 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4652 return NULL;
4654 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4655 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4658 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4659 && TYPE_UNSIGNED (TREE_TYPE (exp))
4660 ? SIGN_EXTRACT
4661 : ZERO_EXTRACT, mode,
4662 GET_MODE (op0) != VOIDmode
4663 ? GET_MODE (op0)
4664 : TYPE_MODE (TREE_TYPE (tem)),
4665 op0, gen_int_mode (bitsize, word_mode),
4666 gen_int_mode (bitpos, word_mode));
4669 case ABS_EXPR:
4670 case ABSU_EXPR:
4671 return simplify_gen_unary (ABS, mode, op0, mode);
4673 case NEGATE_EXPR:
4674 return simplify_gen_unary (NEG, mode, op0, mode);
4676 case BIT_NOT_EXPR:
4677 return simplify_gen_unary (NOT, mode, op0, mode);
4679 case FLOAT_EXPR:
4680 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4681 0)))
4682 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4683 inner_mode);
4685 case FIX_TRUNC_EXPR:
4686 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4687 inner_mode);
4689 case POINTER_PLUS_EXPR:
4690 /* For the rare target where pointers are not the same size as
4691 size_t, we need to check for mismatched modes and correct
4692 the addend. */
4693 if (op0 && op1
4694 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4695 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4696 && op0_mode != op1_mode)
4698 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4699 /* If OP0 is a partial mode, then we must truncate, even
4700 if it has the same bitsize as OP1, because GCC's
4701 representation of partial modes is opaque. */
4702 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4703 && (GET_MODE_BITSIZE (op0_mode)
4704 == GET_MODE_BITSIZE (op1_mode))))
4705 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4706 else
4707 /* We always sign-extend, regardless of the signedness of
4708 the operand, because the operand is always unsigned
4709 here even if the original C expression is signed. */
4710 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4712 /* Fall through. */
4713 case PLUS_EXPR:
4714 return simplify_gen_binary (PLUS, mode, op0, op1);
4716 case MINUS_EXPR:
4717 case POINTER_DIFF_EXPR:
4718 return simplify_gen_binary (MINUS, mode, op0, op1);
4720 case MULT_EXPR:
4721 return simplify_gen_binary (MULT, mode, op0, op1);
4723 case RDIV_EXPR:
4724 case TRUNC_DIV_EXPR:
4725 case EXACT_DIV_EXPR:
4726 if (unsignedp)
4727 return simplify_gen_binary (UDIV, mode, op0, op1);
4728 else
4729 return simplify_gen_binary (DIV, mode, op0, op1);
4731 case TRUNC_MOD_EXPR:
4732 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4734 case FLOOR_DIV_EXPR:
4735 if (unsignedp)
4736 return simplify_gen_binary (UDIV, mode, op0, op1);
4737 else
4739 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4740 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4741 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4742 return simplify_gen_binary (PLUS, mode, div, adj);
4745 case FLOOR_MOD_EXPR:
4746 if (unsignedp)
4747 return simplify_gen_binary (UMOD, mode, op0, op1);
4748 else
4750 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4751 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4752 adj = simplify_gen_unary (NEG, mode,
4753 simplify_gen_binary (MULT, mode, adj, op1),
4754 mode);
4755 return simplify_gen_binary (PLUS, mode, mod, adj);
4758 case CEIL_DIV_EXPR:
4759 if (unsignedp)
4761 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4762 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4763 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4764 return simplify_gen_binary (PLUS, mode, div, adj);
4766 else
4768 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4769 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4770 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4771 return simplify_gen_binary (PLUS, mode, div, adj);
4774 case CEIL_MOD_EXPR:
4775 if (unsignedp)
4777 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4778 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4779 adj = simplify_gen_unary (NEG, mode,
4780 simplify_gen_binary (MULT, mode, adj, op1),
4781 mode);
4782 return simplify_gen_binary (PLUS, mode, mod, adj);
4784 else
4786 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4787 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4788 adj = simplify_gen_unary (NEG, mode,
4789 simplify_gen_binary (MULT, mode, adj, op1),
4790 mode);
4791 return simplify_gen_binary (PLUS, mode, mod, adj);
4794 case ROUND_DIV_EXPR:
4795 if (unsignedp)
4797 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4798 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4799 rtx adj = round_udiv_adjust (mode, mod, op1);
4800 return simplify_gen_binary (PLUS, mode, div, adj);
4802 else
4804 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4805 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4806 rtx adj = round_sdiv_adjust (mode, mod, op1);
4807 return simplify_gen_binary (PLUS, mode, div, adj);
4810 case ROUND_MOD_EXPR:
4811 if (unsignedp)
4813 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4814 rtx adj = round_udiv_adjust (mode, mod, op1);
4815 adj = simplify_gen_unary (NEG, mode,
4816 simplify_gen_binary (MULT, mode, adj, op1),
4817 mode);
4818 return simplify_gen_binary (PLUS, mode, mod, adj);
4820 else
4822 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4823 rtx adj = round_sdiv_adjust (mode, mod, op1);
4824 adj = simplify_gen_unary (NEG, mode,
4825 simplify_gen_binary (MULT, mode, adj, op1),
4826 mode);
4827 return simplify_gen_binary (PLUS, mode, mod, adj);
4830 case LSHIFT_EXPR:
4831 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4833 case RSHIFT_EXPR:
4834 if (unsignedp)
4835 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4836 else
4837 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4839 case LROTATE_EXPR:
4840 return simplify_gen_binary (ROTATE, mode, op0, op1);
4842 case RROTATE_EXPR:
4843 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4845 case MIN_EXPR:
4846 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4848 case MAX_EXPR:
4849 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4851 case BIT_AND_EXPR:
4852 case TRUTH_AND_EXPR:
4853 return simplify_gen_binary (AND, mode, op0, op1);
4855 case BIT_IOR_EXPR:
4856 case TRUTH_OR_EXPR:
4857 return simplify_gen_binary (IOR, mode, op0, op1);
4859 case BIT_XOR_EXPR:
4860 case TRUTH_XOR_EXPR:
4861 return simplify_gen_binary (XOR, mode, op0, op1);
4863 case TRUTH_ANDIF_EXPR:
4864 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4866 case TRUTH_ORIF_EXPR:
4867 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4869 case TRUTH_NOT_EXPR:
4870 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4872 case LT_EXPR:
4873 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4874 op0, op1);
4876 case LE_EXPR:
4877 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4878 op0, op1);
4880 case GT_EXPR:
4881 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4882 op0, op1);
4884 case GE_EXPR:
4885 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4886 op0, op1);
4888 case EQ_EXPR:
4889 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4891 case NE_EXPR:
4892 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4894 case UNORDERED_EXPR:
4895 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4897 case ORDERED_EXPR:
4898 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4900 case UNLT_EXPR:
4901 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4903 case UNLE_EXPR:
4904 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4906 case UNGT_EXPR:
4907 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4909 case UNGE_EXPR:
4910 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4912 case UNEQ_EXPR:
4913 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4915 case LTGT_EXPR:
4916 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4918 case COND_EXPR:
4919 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4921 case COMPLEX_EXPR:
4922 gcc_assert (COMPLEX_MODE_P (mode));
4923 if (GET_MODE (op0) == VOIDmode)
4924 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4925 if (GET_MODE (op1) == VOIDmode)
4926 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4927 return gen_rtx_CONCAT (mode, op0, op1);
4929 case CONJ_EXPR:
4930 if (GET_CODE (op0) == CONCAT)
4931 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4932 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4933 XEXP (op0, 1),
4934 GET_MODE_INNER (mode)));
4935 else
4937 scalar_mode imode = GET_MODE_INNER (mode);
4938 rtx re, im;
4940 if (MEM_P (op0))
4942 re = adjust_address_nv (op0, imode, 0);
4943 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4945 else
4947 scalar_int_mode ifmode;
4948 scalar_int_mode ihmode;
4949 rtx halfsize;
4950 if (!int_mode_for_mode (mode).exists (&ifmode)
4951 || !int_mode_for_mode (imode).exists (&ihmode))
4952 return NULL;
4953 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4954 re = op0;
4955 if (mode != ifmode)
4956 re = gen_rtx_SUBREG (ifmode, re, 0);
4957 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4958 if (imode != ihmode)
4959 re = gen_rtx_SUBREG (imode, re, 0);
4960 im = copy_rtx (op0);
4961 if (mode != ifmode)
4962 im = gen_rtx_SUBREG (ifmode, im, 0);
4963 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4964 if (imode != ihmode)
4965 im = gen_rtx_SUBREG (imode, im, 0);
4967 im = gen_rtx_NEG (imode, im);
4968 return gen_rtx_CONCAT (mode, re, im);
4971 case ADDR_EXPR:
4972 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4973 if (!op0 || !MEM_P (op0))
4975 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4976 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4977 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4978 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4979 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4980 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4982 if (handled_component_p (TREE_OPERAND (exp, 0)))
4984 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4985 bool reverse;
4986 tree decl
4987 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4988 &bitsize, &maxsize, &reverse);
4989 if ((VAR_P (decl)
4990 || TREE_CODE (decl) == PARM_DECL
4991 || TREE_CODE (decl) == RESULT_DECL)
4992 && (!TREE_ADDRESSABLE (decl)
4993 || target_for_debug_bind (decl))
4994 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4995 && known_gt (bitsize, 0)
4996 && known_eq (bitsize, maxsize))
4998 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4999 return plus_constant (mode, base, byteoffset);
5003 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5004 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5005 == ADDR_EXPR)
5007 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5008 0));
5009 if (op0 != NULL
5010 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5011 || (GET_CODE (op0) == PLUS
5012 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5013 && CONST_INT_P (XEXP (op0, 1)))))
5015 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5016 1));
5017 poly_int64 offset;
5018 if (!op1 || !poly_int_rtx_p (op1, &offset))
5019 return NULL;
5021 return plus_constant (mode, op0, offset);
5025 return NULL;
5028 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5029 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5030 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5032 return op0;
5034 case VECTOR_CST:
5036 unsigned HOST_WIDE_INT i, nelts;
5038 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5039 return NULL;
5041 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5043 for (i = 0; i < nelts; ++i)
5045 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5046 if (!op1)
5047 return NULL;
5048 XVECEXP (op0, 0, i) = op1;
5051 return op0;
5054 case CONSTRUCTOR:
5055 if (TREE_CLOBBER_P (exp))
5056 return NULL;
5057 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5059 unsigned i;
5060 unsigned HOST_WIDE_INT nelts;
5061 tree val;
5063 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5064 goto flag_unsupported;
5066 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5068 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5070 op1 = expand_debug_expr (val);
5071 if (!op1)
5072 return NULL;
5073 XVECEXP (op0, 0, i) = op1;
5076 if (i < nelts)
5078 op1 = expand_debug_expr
5079 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5081 if (!op1)
5082 return NULL;
5084 for (; i < nelts; i++)
5085 XVECEXP (op0, 0, i) = op1;
5088 return op0;
5090 else
5091 goto flag_unsupported;
5093 case CALL_EXPR:
5094 /* ??? Maybe handle some builtins? */
5095 return NULL;
5097 case SSA_NAME:
5099 gimple *g = get_gimple_for_ssa_name (exp);
5100 if (g)
5102 tree t = NULL_TREE;
5103 if (deep_ter_debug_map)
5105 tree *slot = deep_ter_debug_map->get (exp);
5106 if (slot)
5107 t = *slot;
5109 if (t == NULL_TREE)
5110 t = gimple_assign_rhs_to_tree (g);
5111 op0 = expand_debug_expr (t);
5112 if (!op0)
5113 return NULL;
5115 else
5117 /* If this is a reference to the incoming value of a
5118 parameter that is never used in the code, or whose
5119 incoming value is never used, use the PARM_DECL's
5120 DECL_RTL if set. */
5121 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5122 && SSA_NAME_VAR (exp)
5123 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5124 && has_zero_uses (exp))
5126 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5127 if (op0)
5128 goto adjust_mode;
5129 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5130 if (op0)
5131 goto adjust_mode;
5134 int part = var_to_partition (SA.map, exp);
5136 if (part == NO_PARTITION)
5137 return NULL;
5139 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5141 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5143 goto adjust_mode;
5146 case ERROR_MARK:
5147 return NULL;
5149 /* Vector codes. For most of these there is no corresponding RTL code. */
5150 case REALIGN_LOAD_EXPR:
5151 case VEC_COND_EXPR:
5152 case VEC_PACK_FIX_TRUNC_EXPR:
5153 case VEC_PACK_FLOAT_EXPR:
5154 case VEC_PACK_SAT_EXPR:
5155 case VEC_PACK_TRUNC_EXPR:
5156 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5157 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5158 case VEC_UNPACK_FLOAT_HI_EXPR:
5159 case VEC_UNPACK_FLOAT_LO_EXPR:
5160 case VEC_UNPACK_HI_EXPR:
5161 case VEC_UNPACK_LO_EXPR:
5162 case VEC_WIDEN_MULT_HI_EXPR:
5163 case VEC_WIDEN_MULT_LO_EXPR:
5164 case VEC_WIDEN_MULT_EVEN_EXPR:
5165 case VEC_WIDEN_MULT_ODD_EXPR:
5166 case VEC_WIDEN_LSHIFT_HI_EXPR:
5167 case VEC_WIDEN_LSHIFT_LO_EXPR:
5168 case VEC_PERM_EXPR:
5169 case VEC_DUPLICATE_EXPR:
5170 case VEC_SERIES_EXPR:
5171 case SAD_EXPR:
5172 return NULL;
5174 /* Misc codes. */
5175 case ADDR_SPACE_CONVERT_EXPR:
5176 case FIXED_CONVERT_EXPR:
5177 case OBJ_TYPE_REF:
5178 case WITH_SIZE_EXPR:
5179 case BIT_INSERT_EXPR:
5180 return NULL;
5182 case DOT_PROD_EXPR:
5183 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5184 && SCALAR_INT_MODE_P (mode))
5187 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5188 0)))
5189 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5190 inner_mode);
5192 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5193 1)))
5194 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5195 inner_mode);
5196 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5197 return simplify_gen_binary (PLUS, mode, op0, op2);
5199 return NULL;
5201 case WIDEN_MULT_EXPR:
5202 case WIDEN_MULT_PLUS_EXPR:
5203 case WIDEN_MULT_MINUS_EXPR:
5204 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5205 && SCALAR_INT_MODE_P (mode))
5207 inner_mode = GET_MODE (op0);
5208 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5209 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5210 else
5211 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5212 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5213 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5214 else
5215 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5216 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5217 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5218 return op0;
5219 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5220 return simplify_gen_binary (PLUS, mode, op0, op2);
5221 else
5222 return simplify_gen_binary (MINUS, mode, op2, op0);
5224 return NULL;
5226 case MULT_HIGHPART_EXPR:
5227 /* ??? Similar to the above. */
5228 return NULL;
5230 case WIDEN_SUM_EXPR:
5231 case WIDEN_LSHIFT_EXPR:
5232 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5233 && SCALAR_INT_MODE_P (mode))
5236 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5237 0)))
5238 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5239 inner_mode);
5240 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5241 ? ASHIFT : PLUS, mode, op0, op1);
5243 return NULL;
5245 default:
5246 flag_unsupported:
5247 if (flag_checking)
5249 debug_tree (exp);
5250 gcc_unreachable ();
5252 return NULL;
5256 /* Return an RTX equivalent to the source bind value of the tree expression
5257 EXP. */
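/* Called from expand_debug_locations for binds whose status is
   VAR_INIT_STATUS_UNINITIALIZED (source binds); it only knows how to
   recover parameter values, possibly through DECL_ABSTRACT_ORIGIN. */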
5259 static rtx
5260 expand_debug_source_expr (tree exp)
5262 rtx op0 = NULL_RTX;
5263 machine_mode mode = VOIDmode, inner_mode;
5265 switch (TREE_CODE (exp))
5267 case VAR_DECL:
5268 if (DECL_ABSTRACT_ORIGIN (exp))
5269 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5270 break;
5271 case PARM_DECL:
5273 mode = DECL_MODE (exp);
5274 op0 = expand_debug_parm_decl (exp);
5275 if (op0)
5276 break;
5277 /* See whether this is an argument that has been completely
5278 optimized out. */
5279 if (!DECL_RTL_SET_P (exp)
5280 && !DECL_INCOMING_RTL (exp)
5281 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5283 tree aexp = DECL_ORIGIN (exp);
5284 if (DECL_CONTEXT (aexp)
5285 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5287 vec<tree, va_gc> **debug_args;
5288 unsigned int ix;
5289 tree ddecl;
5290 debug_args = decl_debug_args_lookup (current_function_decl);
5291 if (debug_args != NULL)
5293 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5294 ix += 2)
5295 if (ddecl == aexp)
5296 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5300 break;
5302 default:
5303 break;
5306 if (op0 == NULL_RTX)
5307 return NULL_RTX;
5309 inner_mode = GET_MODE (op0);
5310 if (mode == inner_mode)
5311 return op0;
5313 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5315 if (GET_MODE_UNIT_BITSIZE (mode)
5316 == GET_MODE_UNIT_BITSIZE (inner_mode))
5317 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5318 else if (GET_MODE_UNIT_BITSIZE (mode)
5319 < GET_MODE_UNIT_BITSIZE (inner_mode))
5320 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5321 else
5322 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5324 else if (FLOAT_MODE_P (mode))
5325 gcc_unreachable ();
5326 else if (FLOAT_MODE_P (inner_mode))
5328 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5329 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5330 else
5331 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5333 else if (GET_MODE_UNIT_PRECISION (mode)
5334 == GET_MODE_UNIT_PRECISION (inner_mode))
5335 op0 = lowpart_subreg (mode, op0, inner_mode);
5336 else if (GET_MODE_UNIT_PRECISION (mode)
5337 < GET_MODE_UNIT_PRECISION (inner_mode))
5338 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5339 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5340 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5341 else
5342 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5344 return op0;
5347 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5348 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5349 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5351 static void
5352 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5354 rtx exp = *exp_p;
5356 if (exp == NULL_RTX)
5357 return;
5359 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5360 return;
5362 if (depth == 4)
5364 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5365 rtx dval = make_debug_expr_from_rtl (exp);
5367 /* Emit a debug bind insn before INSN. */
5368 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5369 DEBUG_EXPR_TREE_DECL (dval), exp,
5370 VAR_INIT_STATUS_INITIALIZED);
5372 emit_debug_insn_before (bind, insn);
5373 *exp_p = dval;
5374 return;
5377 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5378 int i, j;
5379 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5380 switch (*format_ptr++)
5382 case 'e':
5383 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5384 break;
5386 case 'E':
5387 case 'V':
5388 for (j = 0; j < XVECLEN (exp, i); j++)
5389 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5390 break;
5392 default:
5393 break;
5397 /* Expand the _LOCs in debug insns. We run this after expanding all
5398 regular insns, so that any variables referenced in the function
5399 will have their DECL_RTLs set. */
5401 static void
5402 expand_debug_locations (void)
5404 rtx_insn *insn;
5405 rtx_insn *last = get_last_insn ();
5406 int save_strict_alias = flag_strict_aliasing;
5408 /* New alias sets while setting up memory attributes cause
5409 -fcompare-debug failures, even though they don't bring about any
5410 codegen changes. */
5411 flag_strict_aliasing = 0;
5413 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5414 if (DEBUG_BIND_INSN_P (insn))
5416 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5417 rtx val;
5418 rtx_insn *prev_insn, *insn2;
5419 machine_mode mode;
5421 if (value == NULL_TREE)
5422 val = NULL_RTX;
5423 else
5425 if (INSN_VAR_LOCATION_STATUS (insn)
5426 == VAR_INIT_STATUS_UNINITIALIZED)
5427 val = expand_debug_source_expr (value);
5428 /* The avoid_deep_ter_for_debug function inserts
5429 debug bind stmts after SSA_NAME definition, with the
5430 SSA_NAME as the whole bind location. Temporarily disable
5431 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5432 being defined in this DEBUG_INSN. */
5433 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5435 tree *slot = deep_ter_debug_map->get (value);
5436 if (slot)
5438 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5439 *slot = NULL_TREE;
5440 else
5441 slot = NULL;
5443 val = expand_debug_expr (value);
5444 if (slot)
5445 *slot = INSN_VAR_LOCATION_DECL (insn);
5447 else
5448 val = expand_debug_expr (value);
5449 gcc_assert (last == get_last_insn ());
5452 if (!val)
5453 val = gen_rtx_UNKNOWN_VAR_LOC ();
5454 else
5456 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5458 gcc_assert (mode == GET_MODE (val)
5459 || (GET_MODE (val) == VOIDmode
5460 && (CONST_SCALAR_INT_P (val)
5461 || GET_CODE (val) == CONST_FIXED
5462 || GET_CODE (val) == LABEL_REF)));
5465 INSN_VAR_LOCATION_LOC (insn) = val;
5466 prev_insn = PREV_INSN (insn);
5467 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5468 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5471 flag_strict_aliasing = save_strict_alias;
5474 /* Swap the operands of commutative operations so that the more
5475 expensive operand is expanded first. */
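/* Each statement's cost is its estimate_num_insns value plus the
   accumulated cost of the defining statements of its SSA uses, so the
   deeper or more expensive subtree is treated as the costlier operand. */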
5477 static void
5478 reorder_operands (basic_block bb)
5480 unsigned int *lattice; /* Hold cost of each statement. */
5481 unsigned int i = 0, n = 0;
5482 gimple_stmt_iterator gsi;
5483 gimple_seq stmts;
5484 gimple *stmt;
5485 bool swap;
5486 tree op0, op1;
5487 ssa_op_iter iter;
5488 use_operand_p use_p;
5489 gimple *def0, *def1;
5491 /* Compute cost of each statement using estimate_num_insns. */
5492 stmts = bb_seq (bb);
5493 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5495 stmt = gsi_stmt (gsi);
5496 if (!is_gimple_debug (stmt))
5497 gimple_set_uid (stmt, n++);
5499 lattice = XNEWVEC (unsigned int, n);
5500 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5502 unsigned cost;
5503 stmt = gsi_stmt (gsi);
5504 if (is_gimple_debug (stmt))
5505 continue;
5506 cost = estimate_num_insns (stmt, &eni_size_weights);
5507 lattice[i] = cost;
5508 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5510 tree use = USE_FROM_PTR (use_p);
5511 gimple *def_stmt;
5512 if (TREE_CODE (use) != SSA_NAME)
5513 continue;
5514 def_stmt = get_gimple_for_ssa_name (use);
5515 if (!def_stmt)
5516 continue;
5517 lattice[i] += lattice[gimple_uid (def_stmt)];
5519 i++;
5520 if (!is_gimple_assign (stmt)
5521 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5522 continue;
5523 op0 = gimple_op (stmt, 1);
5524 op1 = gimple_op (stmt, 2);
5525 if (TREE_CODE (op0) != SSA_NAME
5526 || TREE_CODE (op1) != SSA_NAME)
5527 continue;
5528 /* Swap operands if the second one is more expensive. */
5529 def0 = get_gimple_for_ssa_name (op0);
5530 def1 = get_gimple_for_ssa_name (op1);
5531 if (!def1)
5532 continue;
5533 swap = false;
5534 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5535 swap = true;
5536 if (swap)
5538 if (dump_file && (dump_flags & TDF_DETAILS))
5540 fprintf (dump_file, "Swap operands in stmt:\n");
5541 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5542 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5543 def0 ? lattice[gimple_uid (def0)] : 0,
5544 lattice[gimple_uid (def1)]);
5546 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5547 gimple_assign_rhs2_ptr (stmt));
5550 XDELETE (lattice);
5553 /* Expand basic block BB from GIMPLE trees to RTL. */
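/* If DISABLE_TAIL_CALLS is true, any GIMPLE_CALL marked as a tail call
   is expanded as a normal call instead (see the gimple_call_set_tail
   call below). */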
5555 static basic_block
5556 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5558 gimple_stmt_iterator gsi;
5559 gimple_seq stmts;
5560 gimple *stmt = NULL;
5561 rtx_note *note = NULL;
5562 rtx_insn *last;
5563 edge e;
5564 edge_iterator ei;
5566 if (dump_file)
5567 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5568 bb->index);
5570 /* Note that since we are now transitioning from GIMPLE to RTL, we
5571 cannot use the gsi_*_bb() routines because they expect the basic
5572 block to be in GIMPLE, instead of RTL. Therefore, we need to
5573 access the BB sequence directly. */
5574 if (optimize)
5575 reorder_operands (bb);
5576 stmts = bb_seq (bb);
5577 bb->il.gimple.seq = NULL;
5578 bb->il.gimple.phi_nodes = NULL;
5579 rtl_profile_for_bb (bb);
5580 init_rtl_bb_info (bb);
5581 bb->flags |= BB_RTL;
5583 /* Remove the RETURN_EXPR if we may fall through to the exit
5584 instead. */
5585 gsi = gsi_last (stmts);
5586 if (!gsi_end_p (gsi)
5587 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5589 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5591 gcc_assert (single_succ_p (bb));
5592 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5594 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5595 && !gimple_return_retval (ret_stmt))
5597 gsi_remove (&gsi, false);
5598 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5602 gsi = gsi_start (stmts);
5603 if (!gsi_end_p (gsi))
5605 stmt = gsi_stmt (gsi);
5606 if (gimple_code (stmt) != GIMPLE_LABEL)
5607 stmt = NULL;
5610 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5612 if (stmt || elt)
5614 gcc_checking_assert (!note);
5615 last = get_last_insn ();
5617 if (stmt)
5619 expand_gimple_stmt (stmt);
5620 gsi_next (&gsi);
5623 if (elt)
5624 emit_label (*elt);
5626 BB_HEAD (bb) = NEXT_INSN (last);
5627 if (NOTE_P (BB_HEAD (bb)))
5628 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5629 gcc_assert (LABEL_P (BB_HEAD (bb)));
5630 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5632 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5634 else
5635 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5637 if (note)
5638 NOTE_BASIC_BLOCK (note) = bb;
5640 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5642 basic_block new_bb;
5644 stmt = gsi_stmt (gsi);
5646 /* If this statement is a non-debug one, and we generate debug
5647 insns, then this one might be the last real use of a TERed
5648 SSA_NAME, but where there are still some debug uses further
5649 down. Expanding the current SSA name in such further debug
5650 uses by their RHS might lead to wrong debug info, as coalescing
5651 might make the operands of such RHS be placed into the same
5652 pseudo as something else. Like so:
5653 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5654 use(a_1);
5655 a_2 = ...
5656 #DEBUG ... => a_1
5657 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5658 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5659 the write to a_2 would actually have clobbered the place which
5660 formerly held a_0.
5662 So, instead of that, we recognize the situation, and generate
5663 debug temporaries at the last real use of TERed SSA names:
5664 a_1 = a_0 + 1;
5665 #DEBUG #D1 => a_1
5666 use(a_1);
5667 a_2 = ...
5668 #DEBUG ... => #D1
5670 if (MAY_HAVE_DEBUG_BIND_INSNS
5671 && SA.values
5672 && !is_gimple_debug (stmt))
5674 ssa_op_iter iter;
5675 tree op;
5676 gimple *def;
5678 location_t sloc = curr_insn_location ();
5680 /* Look for SSA names that have their last use here (TERed
5681 names always have only one real use). */
5682 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5683 if ((def = get_gimple_for_ssa_name (op)))
5685 imm_use_iterator imm_iter;
5686 use_operand_p use_p;
5687 bool have_debug_uses = false;
5689 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5691 if (gimple_debug_bind_p (USE_STMT (use_p)))
5693 have_debug_uses = true;
5694 break;
5698 if (have_debug_uses)
5700 /* OP is a TERed SSA name, with DEF its defining
5701 statement, and where OP is used in further debug
5702 instructions. Generate a debug temporary, and
5703 replace all uses of OP in debug insns with that
5704 temporary. */
5705 gimple *debugstmt;
5706 tree value = gimple_assign_rhs_to_tree (def);
5707 tree vexpr = make_node (DEBUG_EXPR_DECL);
5708 rtx val;
5709 machine_mode mode;
5711 set_curr_insn_location (gimple_location (def));
5713 DECL_ARTIFICIAL (vexpr) = 1;
5714 TREE_TYPE (vexpr) = TREE_TYPE (value);
5715 if (DECL_P (value))
5716 mode = DECL_MODE (value);
5717 else
5718 mode = TYPE_MODE (TREE_TYPE (value));
5719 SET_DECL_MODE (vexpr, mode);
5721 val = gen_rtx_VAR_LOCATION
5722 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5724 emit_debug_insn (val);
5726 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5728 if (!gimple_debug_bind_p (debugstmt))
5729 continue;
5731 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5732 SET_USE (use_p, vexpr);
5734 update_stmt (debugstmt);
5738 set_curr_insn_location (sloc);
5741 currently_expanding_gimple_stmt = stmt;
5743 /* Expand this statement, then evaluate the resulting RTL and
5744 fixup the CFG accordingly. */
5745 if (gimple_code (stmt) == GIMPLE_COND)
5747 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5748 if (new_bb)
5749 return new_bb;
5751 else if (is_gimple_debug (stmt))
5753 location_t sloc = curr_insn_location ();
5754 gimple_stmt_iterator nsi = gsi;
5756 for (;;)
5758 tree var;
5759 tree value = NULL_TREE;
5760 rtx val = NULL_RTX;
5761 machine_mode mode;
5763 if (!gimple_debug_nonbind_marker_p (stmt))
5765 if (gimple_debug_bind_p (stmt))
5767 var = gimple_debug_bind_get_var (stmt);
5769 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5770 && TREE_CODE (var) != LABEL_DECL
5771 && !target_for_debug_bind (var))
5772 goto delink_debug_stmt;
5774 if (DECL_P (var))
5775 mode = DECL_MODE (var);
5776 else
5777 mode = TYPE_MODE (TREE_TYPE (var));
5779 if (gimple_debug_bind_has_value_p (stmt))
5780 value = gimple_debug_bind_get_value (stmt);
5782 val = gen_rtx_VAR_LOCATION
5783 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5785 else if (gimple_debug_source_bind_p (stmt))
5787 var = gimple_debug_source_bind_get_var (stmt);
5789 value = gimple_debug_source_bind_get_value (stmt);
5791 mode = DECL_MODE (var);
5793 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5794 VAR_INIT_STATUS_UNINITIALIZED);
5796 else
5797 gcc_unreachable ();
5799 /* If this function was first compiled with markers
5800 enabled, but they're now disabled (e.g. LTO), drop
5801 them on the floor. */
5802 else if (gimple_debug_nonbind_marker_p (stmt)
5803 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5804 goto delink_debug_stmt;
5805 else if (gimple_debug_begin_stmt_p (stmt))
5806 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5807 else if (gimple_debug_inline_entry_p (stmt))
5809 tree block = gimple_block (stmt);
5811 if (block)
5812 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5813 else
5814 goto delink_debug_stmt;
5816 else
5817 gcc_unreachable ();
5819 last = get_last_insn ();
5821 set_curr_insn_location (gimple_location (stmt));
5823 emit_debug_insn (val);
5825 if (dump_file && (dump_flags & TDF_DETAILS))
5827 /* We can't dump the insn with a TREE where an RTX
5828 is expected. */
5829 if (GET_CODE (val) == VAR_LOCATION)
5831 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5832 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5834 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5835 if (GET_CODE (val) == VAR_LOCATION)
5836 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5839 delink_debug_stmt:
5840 /* In order not to generate too many debug temporaries,
5841 we delink all uses of debug statements we already expanded.
5842 Therefore debug statements between definition and real
5843 use of TERed SSA names will continue to use the SSA name,
5844 and not be replaced with debug temps. */
5845 delink_stmt_imm_use (stmt);
5847 gsi = nsi;
5848 gsi_next (&nsi);
5849 if (gsi_end_p (nsi))
5850 break;
5851 stmt = gsi_stmt (nsi);
5852 if (!is_gimple_debug (stmt))
5853 break;
5856 set_curr_insn_location (sloc);
5858 else
5860 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5861 if (call_stmt
5862 && gimple_call_tail_p (call_stmt)
5863 && disable_tail_calls)
5864 gimple_call_set_tail (call_stmt, false);
5866 if (call_stmt && gimple_call_tail_p (call_stmt))
5868 bool can_fallthru;
5869 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5870 if (new_bb)
5872 if (can_fallthru)
5873 bb = new_bb;
5874 else
5875 return new_bb;
5878 else
5880 def_operand_p def_p;
5881 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5883 if (def_p != NULL)
5885 /* Ignore this stmt if it is in the list of
5886 replaceable expressions. */
5887 if (SA.values
5888 && bitmap_bit_p (SA.values,
5889 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5890 continue;
5892 last = expand_gimple_stmt (stmt);
5893 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5898 currently_expanding_gimple_stmt = NULL;
5900 /* Expand implicit goto and convert goto_locus. */
5901 FOR_EACH_EDGE (e, ei, bb->succs)
5903 if (e->goto_locus != UNKNOWN_LOCATION)
5904 set_curr_insn_location (e->goto_locus);
5905 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5907 emit_jump (label_rtx_for_bb (e->dest));
5908 e->flags &= ~EDGE_FALLTHRU;
5912 /* Expanded RTL can create a jump in the last instruction of the block.
5913 Later this might be assumed to be a jump to the successor and break edge insertion.
5914 We need to insert a dummy move to prevent this. PR41440. */
5915 if (single_succ_p (bb)
5916 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5917 && (last = get_last_insn ())
5918 && (JUMP_P (last)
5919 || (DEBUG_INSN_P (last)
5920 && JUMP_P (prev_nondebug_insn (last)))))
5922 rtx dummy = gen_reg_rtx (SImode);
5923 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5926 do_pending_stack_adjust ();
5928 /* Find the block tail. The last insn in the block is the insn
5929 before a barrier and/or table jump insn. */
5930 last = get_last_insn ();
5931 if (BARRIER_P (last))
5932 last = PREV_INSN (last);
5933 if (JUMP_TABLE_DATA_P (last))
5934 last = PREV_INSN (PREV_INSN (last));
5935 if (BARRIER_P (last))
5936 last = PREV_INSN (last);
5937 BB_END (bb) = last;
5939 update_bb_for_insn (bb);
5941 return bb;
5945 /* Create a basic block for initialization code. */
5947 static basic_block
5948 construct_init_block (void)
5950 basic_block init_block, first_block;
5951 edge e = NULL;
5952 int flags;
5954 /* Multiple entry points not supported yet. */
5955 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5956 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5957 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5958 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5959 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5961 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5963 /* When the entry edge points to the first basic block, we don't need a jump;
5964 otherwise we have to jump to the proper target. */
5965 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5967 tree label = gimple_block_label (e->dest);
5969 emit_jump (jump_target_rtx (label));
5970 flags = 0;
5972 else
5973 flags = EDGE_FALLTHRU;
5975 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5976 get_last_insn (),
5977 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5978 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5979 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5980 if (e)
5982 first_block = e->dest;
5983 redirect_edge_succ (e, init_block);
5984 make_single_succ_edge (init_block, first_block, flags);
5986 else
5987 make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5988 EDGE_FALLTHRU);
5990 update_bb_for_insn (init_block);
5991 return init_block;
5994 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5995 found in the block tree. */
5997 static void
5998 set_block_levels (tree block, int level)
6000 while (block)
6002 BLOCK_NUMBER (block) = level;
6003 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6004 block = BLOCK_CHAIN (block);
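/* Illustrative example (assumes a simple nesting, for exposition only): for
   a function with nested lexical scopes such as

     void f (void)
     {                 // outermost BLOCK: level 0
       {               // level 1
         {             // level 2
           int i;
         }
       }
     }

   set_block_levels records each scope's nesting depth in BLOCK_NUMBER,
   which change_scope later uses to find common parents quickly.  */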
6008 /* Create a block containing landing pads and similar stuff. */
6010 static void
6011 construct_exit_block (void)
6013 rtx_insn *head = get_last_insn ();
6014 rtx_insn *end;
6015 basic_block exit_block;
6016 edge e, e2;
6017 unsigned ix;
6018 edge_iterator ei;
6019 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6020 rtx_insn *orig_end = BB_END (prev_bb);
6022 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6024 /* Make sure the locus is set to the end of the function, so that
6025 epilogue line numbers and warnings are set properly. */
6026 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6027 input_location = cfun->function_end_locus;
6029 /* Generate rtl for function exit. */
6030 expand_function_end ();
6032 end = get_last_insn ();
6033 if (head == end)
6034 return;
6035 /* While emitting the function end we could move the end of the last basic
6036 block. */
6037 BB_END (prev_bb) = orig_end;
6038 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6039 head = NEXT_INSN (head);
6040 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6041 bb count accounting will be confused. Any instructions before that
6042 label are emitted for the case where PREV_BB falls through into the
6043 exit block, so append those instructions to prev_bb in that case. */
6044 if (NEXT_INSN (head) != return_label)
6046 while (NEXT_INSN (head) != return_label)
6048 if (!NOTE_P (NEXT_INSN (head)))
6049 BB_END (prev_bb) = NEXT_INSN (head);
6050 head = NEXT_INSN (head);
6053 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6054 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6055 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6057 ix = 0;
6058 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6060 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6061 if (!(e->flags & EDGE_ABNORMAL))
6062 redirect_edge_succ (e, exit_block);
6063 else
6064 ix++;
6067 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6068 EDGE_FALLTHRU);
6069 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6070 if (e2 != e)
6072 exit_block->count -= e2->count ();
6074 update_bb_for_insn (exit_block);
6077 /* Helper function for discover_nonconstant_array_refs.
6078 Look for ARRAY_REF nodes with non-constant indexes and mark them
6079 addressable. */
6081 static tree
6082 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6083 void *data ATTRIBUTE_UNUSED)
6085 tree t = *tp;
6087 if (IS_TYPE_OR_DECL_P (t))
6088 *walk_subtrees = 0;
6089 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6091 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6092 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6093 && (!TREE_OPERAND (t, 2)
6094 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6095 || (TREE_CODE (t) == COMPONENT_REF
6096 && (!TREE_OPERAND (t, 2)
6097 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6098 || TREE_CODE (t) == BIT_FIELD_REF
6099 || TREE_CODE (t) == REALPART_EXPR
6100 || TREE_CODE (t) == IMAGPART_EXPR
6101 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6102 || CONVERT_EXPR_P (t))
6103 t = TREE_OPERAND (t, 0);
6105 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6107 t = get_base_address (t);
6108 if (t && DECL_P (t)
6109 && DECL_MODE (t) != BLKmode)
6110 TREE_ADDRESSABLE (t) = 1;
6113 *walk_subtrees = 0;
6115 /* References of size POLY_INT_CST to a fixed-size object must go
6116 through memory. It's more efficient to force that here than
6117 to create temporary slots on the fly. */
6118 else if ((TREE_CODE (t) == MEM_REF || TREE_CODE (t) == TARGET_MEM_REF)
6119 && TYPE_SIZE (TREE_TYPE (t))
6120 && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
6122 tree base = get_base_address (t);
6123 if (base
6124 && DECL_P (base)
6125 && DECL_MODE (base) != BLKmode
6126 && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
6127 TREE_ADDRESSABLE (base) = 1;
6128 *walk_subtrees = 0;
6131 return NULL_TREE;
6134 /* If there's a chance to get a pseudo for t, and t would be of float mode
6135 while the actual access is via an integer mode (lowered memcpy or similar
6136 access), then avoid the register expansion if the mode is likely not
6137 suitable as storage for raw-bits processing (like XFmode on i?86). */
6139 static void
6140 avoid_type_punning_on_regs (tree t)
6142 machine_mode access_mode = TYPE_MODE (TREE_TYPE (t));
6143 if (access_mode != BLKmode
6144 && !SCALAR_INT_MODE_P (access_mode))
6145 return;
6146 tree base = get_base_address (t);
6147 if (DECL_P (base)
6148 && !TREE_ADDRESSABLE (base)
6149 && FLOAT_MODE_P (DECL_MODE (base))
6150 && maybe_lt (GET_MODE_PRECISION (DECL_MODE (base)),
6151 GET_MODE_BITSIZE (GET_MODE_INNER (DECL_MODE (base))))
6152 /* Double check in the expensive way we really would get a pseudo. */
6153 && use_register_for_decl (base))
6154 TREE_ADDRESSABLE (base) = 1;
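/* Illustrative example (an assumed access pattern, for exposition only):
   a lowered memcpy that reads a float-mode decl through an integer mode
   looks roughly like

     long double d;                               // XFmode on i?86
     unsigned long long bits;
     __builtin_memcpy (&bits, &d, sizeof bits);   // integer-mode access

   If d were expanded to a pseudo, its raw bits (including the XFmode
   padding) could not be accessed reliably, so avoid_type_punning_on_regs
   marks d addressable and keeps it in memory.  */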
6157 /* RTL expansion is not able to compile array references with variable
6158 offsets for arrays stored in a single register. Discover such
6159 expressions and mark variables as addressable to avoid this
6160 scenario. */
6162 static void
6163 discover_nonconstant_array_refs (void)
6165 basic_block bb;
6166 gimple_stmt_iterator gsi;
6168 FOR_EACH_BB_FN (bb, cfun)
6169 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6171 gimple *stmt = gsi_stmt (gsi);
6172 if (!is_gimple_debug (stmt))
6174 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6175 gcall *call = dyn_cast <gcall *> (stmt);
6176 if (call && gimple_call_internal_p (call))
6177 switch (gimple_call_internal_fn (call))
6179 case IFN_LOAD_LANES:
6180 /* The source must be a MEM. */
6181 mark_addressable (gimple_call_arg (call, 0));
6182 break;
6183 case IFN_STORE_LANES:
6184 /* The destination must be a MEM. */
6185 mark_addressable (gimple_call_lhs (call));
6186 break;
6187 default:
6188 break;
6190 if (gimple_vdef (stmt))
6192 tree t = gimple_get_lhs (stmt);
6193 if (t && REFERENCE_CLASS_P (t))
6194 avoid_type_punning_on_regs (t);
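/* Illustrative example (assumed code, for exposition only) of the
   situation handled above:

     int tmp[2];
     ...
     return tmp[i];        // i is not a compile-time constant

   tmp is small enough that it could otherwise be expanded to a single
   register; marking it TREE_ADDRESSABLE forces it into memory so the
   variable-index reference can be expanded.  */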
6200 /* This function sets crtl->args.internal_arg_pointer to a virtual
6201 register if DRAP is needed. Local register allocator will replace
6202 virtual_incoming_args_rtx with the virtual register. */
6204 static void
6205 expand_stack_alignment (void)
6207 rtx drap_rtx;
6208 unsigned int preferred_stack_boundary;
6210 if (! SUPPORTS_STACK_ALIGNMENT)
6211 return;
6213 if (cfun->calls_alloca
6214 || cfun->has_nonlocal_label
6215 || crtl->has_nonlocal_goto)
6216 crtl->need_drap = true;
6218 /* Call update_stack_boundary here again to update incoming stack
6219 boundary. It may set incoming stack alignment to a different
6220 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6221 use the minimum incoming stack alignment to check if it is OK
6222 to perform sibcall optimization since sibcall optimization will
6223 only align the outgoing stack to incoming stack boundary. */
6224 if (targetm.calls.update_stack_boundary)
6225 targetm.calls.update_stack_boundary ();
6227 /* The incoming stack frame has to be aligned at least at
6228 parm_stack_boundary. */
6229 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6231 /* Update crtl->stack_alignment_estimated and use it later to align the
6232 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6233 exceptions since callgraph doesn't collect incoming stack alignment
6234 in this case. */
6235 if (cfun->can_throw_non_call_exceptions
6236 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6237 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6238 else
6239 preferred_stack_boundary = crtl->preferred_stack_boundary;
6240 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6241 crtl->stack_alignment_estimated = preferred_stack_boundary;
6242 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6243 crtl->stack_alignment_needed = preferred_stack_boundary;
6245 gcc_assert (crtl->stack_alignment_needed
6246 <= crtl->stack_alignment_estimated);
6248 crtl->stack_realign_needed
6249 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6250 crtl->stack_realign_tried = crtl->stack_realign_needed;
6252 crtl->stack_realign_processed = true;
6254 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
6255 alignment. */
6256 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6257 drap_rtx = targetm.calls.get_drap_rtx ();
6259 /* stack_realign_drap and drap_rtx must match. */
6260 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6262 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6263 if (drap_rtx != NULL)
6265 crtl->args.internal_arg_pointer = drap_rtx;
6267 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6268 needed. */
6269 fixup_tail_calls ();
6274 static void
6275 expand_main_function (void)
6277 #if (defined(INVOKE__main) \
6278 || (!defined(HAS_INIT_SECTION) \
6279 && !defined(INIT_SECTION_ASM_OP) \
6280 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6281 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6282 #endif
6286 /* Expand code to initialize the stack_protect_guard. This is invoked at
6287 the beginning of a function to be protected. */
6289 static void
6290 stack_protect_prologue (void)
6292 tree guard_decl = targetm.stack_protect_guard ();
6293 rtx x, y;
6295 crtl->stack_protect_guard_decl = guard_decl;
6296 x = expand_normal (crtl->stack_protect_guard);
6298 if (targetm.have_stack_protect_combined_set () && guard_decl)
6300 gcc_assert (DECL_P (guard_decl));
6301 y = DECL_RTL (guard_decl);
6303 /* Allow the target to compute the address of Y and copy it to X without
6304 leaking Y into a register. This combined address + copy pattern
6305 allows the target to prevent spilling of any intermediate results by
6306 splitting it after the register allocator. */
6307 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6309 emit_insn (insn);
6310 return;
6314 if (guard_decl)
6315 y = expand_normal (guard_decl);
6316 else
6317 y = const0_rtx;
6319 /* Allow the target to copy from Y to X without leaking Y into a
6320 register. */
6321 if (targetm.have_stack_protect_set ())
6322 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6324 emit_insn (insn);
6325 return;
6328 /* Otherwise do a straight move. */
6329 emit_move_insn (x, y);
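/* Illustrative note (assumed usage, not code from this pass): the
   prologue above is emitted for functions the stack protector selects,
   e.g. when compiling

     void g (const char *s)
     {
       char buf[64];                  // local array makes g a candidate
       __builtin_strcpy (buf, s);
     }

   with -fstack-protector or -fstack-protector-strong; the guard value is
   copied into the frame here and compared again in the epilogue.  */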
6332 /* Translate the intermediate representation contained in the CFG
6333 from GIMPLE trees to RTL.
6335 We do conversion per basic block and preserve/update the tree CFG.
6336 This implies we have to do some magic as the CFG can simultaneously
6337 consist of basic blocks containing RTL and GIMPLE trees. This can
6338 confuse the CFG hooks, so be careful not to manipulate the CFG during
6339 the expansion. */
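/* Illustrative note (assumed usage): the RTL produced by this pass can be
   inspected with its dump, e.g.

     gcc -O2 -fdump-rtl-expand foo.c

   which writes the expanded RTL to a dump file named foo.c.*r.expand.  */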
6341 namespace {
6343 const pass_data pass_data_expand =
6345 RTL_PASS, /* type */
6346 "expand", /* name */
6347 OPTGROUP_NONE, /* optinfo_flags */
6348 TV_EXPAND, /* tv_id */
6349 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6350 | PROP_gimple_lcx
6351 | PROP_gimple_lvec
6352 | PROP_gimple_lva), /* properties_required */
6353 PROP_rtl, /* properties_provided */
6354 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6355 0, /* todo_flags_start */
6356 0, /* todo_flags_finish */
6359 class pass_expand : public rtl_opt_pass
6361 public:
6362 pass_expand (gcc::context *ctxt)
6363 : rtl_opt_pass (pass_data_expand, ctxt)
6366 /* opt_pass methods: */
6367 virtual unsigned int execute (function *);
6369 }; // class pass_expand
6371 unsigned int
6372 pass_expand::execute (function *fun)
6374 basic_block bb, init_block;
6375 edge_iterator ei;
6376 edge e;
6377 rtx_insn *var_seq, *var_ret_seq;
6378 unsigned i;
6380 timevar_push (TV_OUT_OF_SSA);
6381 rewrite_out_of_ssa (&SA);
6382 timevar_pop (TV_OUT_OF_SSA);
6383 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6385 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6387 gimple_stmt_iterator gsi;
6388 FOR_EACH_BB_FN (bb, cfun)
6389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6390 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6391 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6394 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6395 discover_nonconstant_array_refs ();
6397 /* Make sure all values used by the optimization passes have sane
6398 defaults. */
6399 reg_renumber = 0;
6401 /* Some backends want to know that we are expanding to RTL. */
6402 currently_expanding_to_rtl = 1;
6403 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6404 free_dominance_info (CDI_DOMINATORS);
6406 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6408 insn_locations_init ();
6409 if (!DECL_IS_BUILTIN (current_function_decl))
6411 /* Eventually, all FEs should explicitly set function_start_locus. */
6412 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6413 set_curr_insn_location
6414 (DECL_SOURCE_LOCATION (current_function_decl));
6415 else
6416 set_curr_insn_location (fun->function_start_locus);
6418 else
6419 set_curr_insn_location (UNKNOWN_LOCATION);
6420 prologue_location = curr_insn_location ();
6422 #ifdef INSN_SCHEDULING
6423 init_sched_attrs ();
6424 #endif
6426 /* Make sure first insn is a note even if we don't want linenums.
6427 This makes sure the first insn will never be deleted.
6428 Also, final expects a note to appear there. */
6429 emit_note (NOTE_INSN_DELETED);
6431 targetm.expand_to_rtl_hook ();
6432 crtl->init_stack_alignment ();
6433 fun->cfg->max_jumptable_ents = 0;
6435 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6436 of the function section at expansion time to predict the distance of calls. */
6437 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6439 /* Expand the variables recorded during gimple lowering. */
6440 timevar_push (TV_VAR_EXPAND);
6441 start_sequence ();
6443 var_ret_seq = expand_used_vars ();
6445 var_seq = get_insns ();
6446 end_sequence ();
6447 timevar_pop (TV_VAR_EXPAND);
6449 /* Honor stack protection warnings. */
6450 if (warn_stack_protect)
6452 if (fun->calls_alloca)
6453 warning (OPT_Wstack_protector,
6454 "stack protector not protecting local variables: "
6455 "variable length buffer");
6456 if (has_short_buffer && !crtl->stack_protect_guard)
6457 warning (OPT_Wstack_protector,
6458 "stack protector not protecting function: "
6459 "all local arrays are less than %d bytes long",
6460 (int) param_ssp_buffer_size);
6463 /* Set up parameters and prepare for return, for the function. */
6464 expand_function_start (current_function_decl);
6466 /* If we emitted any instructions for setting up the variables,
6467 emit them before the FUNCTION_START note. */
6468 if (var_seq)
6470 emit_insn_before (var_seq, parm_birth_insn);
6472 /* In expand_function_end we'll insert the alloca save/restore
6473 before parm_birth_insn. We've just inserted an alloca call.
6474 Adjust the pointer to match. */
6475 parm_birth_insn = var_seq;
6478 /* Now propagate the RTL assignment of each partition to the
6479 underlying var of each SSA_NAME. */
6480 tree name;
6482 FOR_EACH_SSA_NAME (i, name, cfun)
6484 /* We might have generated new SSA names in
6485 update_alias_info_with_stack_vars. They will have a NULL
6486 defining statement, and won't be part of the partitioning,
6487 so ignore those. */
6488 if (!SSA_NAME_DEF_STMT (name))
6489 continue;
6491 adjust_one_expanded_partition_var (name);
6494 /* Clean up RTL of variables that straddle multiple
6495 partitions, and check that the rtl of any PARM_DECLs that are not
6496 cleaned up is that of their default defs. */
6497 FOR_EACH_SSA_NAME (i, name, cfun)
6499 int part;
6501 /* We might have generated new SSA names in
6502 update_alias_info_with_stack_vars. They will have a NULL
6503 defining statement, and won't be part of the partitioning,
6504 so ignore those. */
6505 if (!SSA_NAME_DEF_STMT (name))
6506 continue;
6507 part = var_to_partition (SA.map, name);
6508 if (part == NO_PARTITION)
6509 continue;
6511 /* If this decl was marked as living in multiple places, reset
6512 this now to NULL. */
6513 tree var = SSA_NAME_VAR (name);
6514 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6515 SET_DECL_RTL (var, NULL);
6516 /* Check that the pseudos chosen by assign_parms are those of
6517 the corresponding default defs. */
6518 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6519 && (TREE_CODE (var) == PARM_DECL
6520 || TREE_CODE (var) == RESULT_DECL))
6522 rtx in = DECL_RTL_IF_SET (var);
6523 gcc_assert (in);
6524 rtx out = SA.partition_to_pseudo[part];
6525 gcc_assert (in == out);
6527 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6528 those expected by debug backends for each parm and for
6529 the result. This is particularly important for stabs,
6530 whose register elimination from parm's DECL_RTL may cause
6531 -fcompare-debug differences as SET_DECL_RTL changes reg's
6532 attrs. So, make sure the RTL already has the parm as the
6533 EXPR, so that it won't change. */
6534 SET_DECL_RTL (var, NULL_RTX);
6535 if (MEM_P (in))
6536 set_mem_attributes (in, var, true);
6537 SET_DECL_RTL (var, in);
6541 /* If this function is `main', emit a call to `__main'
6542 to run global initializers, etc. */
6543 if (DECL_NAME (current_function_decl)
6544 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6545 && DECL_FILE_SCOPE_P (current_function_decl))
6546 expand_main_function ();
6548 /* Initialize the stack_protect_guard field. This must happen after the
6549 call to __main (if any) so that the external decl is initialized. */
6550 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6551 stack_protect_prologue ();
6553 expand_phi_nodes (&SA);
6555 /* Release any stale SSA redirection data. */
6556 redirect_edge_var_map_empty ();
6558 /* Register rtl specific functions for cfg. */
6559 rtl_register_cfg_hooks ();
6561 init_block = construct_init_block ();
6563 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6564 remaining edges later. */
6565 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6566 e->flags &= ~EDGE_EXECUTABLE;
6568 /* If the function has too many markers, drop them while expanding. */
6569 if (cfun->debug_marker_count
6570 >= param_max_debug_marker_count)
6571 cfun->debug_nonbind_markers = false;
6573 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6574 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6575 next_bb)
6576 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6578 if (MAY_HAVE_DEBUG_BIND_INSNS)
6579 expand_debug_locations ();
6581 if (deep_ter_debug_map)
6583 delete deep_ter_debug_map;
6584 deep_ter_debug_map = NULL;
6587 /* Free stuff we no longer need after GIMPLE optimizations. */
6588 free_dominance_info (CDI_DOMINATORS);
6589 free_dominance_info (CDI_POST_DOMINATORS);
6590 delete_tree_cfg_annotations (fun);
6592 timevar_push (TV_OUT_OF_SSA);
6593 finish_out_of_ssa (&SA);
6594 timevar_pop (TV_OUT_OF_SSA);
6596 timevar_push (TV_POST_EXPAND);
6597 /* We are no longer in SSA form. */
6598 fun->gimple_df->in_ssa_p = false;
6599 loops_state_clear (LOOP_CLOSED_SSA);
6601 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6602 conservatively to true until they are all profile-aware. */
6603 delete lab_rtx_for_bb;
6604 free_histograms (fun);
6606 construct_exit_block ();
6607 insn_locations_finalize ();
6609 if (var_ret_seq)
6611 rtx_insn *after = return_label;
6612 rtx_insn *next = NEXT_INSN (after);
6613 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6614 after = next;
6615 emit_insn_after (var_ret_seq, after);
6618 /* Zap the tree EH table. */
6619 set_eh_throw_stmt_table (fun, NULL);
6621 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6622 to split edges, which edge insertions might do. */
6623 rebuild_jump_labels (get_insns ());
6625 /* If we have a single successor to the entry block, put the pending insns
6626 after parm birth, but before NOTE_INSN_FUNCTION_BEG. */
6627 if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6629 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6630 if (e->insns.r)
6632 rtx_insn *insns = e->insns.r;
6633 e->insns.r = NULL;
6634 rebuild_jump_labels_chain (insns);
6635 if (NOTE_P (parm_birth_insn)
6636 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6637 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6638 else
6639 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6643 /* Otherwise, and for the other edges as well, take the usual way. */
6644 commit_edge_insertions ();
6646 /* We're done expanding trees to RTL. */
6647 currently_expanding_to_rtl = 0;
6649 flush_mark_addressable_queue ();
6651 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6652 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6654 edge e;
6655 edge_iterator ei;
6656 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6658 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6659 e->flags &= ~EDGE_EXECUTABLE;
6661 /* At the moment not all abnormal edges match the RTL
6662 representation. It is safe to remove them here as
6663 find_many_sub_basic_blocks will rediscover them.
6664 In the future we should get this fixed properly. */
6665 if ((e->flags & EDGE_ABNORMAL)
6666 && !(e->flags & EDGE_SIBCALL))
6667 remove_edge (e);
6668 else
6669 ei_next (&ei);
6673 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6674 bitmap_ones (blocks);
6675 find_many_sub_basic_blocks (blocks);
6676 purge_all_dead_edges ();
6678 /* After initial rtl generation, call back to finish generating
6679 exception support code. We need to do this before cleaning up
6680 the CFG as the code does not expect dead landing pads. */
6681 if (fun->eh->region_tree != NULL)
6682 finish_eh_generation ();
6684 /* Call expand_stack_alignment after finishing all
6685 updates to crtl->preferred_stack_boundary. */
6686 expand_stack_alignment ();
6688 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6689 function. */
6690 if (crtl->tail_call_emit)
6691 fixup_tail_calls ();
6693 unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
6694 unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;
6696 tree patchable_function_entry_attr
6697 = lookup_attribute ("patchable_function_entry",
6698 DECL_ATTRIBUTES (cfun->decl));
6699 if (patchable_function_entry_attr)
6701 tree pp_val = TREE_VALUE (patchable_function_entry_attr);
6702 tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
6704 patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
6705 patch_area_entry = 0;
6706 if (TREE_CHAIN (pp_val) != NULL_TREE)
6708 tree patchable_function_entry_value2
6709 = TREE_VALUE (TREE_CHAIN (pp_val));
6710 patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
6714 if (patch_area_entry > patch_area_size)
6716 if (patch_area_size > 0)
6717 warning (OPT_Wattributes,
6718 "patchable function entry %wu exceeds size %wu",
6719 patch_area_entry, patch_area_size);
6720 patch_area_entry = 0;
6723 crtl->patch_area_size = patch_area_size;
6724 crtl->patch_area_entry = patch_area_entry;
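/* Illustrative example (assumed user-level usage, for exposition only) of
   where the values read above come from: either per function

     __attribute__ ((patchable_function_entry (3, 1)))
     void h (void);        // 3 NOPs in total, 1 of them before the entry

   or globally via -fpatchable-function-entry=N[,M], which sets
   function_entry_patch_area_size and function_entry_patch_area_start.  */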
6726 /* BB subdivision may have created basic blocks that are only reachable
6727 from unlikely bbs but not marked as such in the profile. */
6728 if (optimize)
6729 propagate_unlikely_bbs_forward ();
6731 /* Remove unreachable blocks, otherwise we cannot compute dominators
6732 which are needed for loop state verification. As a side-effect
6733 this also compacts blocks.
6734 ??? We cannot remove trivially dead insns here as for example
6735 the DRAP reg on i?86 is not magically live at this point.
6736 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6737 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6739 checking_verify_flow_info ();
6741 /* Initialize pseudos allocated for hard registers. */
6742 emit_initial_value_sets ();
6744 /* And finally unshare all RTL. */
6745 unshare_all_rtl ();
6747 /* There's no need to defer outputting this function any more; we
6748 know we want to output it. */
6749 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6751 /* Now that we're done expanding trees to RTL, we shouldn't have any
6752 more CONCATs anywhere. */
6753 generating_concat_p = 0;
6755 if (dump_file)
6757 fprintf (dump_file,
6758 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6759 /* And the pass manager will dump RTL for us. */
6762 /* If we're emitting a nested function, make sure its parent gets
6763 emitted as well. Doing otherwise confuses debug info. */
6765 tree parent;
6766 for (parent = DECL_CONTEXT (current_function_decl);
6767 parent != NULL_TREE;
6768 parent = get_containing_scope (parent))
6769 if (TREE_CODE (parent) == FUNCTION_DECL)
6770 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6773 TREE_ASM_WRITTEN (current_function_decl) = 1;
6775 /* After expanding, the return labels are no longer needed. */
6776 return_label = NULL;
6777 naked_return_label = NULL;
6779 /* After expanding, the tm_restart map is no longer needed. */
6780 if (fun->gimple_df->tm_restart)
6781 fun->gimple_df->tm_restart = NULL;
6783 /* Tag the blocks with a depth number so that change_scope can find
6784 the common parent easily. */
6785 set_block_levels (DECL_INITIAL (fun->decl), 0);
6786 default_rtl_profile ();
6788 /* For -dx discard loops now, otherwise IL verify in clean_state will
6789 ICE. */
6790 if (rtl_dump_and_exit)
6792 cfun->curr_properties &= ~PROP_loops;
6793 loop_optimizer_finalize ();
6796 timevar_pop (TV_POST_EXPAND);
6798 return 0;
6801 } // anon namespace
6803 rtl_opt_pass *
6804 make_pass_expand (gcc::context *ctxt)
6806 return new pass_expand (ctxt);