gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
88 /* This variable holds information helping the rewriting of SSA trees
89 into RTL. */
90 struct ssaexpand SA;
92 /* This variable holds the currently expanded gimple statement for purposes
93 of communicating the profile info to the builtin expanders. */
94 gimple *currently_expanding_gimple_stmt;
96 static rtx expand_debug_expr (tree);
98 static bool defer_stack_allocation (tree, bool);
100 static void record_alignment_for_reg_var (unsigned int);
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103 statement STMT. */
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
108 tree t;
109 enum gimple_rhs_class grhs_class;
111 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
113 if (grhs_class == GIMPLE_TERNARY_RHS)
114 t = build3 (gimple_assign_rhs_code (stmt),
115 TREE_TYPE (gimple_assign_lhs (stmt)),
116 gimple_assign_rhs1 (stmt),
117 gimple_assign_rhs2 (stmt),
118 gimple_assign_rhs3 (stmt));
119 else if (grhs_class == GIMPLE_BINARY_RHS)
120 t = build2 (gimple_assign_rhs_code (stmt),
121 TREE_TYPE (gimple_assign_lhs (stmt)),
122 gimple_assign_rhs1 (stmt),
123 gimple_assign_rhs2 (stmt));
124 else if (grhs_class == GIMPLE_UNARY_RHS)
125 t = build1 (gimple_assign_rhs_code (stmt),
126 TREE_TYPE (gimple_assign_lhs (stmt)),
127 gimple_assign_rhs1 (stmt));
128 else if (grhs_class == GIMPLE_SINGLE_RHS)
130 t = gimple_assign_rhs1 (stmt);
131 /* Avoid modifying this tree in place below. */
132 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 && gimple_location (stmt) != EXPR_LOCATION (t))
134 || (gimple_block (stmt)
135 && currently_expanding_to_rtl
136 && EXPR_P (t)))
137 t = copy_node (t);
139 else
140 gcc_unreachable ();
142 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143 SET_EXPR_LOCATION (t, gimple_location (stmt));
145 return t;
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
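/* That is, SSAVAR of an SSA_NAME is its underlying variable (NULL_TREE for
   anonymous SSA names), while SSAVAR of anything else is the operand itself.  */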
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
157 out of the same user variable being in multiple partitions (this is
158 less likely for compiler-introduced temps). */
160 static tree
161 leader_merge (tree cur, tree next)
163 if (cur == NULL || cur == next)
164 return next;
166 if (DECL_P (cur) && DECL_IGNORED_P (cur))
167 return cur;
169 if (DECL_P (next) && DECL_IGNORED_P (next))
170 return next;
172 return cur;
175 /* Associate declaration T with storage space X. If T is not an
176 SSA name this is exactly SET_DECL_RTL, otherwise make the
177 partition of T associated with X. */
178 static inline void
179 set_rtl (tree t, rtx x)
181 gcc_checking_assert (!x
182 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 || (use_register_for_decl (t)
184 ? (REG_P (x)
185 || (GET_CODE (x) == CONCAT
186 && (REG_P (XEXP (x, 0))
187 || SUBREG_P (XEXP (x, 0)))
188 && (REG_P (XEXP (x, 1))
189 || SUBREG_P (XEXP (x, 1))))
190 /* We need to accept PARALLELs for RESULT_DECLs
191 because of vector types with BLKmode returned
192 in multiple registers, but they are supposed
193 to be uncoalesced. */
194 || (GET_CODE (x) == PARALLEL
195 && SSAVAR (t)
196 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 && (GET_MODE (x) == BLKmode
198 || !flag_tree_coalesce_vars)))
199 : (MEM_P (x) || x == pc_rtx
200 || (GET_CODE (x) == CONCAT
201 && MEM_P (XEXP (x, 0))
202 && MEM_P (XEXP (x, 1))))));
203 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204 RESULT_DECLs has the expected mode. For memory, we accept
205 unpromoted modes, since that's what we're likely to get. For
206 PARM_DECLs and RESULT_DECLs, we'll have been called by
207 set_parm_rtl, which will give us the default def, so we don't
208 have to compute it ourselves. For RESULT_DECLs, we accept mode
209 mismatches too, as long as we have BLKmode or are not coalescing
210 across variables, so that we don't reject BLKmode PARALLELs or
211 unpromoted REGs. */
212 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 || (SSAVAR (t)
214 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 && (promote_ssa_mode (t, NULL) == BLKmode
216 || !flag_tree_coalesce_vars))
217 || !use_register_for_decl (t)
218 || GET_MODE (x) == promote_ssa_mode (t, NULL));
220 if (x)
222 bool skip = false;
223 tree cur = NULL_TREE;
224 rtx xm = x;
226 retry:
227 if (MEM_P (xm))
228 cur = MEM_EXPR (xm);
229 else if (REG_P (xm))
230 cur = REG_EXPR (xm);
231 else if (SUBREG_P (xm))
233 gcc_assert (subreg_lowpart_p (xm));
234 xm = SUBREG_REG (xm);
235 goto retry;
237 else if (GET_CODE (xm) == CONCAT)
239 xm = XEXP (xm, 0);
240 goto retry;
242 else if (GET_CODE (xm) == PARALLEL)
244 xm = XVECEXP (xm, 0, 0);
245 gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 xm = XEXP (xm, 0);
247 goto retry;
249 else if (xm == pc_rtx)
250 skip = true;
251 else
252 gcc_unreachable ();
254 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
256 if (cur != next)
258 if (MEM_P (x))
259 set_mem_attributes (x,
260 next && TREE_CODE (next) == SSA_NAME
261 ? TREE_TYPE (next)
262 : next, true);
263 else
264 set_reg_attrs_for_decl_rtl (next, x);
268 if (TREE_CODE (t) == SSA_NAME)
270 int part = var_to_partition (SA.map, t);
271 if (part != NO_PARTITION)
273 if (SA.partition_to_pseudo[part])
274 gcc_assert (SA.partition_to_pseudo[part] == x);
275 else if (x != pc_rtx)
276 SA.partition_to_pseudo[part] = x;
278 /* For the benefit of debug information at -O0 (where
279 vartracking doesn't run) record the place also in the base
280 DECL. For PARMs and RESULTs, do so only when setting the
281 default def. */
282 if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 && (VAR_P (SSA_NAME_VAR (t))
284 || SSA_NAME_IS_DEFAULT_DEF (t)))
286 tree var = SSA_NAME_VAR (t);
287 /* If we don't yet have something recorded, just record it now. */
288 if (!DECL_RTL_SET_P (var))
289 SET_DECL_RTL (var, x);
290 /* If we have it set already to "multiple places" don't
291 change this. */
292 else if (DECL_RTL (var) == pc_rtx)
294 /* If we have something recorded and it's not the same place
295 as we want to record now, we have multiple partitions for the
296 same base variable, with different places. We can't just
297 randomly choose one, hence we have to say that we don't know.
298 This only happens with optimization, and there var-tracking
299 will figure out the right thing. */
300 else if (DECL_RTL (var) != x)
301 SET_DECL_RTL (var, pc_rtx);
304 else
305 SET_DECL_RTL (t, x);
308 /* This structure holds data relevant to one variable that will be
309 placed in a stack slot. */
310 struct stack_var
312 /* The Variable. */
313 tree decl;
315 /* Initially, the size of the variable. Later, the size of the partition,
316 if this variable becomes its partition's representative. */
317 poly_uint64 size;
319 /* The *byte* alignment required for this variable. Or as, with the
320 size, the alignment for this partition. */
321 unsigned int alignb;
323 /* The partition representative. */
324 size_t representative;
326 /* The next stack variable in the partition, or EOC. */
327 size_t next;
329 /* The numbers of conflicting stack variables. */
330 bitmap conflicts;
333 #define EOC ((size_t)-1)
335 /* We have an array of such objects while deciding allocation. */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
341 /* Conflict bitmaps go on this obstack. This allows us to destroy
342 all of them in one big sweep. */
343 static bitmap_obstack stack_var_bitmap_obstack;
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346 is non-decreasing. */
347 static size_t *stack_vars_sorted;
349 /* The phase of the stack frame. This is the known misalignment of
350 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
351 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
352 static int frame_phase;
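/* For example, on a target where starting_frame_offset () is 4 and
   PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes), frame_phase would be 12,
   so that (4 + 12) % 16 == 0 as required.  These numbers are purely
   illustrative; both values are target-defined.  */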
354 /* Used during expand_used_vars to remember if we saw any decls for
355 which we'd like to enable stack smashing protection. */
356 static bool has_protected_decls;
358 /* Used during expand_used_vars. Remember if we saw a character buffer
359 smaller than our cutoff threshold. Used for -Wstack-protector. */
360 static bool has_short_buffer;
362 /* Compute the byte alignment to use for DECL. Ignore alignment
363 we can't do with expected alignment of the stack boundary. */
365 static unsigned int
366 align_local_variable (tree decl)
368 unsigned int align;
370 if (TREE_CODE (decl) == SSA_NAME)
371 align = TYPE_ALIGN (TREE_TYPE (decl));
372 else
374 align = LOCAL_DECL_ALIGNMENT (decl);
375 SET_DECL_ALIGN (decl, align);
377 return align / BITS_PER_UNIT;
380 /* Align the given offset BASE to ALIGN. Round up if ALIGN_UP is true,
381 round down otherwise. Return the aligned BASE value. */
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
386 return align_up ? (base + align - 1) & -align : base & -align;
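/* The mask trick above requires ALIGN to be a power of two; for instance,
   align_base (13, 8, true) yields 16 and align_base (13, 8, false) yields 8.  */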
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390 Return the frame offset. */
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
395 poly_int64 offset, new_frame_offset;
397 if (FRAME_GROWS_DOWNWARD)
399 new_frame_offset
400 = aligned_lower_bound (frame_offset - frame_phase - size,
401 align) + frame_phase;
402 offset = new_frame_offset;
404 else
406 new_frame_offset
407 = aligned_upper_bound (frame_offset - frame_phase,
408 align) + frame_phase;
409 offset = new_frame_offset;
410 new_frame_offset += size;
412 frame_offset = new_frame_offset;
414 if (frame_offset_overflow (frame_offset, cfun->decl))
415 frame_offset = offset = 0;
417 return offset;
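/* A small worked example, assuming frame_phase is 0 and FRAME_GROWS_DOWNWARD:
   with frame_offset at -16, requesting 8 bytes at alignment 8 moves
   frame_offset to -24 and returns -24 as the new slot's offset.  */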
420 /* Accumulate DECL into STACK_VARS. */
422 static void
423 add_stack_var (tree decl)
425 struct stack_var *v;
427 if (stack_vars_num >= stack_vars_alloc)
429 if (stack_vars_alloc)
430 stack_vars_alloc = stack_vars_alloc * 3 / 2;
431 else
432 stack_vars_alloc = 32;
433 stack_vars
434 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
436 if (!decl_to_stack_part)
437 decl_to_stack_part = new hash_map<tree, size_t>;
439 v = &stack_vars[stack_vars_num];
440 decl_to_stack_part->put (decl, stack_vars_num);
442 v->decl = decl;
443 tree size = TREE_CODE (decl) == SSA_NAME
444 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445 : DECL_SIZE_UNIT (decl);
446 v->size = tree_to_poly_uint64 (size);
447 /* Ensure that all variables have size, so that &a != &b for any two
448 variables that are simultaneously live. */
449 if (known_eq (v->size, 0U))
450 v->size = 1;
451 v->alignb = align_local_variable (decl);
452 /* An alignment of zero can mightily confuse us later. */
453 gcc_assert (v->alignb != 0);
455 /* All variables are initially in their own partition. */
456 v->representative = stack_vars_num;
457 v->next = EOC;
459 /* All variables initially conflict with no other. */
460 v->conflicts = NULL;
462 /* Ensure that this decl doesn't get put onto the list twice. */
463 set_rtl (decl, pc_rtx);
465 stack_vars_num++;
468 /* Make the decls associated with luids X and Y conflict. */
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
473 struct stack_var *a = &stack_vars[x];
474 struct stack_var *b = &stack_vars[y];
475 if (!a->conflicts)
476 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477 if (!b->conflicts)
478 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479 bitmap_set_bit (a->conflicts, y);
480 bitmap_set_bit (b->conflicts, x);
483 /* Check whether the decls associated with luids X and Y conflict. */
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
488 struct stack_var *a = &stack_vars[x];
489 struct stack_var *b = &stack_vars[y];
490 if (x == y)
491 return false;
492 /* Partitions containing an SSA name result from gimple registers
493 with things like unsupported modes. They are top-level and
494 hence conflict with everything else. */
495 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496 return true;
498 if (!a->conflicts || !b->conflicts)
499 return false;
500 return bitmap_bit_p (a->conflicts, y);
503 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
504 enter its partition number into bitmap DATA. */
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
509 bitmap active = (bitmap)data;
510 op = get_base_address (op);
511 if (op
512 && DECL_P (op)
513 && DECL_RTL_IF_SET (op) == pc_rtx)
515 size_t *v = decl_to_stack_part->get (op);
516 if (v)
517 bitmap_set_bit (active, *v);
519 return false;
522 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
523 record conflicts between it and all currently active other partitions
524 from bitmap DATA. */
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
529 bitmap active = (bitmap)data;
530 op = get_base_address (op);
531 if (op
532 && DECL_P (op)
533 && DECL_RTL_IF_SET (op) == pc_rtx)
535 size_t *v = decl_to_stack_part->get (op);
536 if (v && bitmap_set_bit (active, *v))
538 size_t num = *v;
539 bitmap_iterator bi;
540 unsigned i;
541 gcc_assert (num < stack_vars_num);
542 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 add_stack_var_conflict (num, i);
546 return false;
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550 at the end of BB, leaving the result in WORK. We're called to generate
551 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552 liveness. */
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
557 edge e;
558 edge_iterator ei;
559 gimple_stmt_iterator gsi;
560 walk_stmt_load_store_addr_fn visit;
562 bitmap_clear (work);
563 FOR_EACH_EDGE (e, ei, bb->preds)
564 bitmap_ior_into (work, (bitmap)e->src->aux);
566 visit = visit_op;
568 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
570 gimple *stmt = gsi_stmt (gsi);
571 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
573 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
575 gimple *stmt = gsi_stmt (gsi);
577 if (gimple_clobber_p (stmt))
579 tree lhs = gimple_assign_lhs (stmt);
580 size_t *v;
581 /* Nested function lowering might introduce LHSs
582 that are COMPONENT_REFs. */
583 if (!VAR_P (lhs))
584 continue;
585 if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 && (v = decl_to_stack_part->get (lhs)))
587 bitmap_clear_bit (work, *v);
589 else if (!is_gimple_debug (stmt))
591 if (for_conflict
592 && visit == visit_op)
594 /* If this is the first real instruction in this BB we need
595 to add conflicts for everything live at this point now.
596 Unlike classical liveness for named objects we can't
597 rely on seeing a def/use of the names we're interested in.
598 There might merely be indirect loads/stores. We'd not add any
599 conflicts for such partitions. */
600 bitmap_iterator bi;
601 unsigned i;
602 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
604 struct stack_var *a = &stack_vars[i];
605 if (!a->conflicts)
606 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 bitmap_ior_into (a->conflicts, work);
609 visit = visit_conflict;
611 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
616 /* Generate stack partition conflicts between all partitions that are
617 simultaneously live. */
619 static void
620 add_scope_conflicts (void)
622 basic_block bb;
623 bool changed;
624 bitmap work = BITMAP_ALLOC (NULL);
625 int *rpo;
626 int n_bbs;
628 /* We approximate the live range of a stack variable by taking the first
629 mention of its name as starting point(s), and by the end-of-scope
630 death clobber added by gimplify as ending point(s) of the range.
631 This overapproximates in the case where we for instance moved an address-taken
632 operation upward, without also moving a dereference to it upwards.
633 But it's conservatively correct as a variable never can hold values
634 before its name is mentioned at least once.
636 We then do a mostly classical bitmap liveness algorithm. */
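/* Roughly, the iteration below computes, for every basic block BB,
   ACTIVE (BB) = union of ACTIVE (pred) over BB's predecessors, with bits added
   for partitions mentioned in BB and cleared again at their gimple clobbers,
   visiting the blocks in reverse post order until no bitmap changes.  */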
638 FOR_ALL_BB_FN (bb, cfun)
639 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
641 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
644 changed = true;
645 while (changed)
647 int i;
648 changed = false;
649 for (i = 0; i < n_bbs; i++)
651 bitmap active;
652 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 active = (bitmap)bb->aux;
654 add_scope_conflicts_1 (bb, work, false);
655 if (bitmap_ior_into (active, work))
656 changed = true;
660 FOR_EACH_BB_FN (bb, cfun)
661 add_scope_conflicts_1 (bb, work, true);
663 free (rpo);
664 BITMAP_FREE (work);
665 FOR_ALL_BB_FN (bb, cfun)
666 BITMAP_FREE (bb->aux);
669 /* A subroutine of partition_stack_vars. A comparison function for qsort,
670 sorting an array of indices by the properties of the object. */
672 static int
673 stack_var_cmp (const void *a, const void *b)
675 size_t ia = *(const size_t *)a;
676 size_t ib = *(const size_t *)b;
677 unsigned int aligna = stack_vars[ia].alignb;
678 unsigned int alignb = stack_vars[ib].alignb;
679 poly_int64 sizea = stack_vars[ia].size;
680 poly_int64 sizeb = stack_vars[ib].size;
681 tree decla = stack_vars[ia].decl;
682 tree declb = stack_vars[ib].decl;
683 bool largea, largeb;
684 unsigned int uida, uidb;
686 /* Primary compare on "large" alignment. Large comes first. */
687 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689 if (largea != largeb)
690 return (int)largeb - (int)largea;
692 /* Secondary compare on size, decreasing */
693 int diff = compare_sizes_for_sort (sizeb, sizea);
694 if (diff != 0)
695 return diff;
697 /* Tertiary compare on true alignment, decreasing. */
698 if (aligna < alignb)
699 return -1;
700 if (aligna > alignb)
701 return 1;
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
706 if (TREE_CODE (decla) == SSA_NAME)
708 if (TREE_CODE (declb) == SSA_NAME)
709 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710 else
711 return -1;
713 else if (TREE_CODE (declb) == SSA_NAME)
714 return 1;
715 else
716 uida = DECL_UID (decla), uidb = DECL_UID (declb);
717 if (uida < uidb)
718 return 1;
719 if (uida > uidb)
720 return -1;
721 return 0;
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
727 /* If the points-to solution *PT points to variables that are in a partition
728 together with other variables, add all partition members to the pointed-to
729 variables bitmap. */
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 part_hashmap *decls_to_partitions,
734 hash_set<bitmap> *visited, bitmap temp)
736 bitmap_iterator bi;
737 unsigned i;
738 bitmap *part;
740 if (pt->anything
741 || pt->vars == NULL
742 /* The pointed-to vars bitmap is shared, it is enough to
743 visit it once. */
744 || visited->add (pt->vars))
745 return;
747 bitmap_clear (temp);
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add we make sure to visit each of the partitions only
751 once. */
752 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753 if ((!temp
754 || !bitmap_bit_p (temp, i))
755 && (part = decls_to_partitions->get (i)))
756 bitmap_ior_into (temp, *part);
757 if (!bitmap_empty_p (temp))
758 bitmap_ior_into (pt->vars, temp);
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
764 rewritten. */
766 static void
767 update_alias_info_with_stack_vars (void)
769 part_hashmap *decls_to_partitions = NULL;
770 size_t i, j;
771 tree var = NULL_TREE;
773 for (i = 0; i < stack_vars_num; i++)
775 bitmap part = NULL;
776 tree name;
777 struct ptr_info_def *pi;
779 /* Not interested in partitions with a single variable. */
780 if (stack_vars[i].representative != i
781 || stack_vars[i].next == EOC)
782 continue;
784 if (!decls_to_partitions)
786 decls_to_partitions = new part_hashmap;
787 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var == NULL_TREE)
794 var = create_tmp_var (ptr_type_node);
795 name = make_ssa_name (var);
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part = BITMAP_GGC_ALLOC ();
800 for (j = i; j != EOC; j = stack_vars[j].next)
802 tree decl = stack_vars[j].decl;
803 unsigned int uid = DECL_PT_UID (decl);
804 bitmap_set_bit (part, uid);
805 decls_to_partitions->put (uid, part);
806 cfun->gimple_df->decls_to_pointers->put (decl, name);
807 if (TREE_ADDRESSABLE (decl))
808 TREE_ADDRESSABLE (name) = 1;
811 /* Make the SSA name point to all partition members. */
812 pi = get_ptr_info (name);
813 pt_solution_set (&pi->pt, part, false);
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions)
820 unsigned i;
821 tree name;
822 hash_set<bitmap> visited;
823 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
825 FOR_EACH_SSA_NAME (i, name, cfun)
827 struct ptr_info_def *pi;
829 if (POINTER_TYPE_P (TREE_TYPE (name))
830 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 &visited, temp);
835 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 decls_to_partitions, &visited, temp);
838 delete decls_to_partitions;
839 BITMAP_FREE (temp);
843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
845 Merge them into a single partition A. */
847 static void
848 union_stack_vars (size_t a, size_t b)
850 struct stack_var *vb = &stack_vars[b];
851 bitmap_iterator bi;
852 unsigned u;
854 gcc_assert (stack_vars[b].next == EOC);
855 /* Add B to A's partition. */
856 stack_vars[b].next = stack_vars[a].next;
857 stack_vars[b].representative = a;
858 stack_vars[a].next = b;
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars[a].alignb < stack_vars[b].alignb)
862 stack_vars[a].alignb = stack_vars[b].alignb;
864 /* Update the interference graph and merge the conflicts. */
865 if (vb->conflicts)
867 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 add_stack_var_conflict (a, stack_vars[u].representative);
869 BITMAP_FREE (vb->conflicts);
873 /* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
877 Sort the objects by size in descending order.
878 For each object A {
879 S = size(A)
880 O = 0
881 loop {
882 Look for the largest non-conflicting object B with size <= S.
883 UNION (A, B)
}
}
*/
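/* Note that the pairing loop below is quadratic in the number of stack
   variables; defer_stack_allocation is what keeps that number manageable,
   particularly when not optimizing.  */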
888 static void
889 partition_stack_vars (void)
891 size_t si, sj, n = stack_vars_num;
893 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894 for (si = 0; si < n; ++si)
895 stack_vars_sorted[si] = si;
897 if (n == 1)
898 return;
900 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
902 for (si = 0; si < n; ++si)
904 size_t i = stack_vars_sorted[si];
905 unsigned int ialign = stack_vars[i].alignb;
906 poly_int64 isize = stack_vars[i].size;
908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars[i].representative != i)
912 continue;
914 for (sj = si + 1; sj < n; ++sj)
916 size_t j = stack_vars_sorted[sj];
917 unsigned int jalign = stack_vars[j].alignb;
918 poly_int64 jsize = stack_vars[j].size;
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars[j].representative != j)
922 continue;
924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 break;
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
934 if (asan_sanitize_stack_p ()
935 && maybe_ne (isize, jsize)
936 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 break;
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i, j))
941 continue;
943 /* UNION the objects, placing J at OFFSET. */
944 union_stack_vars (i, j);
948 update_alias_info_with_stack_vars ();
951 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
953 static void
954 dump_stack_var_partition (void)
956 size_t si, i, j, n = stack_vars_num;
958 for (si = 0; si < n; ++si)
960 i = stack_vars_sorted[si];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars[i].representative != i)
964 continue;
966 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967 print_dec (stack_vars[i].size, dump_file);
968 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
970 for (j = i; j != EOC; j = stack_vars[j].next)
972 fputc ('\t', dump_file);
973 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
975 fputc ('\n', dump_file);
979 /* Assign rtl to DECL at BASE + OFFSET. */
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 poly_int64 offset)
985 unsigned align;
986 rtx x;
988 /* If this fails, we've overflowed the stack frame. Error nicely? */
989 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
991 x = plus_constant (Pmode, base, offset);
992 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl))
994 : DECL_MODE (SSAVAR (decl)), x);
996 if (TREE_CODE (decl) != SSA_NAME)
998 /* Set alignment we actually gave this decl if it isn't an SSA name.
999 If it is, we generate stack slots only accidentally, so it isn't as
1000 important; we'll simply use the alignment that is already set. */
1001 if (base == virtual_stack_vars_rtx)
1002 offset -= frame_phase;
1003 align = known_alignment (offset);
1004 align *= BITS_PER_UNIT;
1005 if (align == 0 || align > base_align)
1006 align = base_align;
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
1012 SET_DECL_ALIGN (decl, align);
1013 DECL_USER_ALIGN (decl) = 0;
1016 set_rtl (decl, x);
1019 struct stack_vars_data
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
1023 The vector is in reversed order; highest offset pairs come first. */
1024 auto_vec<HOST_WIDE_INT> asan_vec;
1026 /* Vector of partition representative decls in between the paddings. */
1027 auto_vec<tree> asan_decl_vec;
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1030 rtx asan_base;
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb;
1036 /* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1043 size_t si, i, j, n = stack_vars_num;
1044 poly_uint64 large_size = 0, large_alloc = 0;
1045 rtx large_base = NULL;
1046 unsigned large_align = 0;
1047 bool large_allocation_done = false;
1048 tree decl;
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
1052 no predicate is involved. */
1053 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1056 /* Find the total size of these variables. */
1057 for (si = 0; si < n; ++si)
1059 unsigned alignb;
1061 i = stack_vars_sorted[si];
1062 alignb = stack_vars[i].alignb;
1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb * BITS_PER_UNIT) > large_align)
1069 large_align = alignb * BITS_PER_UNIT;
1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 break;
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars[i].representative != i)
1077 continue;
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl = stack_vars[i].decl;
1082 if (TREE_CODE (decl) == SSA_NAME
1083 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 : DECL_RTL (decl) != pc_rtx)
1085 continue;
1087 large_size = aligned_upper_bound (large_size, alignb);
1088 large_size += stack_vars[i].size;
1092 for (si = 0; si < n; ++si)
1094 rtx base;
1095 unsigned base_align, alignb;
1096 poly_int64 offset;
1098 i = stack_vars_sorted[si];
1100 /* Skip variables that aren't partition representatives, for now. */
1101 if (stack_vars[i].representative != i)
1102 continue;
1104 /* Skip variables that have already had rtl assigned. See also
1105 add_stack_var where we perpetrate this pc_rtx hack. */
1106 decl = stack_vars[i].decl;
1107 if (TREE_CODE (decl) == SSA_NAME
1108 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 : DECL_RTL (decl) != pc_rtx)
1110 continue;
1112 /* Check the predicate to see whether this variable should be
1113 allocated in this pass. */
1114 if (pred && !pred (i))
1115 continue;
1117 alignb = stack_vars[i].alignb;
1118 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1120 base = virtual_stack_vars_rtx;
1121 /* ASAN description strings don't yet have a syntax for expressing
1122 polynomial offsets. */
1123 HOST_WIDE_INT prev_offset;
1124 if (asan_sanitize_stack_p ()
1125 && pred
1126 && frame_offset.is_constant (&prev_offset)
1127 && stack_vars[i].size.is_constant ())
1129 prev_offset = align_base (prev_offset,
1130 MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 !FRAME_GROWS_DOWNWARD);
1132 tree repr_decl = NULL_TREE;
1133 offset
1134 = alloc_stack_frame_space (stack_vars[i].size
1135 + ASAN_RED_ZONE_SIZE,
1136 MAX (alignb, ASAN_RED_ZONE_SIZE));
1138 data->asan_vec.safe_push (prev_offset);
1139 /* Allocating a constant amount of space from a constant
1140 starting offset must give a constant result. */
1141 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 .to_constant ());
1143 /* Find best representative of the partition.
1144 Prefer those with DECL_NAME, even better
1145 satisfying asan_protect_stack_decl predicate. */
1146 for (j = i; j != EOC; j = stack_vars[j].next)
1147 if (asan_protect_stack_decl (stack_vars[j].decl)
1148 && DECL_NAME (stack_vars[j].decl))
1150 repr_decl = stack_vars[j].decl;
1151 break;
1153 else if (repr_decl == NULL_TREE
1154 && DECL_P (stack_vars[j].decl)
1155 && DECL_NAME (stack_vars[j].decl))
1156 repr_decl = stack_vars[j].decl;
1157 if (repr_decl == NULL_TREE)
1158 repr_decl = stack_vars[i].decl;
1159 data->asan_decl_vec.safe_push (repr_decl);
1160 data->asan_alignb = MAX (data->asan_alignb, alignb);
1161 if (data->asan_base == NULL)
1162 data->asan_base = gen_reg_rtx (Pmode);
1163 base = data->asan_base;
1165 if (!STRICT_ALIGNMENT)
1166 base_align = crtl->max_used_stack_slot_alignment;
1167 else
1168 base_align = MAX (crtl->max_used_stack_slot_alignment,
1169 GET_MODE_ALIGNMENT (SImode)
1170 << ASAN_SHADOW_SHIFT);
1172 else
1174 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1175 base_align = crtl->max_used_stack_slot_alignment;
1178 else
1180 /* Large alignment is only processed in the last pass. */
1181 if (pred)
1182 continue;
1184 /* If there were any variables requiring "large" alignment, allocate
1185 space. */
1186 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1188 poly_int64 loffset;
1189 rtx large_allocsize;
1191 large_allocsize = gen_int_mode (large_size, Pmode);
1192 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1193 loffset = alloc_stack_frame_space
1194 (rtx_to_poly_int64 (large_allocsize),
1195 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1196 large_base = get_dynamic_stack_base (loffset, large_align);
1197 large_allocation_done = true;
1199 gcc_assert (large_base != NULL);
1201 large_alloc = aligned_upper_bound (large_alloc, alignb);
1202 offset = large_alloc;
1203 large_alloc += stack_vars[i].size;
1205 base = large_base;
1206 base_align = large_align;
1209 /* Create rtl for each variable based on their location within the
1210 partition. */
1211 for (j = i; j != EOC; j = stack_vars[j].next)
1213 expand_one_stack_var_at (stack_vars[j].decl,
1214 base, base_align,
1215 offset);
1219 gcc_assert (known_eq (large_alloc, large_size));
1222 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1223 static poly_uint64
1224 account_stack_vars (void)
1226 size_t si, j, i, n = stack_vars_num;
1227 poly_uint64 size = 0;
1229 for (si = 0; si < n; ++si)
1231 i = stack_vars_sorted[si];
1233 /* Skip variables that aren't partition representatives, for now. */
1234 if (stack_vars[i].representative != i)
1235 continue;
1237 size += stack_vars[i].size;
1238 for (j = i; j != EOC; j = stack_vars[j].next)
1239 set_rtl (stack_vars[j].decl, NULL);
1241 return size;
1244 /* Record the RTL assignment X for the default def of PARM. */
1246 extern void
1247 set_parm_rtl (tree parm, rtx x)
1249 gcc_assert (TREE_CODE (parm) == PARM_DECL
1250 || TREE_CODE (parm) == RESULT_DECL);
1252 if (x && !MEM_P (x))
1254 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1255 TYPE_MODE (TREE_TYPE (parm)),
1256 TYPE_ALIGN (TREE_TYPE (parm)));
1258 /* If the variable alignment is very large we'll dynamically
1259 allocate it, which means that in-frame portion is just a
1260 pointer. ??? We've got a pseudo for sure here, do we
1261 actually dynamically allocate its spilling area if needed?
1262 ??? Isn't it a problem when POINTER_SIZE also exceeds
1263 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */
1264 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1265 align = POINTER_SIZE;
1267 record_alignment_for_reg_var (align);
1270 tree ssa = ssa_default_def (cfun, parm);
1271 if (!ssa)
1272 return set_rtl (parm, x);
1274 int part = var_to_partition (SA.map, ssa);
1275 gcc_assert (part != NO_PARTITION);
1277 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1278 gcc_assert (changed);
1280 set_rtl (ssa, x);
1281 gcc_assert (DECL_RTL (parm) == x);
1284 /* A subroutine of expand_one_var. Called to immediately assign rtl
1285 to a variable to be allocated in the stack frame. */
1287 static void
1288 expand_one_stack_var_1 (tree var)
1290 poly_uint64 size;
1291 poly_int64 offset;
1292 unsigned byte_align;
1294 if (TREE_CODE (var) == SSA_NAME)
1296 tree type = TREE_TYPE (var);
1297 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1298 byte_align = TYPE_ALIGN_UNIT (type);
1300 else
1302 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1303 byte_align = align_local_variable (var);
1306 /* We handle highly aligned variables in expand_stack_vars. */
1307 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1309 offset = alloc_stack_frame_space (size, byte_align);
1311 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1312 crtl->max_used_stack_slot_alignment, offset);
1315 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1316 already assigned some MEM. */
1318 static void
1319 expand_one_stack_var (tree var)
1321 if (TREE_CODE (var) == SSA_NAME)
1323 int part = var_to_partition (SA.map, var);
1324 if (part != NO_PARTITION)
1326 rtx x = SA.partition_to_pseudo[part];
1327 gcc_assert (x);
1328 gcc_assert (MEM_P (x));
1329 return;
1333 return expand_one_stack_var_1 (var);
1336 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1337 that will reside in a hard register. */
1339 static void
1340 expand_one_hard_reg_var (tree var)
1342 rest_of_decl_compilation (var, 0, 0);
1345 /* Record the alignment requirements of some variable assigned to a
1346 pseudo. */
1348 static void
1349 record_alignment_for_reg_var (unsigned int align)
1351 if (SUPPORTS_STACK_ALIGNMENT
1352 && crtl->stack_alignment_estimated < align)
1354 /* stack_alignment_estimated shouldn't change after stack
1355 realign decision is made. */
1356 gcc_assert (!crtl->stack_realign_processed);
1357 crtl->stack_alignment_estimated = align;
1360 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1361 So here we only make sure stack_alignment_needed >= align. */
1362 if (crtl->stack_alignment_needed < align)
1363 crtl->stack_alignment_needed = align;
1364 if (crtl->max_used_stack_slot_alignment < align)
1365 crtl->max_used_stack_slot_alignment = align;
1368 /* Create RTL for an SSA partition. */
1370 static void
1371 expand_one_ssa_partition (tree var)
1373 int part = var_to_partition (SA.map, var);
1374 gcc_assert (part != NO_PARTITION);
1376 if (SA.partition_to_pseudo[part])
1377 return;
1379 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1380 TYPE_MODE (TREE_TYPE (var)),
1381 TYPE_ALIGN (TREE_TYPE (var)));
1383 /* If the variable alignment is very large we'll dynamically allocate
1384 it, which means that in-frame portion is just a pointer. */
1385 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1386 align = POINTER_SIZE;
1388 record_alignment_for_reg_var (align);
1390 if (!use_register_for_decl (var))
1392 if (defer_stack_allocation (var, true))
1393 add_stack_var (var);
1394 else
1395 expand_one_stack_var_1 (var);
1396 return;
1399 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1400 rtx x = gen_reg_rtx (reg_mode);
1402 set_rtl (var, x);
1404 /* For a promoted variable, X will not be used directly but wrapped in a
1405 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1406 will assume that its upper bits can be inferred from its lower bits.
1407 Therefore, if X isn't initialized on every path from the entry, then
1408 we must do it manually in order to fulfill the above assumption. */
1409 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1410 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1411 emit_move_insn (x, CONST0_RTX (reg_mode));
1414 /* Record the association between the RTL generated for partition PART
1415 and the underlying variable of the SSA_NAME VAR. */
1417 static void
1418 adjust_one_expanded_partition_var (tree var)
1420 if (!var)
1421 return;
1423 tree decl = SSA_NAME_VAR (var);
1425 int part = var_to_partition (SA.map, var);
1426 if (part == NO_PARTITION)
1427 return;
1429 rtx x = SA.partition_to_pseudo[part];
1431 gcc_assert (x);
1433 set_rtl (var, x);
1435 if (!REG_P (x))
1436 return;
1438 /* Note if the object is a user variable. */
1439 if (decl && !DECL_ARTIFICIAL (decl))
1440 mark_user_reg (x);
1442 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1443 mark_reg_pointer (x, get_pointer_alignment (var));
1446 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1447 that will reside in a pseudo register. */
1449 static void
1450 expand_one_register_var (tree var)
1452 if (TREE_CODE (var) == SSA_NAME)
1454 int part = var_to_partition (SA.map, var);
1455 if (part != NO_PARTITION)
1457 rtx x = SA.partition_to_pseudo[part];
1458 gcc_assert (x);
1459 gcc_assert (REG_P (x));
1460 return;
1462 gcc_unreachable ();
1465 tree decl = var;
1466 tree type = TREE_TYPE (decl);
1467 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1468 rtx x = gen_reg_rtx (reg_mode);
1470 set_rtl (var, x);
1472 /* Note if the object is a user variable. */
1473 if (!DECL_ARTIFICIAL (decl))
1474 mark_user_reg (x);
1476 if (POINTER_TYPE_P (type))
1477 mark_reg_pointer (x, get_pointer_alignment (var));
1480 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1481 has some associated error, e.g. its type is error-mark. We just need
1482 to pick something that won't crash the rest of the compiler. */
1484 static void
1485 expand_one_error_var (tree var)
1487 machine_mode mode = DECL_MODE (var);
1488 rtx x;
1490 if (mode == BLKmode)
1491 x = gen_rtx_MEM (BLKmode, const0_rtx);
1492 else if (mode == VOIDmode)
1493 x = const0_rtx;
1494 else
1495 x = gen_reg_rtx (mode);
1497 SET_DECL_RTL (var, x);
1500 /* A subroutine of expand_one_var. VAR is a variable that will be
1501 allocated to the local stack frame. Return true if we wish to
1502 add VAR to STACK_VARS so that it will be coalesced with other
1503 variables. Return false to allocate VAR immediately.
1505 This function is used to reduce the number of variables considered
1506 for coalescing, which reduces the size of the quadratic problem. */
1508 static bool
1509 defer_stack_allocation (tree var, bool toplevel)
1511 tree size_unit = TREE_CODE (var) == SSA_NAME
1512 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1513 : DECL_SIZE_UNIT (var);
1514 poly_uint64 size;
1516 /* Whether the variable is small enough for immediate allocation not to be
1517 a problem with regard to the frame size. */
1518 bool smallish
1519 = (poly_int_tree_p (size_unit, &size)
1520 && (estimated_poly_value (size)
1521 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1523 /* If stack protection is enabled, *all* stack variables must be deferred,
1524 so that we can re-order the strings to the top of the frame.
1525 Similarly for Address Sanitizer. */
1526 if (flag_stack_protect || asan_sanitize_stack_p ())
1527 return true;
1529 unsigned int align = TREE_CODE (var) == SSA_NAME
1530 ? TYPE_ALIGN (TREE_TYPE (var))
1531 : DECL_ALIGN (var);
1533 /* We handle "large" alignment via dynamic allocation. We want to handle
1534 this extra complication in only one place, so defer them. */
1535 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1536 return true;
1538 bool ignored = TREE_CODE (var) == SSA_NAME
1539 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1540 : DECL_IGNORED_P (var);
1542 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1543 might be detached from their block and appear at toplevel when we reach
1544 here. We want to coalesce them with variables from other blocks when
1545 the immediate contribution to the frame size would be noticeable. */
1546 if (toplevel && optimize > 0 && ignored && !smallish)
1547 return true;
1549 /* Variables declared in the outermost scope automatically conflict
1550 with every other variable. The only reason to want to defer them
1551 at all is that, after sorting, we can more efficiently pack
1552 small variables in the stack frame. Continue to defer at -O2. */
1553 if (toplevel && optimize < 2)
1554 return false;
1556 /* Without optimization, *most* variables are allocated from the
1557 stack, which makes the quadratic problem large exactly when we
1558 want compilation to proceed as quickly as possible. On the
1559 other hand, we don't want the function's stack frame size to
1560 get completely out of hand. So we avoid adding scalars and
1561 "small" aggregates to the list at all. */
1562 if (optimize == 0 && smallish)
1563 return false;
1565 return true;
1568 /* A subroutine of expand_used_vars. Expand one variable according to
1569 its flavor. Variables to be placed on the stack are not actually
1570 expanded yet, merely recorded.
1571 When REALLY_EXPAND is false, only add stack values to be allocated.
1572 Return the stack usage this variable is supposed to take. */
1575 static poly_uint64
1576 expand_one_var (tree var, bool toplevel, bool really_expand)
1578 unsigned int align = BITS_PER_UNIT;
1579 tree origvar = var;
1581 var = SSAVAR (var);
1583 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1585 if (is_global_var (var))
1586 return 0;
1588 /* Because we don't know if VAR will be in register or on stack,
1589 we conservatively assume it will be on stack even if VAR is
1590 eventually put into register after RA pass. For non-automatic
1591 variables, which won't be on stack, we collect alignment of
1592 type and ignore user specified alignment. Similarly for
1593 SSA_NAMEs for which use_register_for_decl returns true. */
1594 if (TREE_STATIC (var)
1595 || DECL_EXTERNAL (var)
1596 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1597 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1598 TYPE_MODE (TREE_TYPE (var)),
1599 TYPE_ALIGN (TREE_TYPE (var)));
1600 else if (DECL_HAS_VALUE_EXPR_P (var)
1601 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1602 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1603 or variables which were assigned a stack slot already by
1604 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1605 changed from the offset chosen to it. */
1606 align = crtl->stack_alignment_estimated;
1607 else
1608 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1610 /* If the variable alignment is very large we'll dynamically allocate
1611 it, which means that in-frame portion is just a pointer. */
1612 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1613 align = POINTER_SIZE;
1616 record_alignment_for_reg_var (align);
1618 poly_uint64 size;
1619 if (TREE_CODE (origvar) == SSA_NAME)
1621 gcc_assert (!VAR_P (var)
1622 || (!DECL_EXTERNAL (var)
1623 && !DECL_HAS_VALUE_EXPR_P (var)
1624 && !TREE_STATIC (var)
1625 && TREE_TYPE (var) != error_mark_node
1626 && !DECL_HARD_REGISTER (var)
1627 && really_expand));
1629 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1631 else if (DECL_EXTERNAL (var))
1633 else if (DECL_HAS_VALUE_EXPR_P (var))
1635 else if (TREE_STATIC (var))
1637 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1639 else if (TREE_TYPE (var) == error_mark_node)
1641 if (really_expand)
1642 expand_one_error_var (var);
1644 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1646 if (really_expand)
1648 expand_one_hard_reg_var (var);
1649 if (!DECL_HARD_REGISTER (var))
1650 /* Invalid register specification. */
1651 expand_one_error_var (var);
1654 else if (use_register_for_decl (var))
1656 if (really_expand)
1657 expand_one_register_var (origvar);
1659 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1660 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1662 /* Reject variables which cover more than half of the address-space. */
1663 if (really_expand)
1665 error ("size of variable %q+D is too large", var);
1666 expand_one_error_var (var);
1669 else if (defer_stack_allocation (var, toplevel))
1670 add_stack_var (origvar);
1671 else
1673 if (really_expand)
1675 if (lookup_attribute ("naked",
1676 DECL_ATTRIBUTES (current_function_decl)))
1677 error ("cannot allocate stack for variable %q+D, naked function.",
1678 var);
1680 expand_one_stack_var (origvar);
1682 return size;
1684 return 0;
1687 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1688 expanding variables. Those variables that can be put into registers
1689 are allocated pseudos; those that can't are put on the stack.
1691 TOPLEVEL is true if this is the outermost BLOCK. */
1693 static void
1694 expand_used_vars_for_block (tree block, bool toplevel)
1696 tree t;
1698 /* Expand all variables at this level. */
1699 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1700 if (TREE_USED (t)
1701 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1702 || !DECL_NONSHAREABLE (t)))
1703 expand_one_var (t, toplevel, true);
1705 /* Expand all variables at containing levels. */
1706 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1707 expand_used_vars_for_block (t, false);
1710 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1711 and clear TREE_USED on all local variables. */
1713 static void
1714 clear_tree_used (tree block)
1716 tree t;
1718 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1719 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1720 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1721 || !DECL_NONSHAREABLE (t))
1722 TREE_USED (t) = 0;
1724 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1725 clear_tree_used (t);
1728 enum {
1729 SPCT_FLAG_DEFAULT = 1,
1730 SPCT_FLAG_ALL = 2,
1731 SPCT_FLAG_STRONG = 3,
1732 SPCT_FLAG_EXPLICIT = 4
1735 /* Examine TYPE and determine a bit mask of the following features. */
1737 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1738 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1739 #define SPCT_HAS_ARRAY 4
1740 #define SPCT_HAS_AGGREGATE 8
1742 static unsigned int
1743 stack_protect_classify_type (tree type)
1745 unsigned int ret = 0;
1746 tree t;
1748 switch (TREE_CODE (type))
1750 case ARRAY_TYPE:
1751 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1752 if (t == char_type_node
1753 || t == signed_char_type_node
1754 || t == unsigned_char_type_node)
1756 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1757 unsigned HOST_WIDE_INT len;
1759 if (!TYPE_SIZE_UNIT (type)
1760 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1761 len = max;
1762 else
1763 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1765 if (len < max)
1766 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1767 else
1768 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1770 else
1771 ret = SPCT_HAS_ARRAY;
1772 break;
1774 case UNION_TYPE:
1775 case QUAL_UNION_TYPE:
1776 case RECORD_TYPE:
1777 ret = SPCT_HAS_AGGREGATE;
1778 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1779 if (TREE_CODE (t) == FIELD_DECL)
1780 ret |= stack_protect_classify_type (TREE_TYPE (t));
1781 break;
1783 default:
1784 break;
1787 return ret;
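/* For instance, with the default --param ssp-buffer-size of 8 (an assumption
   here, since the parameter is configurable), a char buf[4] member classifies
   as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, while char buf[64]
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY.  */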
1790 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1791 part of the local stack frame. Remember if we ever return nonzero for
1792 any variable in this function. The return value is the phase number in
1793 which the variable should be allocated. */
1795 static int
1796 stack_protect_decl_phase (tree decl)
1798 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1799 int ret = 0;
1801 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1802 has_short_buffer = true;
1804 if (flag_stack_protect == SPCT_FLAG_ALL
1805 || flag_stack_protect == SPCT_FLAG_STRONG
1806 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1807 && lookup_attribute ("stack_protect",
1808 DECL_ATTRIBUTES (current_function_decl))))
1810 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1811 && !(bits & SPCT_HAS_AGGREGATE))
1812 ret = 1;
1813 else if (bits & SPCT_HAS_ARRAY)
1814 ret = 2;
1816 else
1817 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1819 if (ret)
1820 has_protected_decls = true;
1822 return ret;
1825 /* Two helper routines that check for phase 1 and phase 2. These are used
1826 as callbacks for expand_stack_vars. */
1828 static bool
1829 stack_protect_decl_phase_1 (size_t i)
1831 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1834 static bool
1835 stack_protect_decl_phase_2 (size_t i)
1837 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1840 /* And helper function that checks for asan phase (with stack protector
1841 it is phase 3). This is used as callback for expand_stack_vars.
1842 Returns true if any of the vars in the partition need to be protected. */
1844 static bool
1845 asan_decl_phase_3 (size_t i)
1847 while (i != EOC)
1849 if (asan_protect_stack_decl (stack_vars[i].decl))
1850 return true;
1851 i = stack_vars[i].next;
1853 return false;
1856 /* Ensure that variables in different stack protection phases conflict
1857 so that they are not merged and share the same stack slot. */
1859 static void
1860 add_stack_protection_conflicts (void)
1862 size_t i, j, n = stack_vars_num;
1863 unsigned char *phase;
1865 phase = XNEWVEC (unsigned char, n);
1866 for (i = 0; i < n; ++i)
1867 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1869 for (i = 0; i < n; ++i)
1871 unsigned char ph_i = phase[i];
1872 for (j = i + 1; j < n; ++j)
1873 if (ph_i != phase[j])
1874 add_stack_var_conflict (i, j);
1877 XDELETEVEC (phase);
1880 /* Create a decl for the guard at the top of the stack frame. */
1882 static void
1883 create_stack_guard (void)
1885 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1886 VAR_DECL, NULL, ptr_type_node);
1887 TREE_THIS_VOLATILE (guard) = 1;
1888 TREE_USED (guard) = 1;
1889 expand_one_stack_var (guard);
1890 crtl->stack_protect_guard = guard;
1893 /* Prepare for expanding variables. */
1894 static void
1895 init_vars_expansion (void)
1897 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1898 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1900 /* A map from decl to stack partition. */
1901 decl_to_stack_part = new hash_map<tree, size_t>;
1903 /* Initialize local stack smashing state. */
1904 has_protected_decls = false;
1905 has_short_buffer = false;
1908 /* Free up stack variable graph data. */
1909 static void
1910 fini_vars_expansion (void)
1912 bitmap_obstack_release (&stack_var_bitmap_obstack);
1913 if (stack_vars)
1914 XDELETEVEC (stack_vars);
1915 if (stack_vars_sorted)
1916 XDELETEVEC (stack_vars_sorted);
1917 stack_vars = NULL;
1918 stack_vars_sorted = NULL;
1919 stack_vars_alloc = stack_vars_num = 0;
1920 delete decl_to_stack_part;
1921 decl_to_stack_part = NULL;
1924 /* Make a fair guess for the size of the stack frame of the function
1925 in NODE. This doesn't have to be exact, the result is only used in
1926 the inline heuristics. So we don't want to run the full stack var
1927 packing algorithm (which is quadratic in the number of stack vars).
1928 Instead, we calculate the total size of all stack vars. This turns
1929 out to be a pretty fair estimate -- packing of stack vars doesn't
1930 happen very often. */
1932 HOST_WIDE_INT
1933 estimated_stack_frame_size (struct cgraph_node *node)
1935 poly_int64 size = 0;
1936 size_t i;
1937 tree var;
1938 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1940 push_cfun (fn);
1942 init_vars_expansion ();
1944 FOR_EACH_LOCAL_DECL (fn, i, var)
1945 if (auto_var_in_fn_p (var, fn->decl))
1946 size += expand_one_var (var, true, false);
1948 if (stack_vars_num > 0)
1950 /* Fake sorting the stack vars for account_stack_vars (). */
1951 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1952 for (i = 0; i < stack_vars_num; ++i)
1953 stack_vars_sorted[i] = i;
1954 size += account_stack_vars ();
1957 fini_vars_expansion ();
1958 pop_cfun ();
1959 return estimated_poly_value (size);
1962 /* Helper routine to check if a record or union contains an array field. */
1964 static int
1965 record_or_union_type_has_array_p (const_tree tree_type)
1967 tree fields = TYPE_FIELDS (tree_type);
1968 tree f;
1970 for (f = fields; f; f = DECL_CHAIN (f))
1971 if (TREE_CODE (f) == FIELD_DECL)
1973 tree field_type = TREE_TYPE (f);
1974 if (RECORD_OR_UNION_TYPE_P (field_type)
1975 && record_or_union_type_has_array_p (field_type))
1976 return 1;
1977 if (TREE_CODE (field_type) == ARRAY_TYPE)
1978 return 1;
1980 return 0;
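/* For example, a struct such as { int len; char buf[16]; }, or a union
   with a member of that struct type, makes this return 1; a record with
   only scalar fields returns 0.  */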
1983 /* Check if the current function has local referenced variables that
1984 have their addresses taken, contain an array, or are arrays. */
1986 static bool
1987 stack_protect_decl_p ()
1989 unsigned i;
1990 tree var;
1992 FOR_EACH_LOCAL_DECL (cfun, i, var)
1993 if (!is_global_var (var))
1995 tree var_type = TREE_TYPE (var);
1996 if (VAR_P (var)
1997 && (TREE_CODE (var_type) == ARRAY_TYPE
1998 || TREE_ADDRESSABLE (var)
1999 || (RECORD_OR_UNION_TYPE_P (var_type)
2000 && record_or_union_type_has_array_p (var_type))))
2001 return true;
2003 return false;
2006 /* Check if the current function has calls that use a return slot. */
2008 static bool
2009 stack_protect_return_slot_p ()
2011 basic_block bb;
2013 FOR_ALL_BB_FN (bb, cfun)
2014 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 !gsi_end_p (gsi); gsi_next (&gsi))
2017 gimple *stmt = gsi_stmt (gsi);
2018 /* This assumes that calls to internal-only functions never
2019 use a return slot. */
2020 if (is_gimple_call (stmt)
2021 && !gimple_call_internal_p (stmt)
2022 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 gimple_call_fndecl (stmt)))
2024 return true;
2026 return false;
2029 /* Expand all variables used in the function. */
2031 static rtx_insn *
2032 expand_used_vars (void)
2034 tree var, outer_block = DECL_INITIAL (current_function_decl);
2035 auto_vec<tree> maybe_local_decls;
2036 rtx_insn *var_end_seq = NULL;
2037 unsigned i;
2038 unsigned len;
2039 bool gen_stack_protect_signal = false;
2041 /* Compute the phase of the stack frame for this function. */
2043 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2044 int off = targetm.starting_frame_offset () % align;
2045 frame_phase = off ? align - off : 0;
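/* Purely as an illustration of the arithmetic above: with a 128-bit
   preferred stack boundary, ALIGN is 16; a target whose
   starting_frame_offset () is 8 gives OFF == 8 and frame_phase == 8,
   while a starting offset of 0 leaves frame_phase at 0.  */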
2048 /* Set TREE_USED on all variables in the local_decls. */
2049 FOR_EACH_LOCAL_DECL (cfun, i, var)
2050 TREE_USED (var) = 1;
2051 /* Clear TREE_USED on all variables associated with a block scope. */
2052 clear_tree_used (DECL_INITIAL (current_function_decl));
2054 init_vars_expansion ();
2056 if (targetm.use_pseudo_pic_reg ())
2057 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2059 for (i = 0; i < SA.map->num_partitions; i++)
2061 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 continue;
2064 tree var = partition_to_var (SA.map, i);
2066 gcc_assert (!virtual_operand_p (var));
2068 expand_one_ssa_partition (var);
2071 if (flag_stack_protect == SPCT_FLAG_STRONG)
2072 gen_stack_protect_signal
2073 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2075 /* At this point all variables on the local_decls with TREE_USED
2076 set are not associated with any block scope. Lay them out. */
2078 len = vec_safe_length (cfun->local_decls);
2079 FOR_EACH_LOCAL_DECL (cfun, i, var)
2081 bool expand_now = false;
2083 /* Expanded above already. */
2084 if (is_gimple_reg (var))
2086 TREE_USED (var) = 0;
2087 goto next;
2089 /* We didn't set a block for static or extern because it's hard
2090 to tell the difference between a global variable (re)declared
2091 in a local scope, and one that's really declared there to
2092 begin with. And it doesn't really matter much, since we're
2093 not giving them stack space. Expand them now. */
2094 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2095 expand_now = true;
2097 /* Expand variables not associated with any block now. Those created by
2098 the optimizers could be live anywhere in the function. Those that
2099 could possibly have been scoped originally and detached from their
2100 block will have their allocation deferred so we coalesce them with
2101 others when optimization is enabled. */
2102 else if (TREE_USED (var))
2103 expand_now = true;
2105 /* Finally, mark all variables on the list as used. We'll use
2106 this in a moment when we expand those associated with scopes. */
2107 TREE_USED (var) = 1;
2109 if (expand_now)
2110 expand_one_var (var, true, true);
2112 next:
2113 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2115 rtx rtl = DECL_RTL_IF_SET (var);
2117 /* Keep artificial non-ignored vars in cfun->local_decls
2118 chain until instantiate_decls. */
2119 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2120 add_local_decl (cfun, var);
2121 else if (rtl == NULL_RTX)
2122 /* If rtl isn't set yet, which can happen e.g. with
2123 -fstack-protector, retry before returning from this
2124 function. */
2125 maybe_local_decls.safe_push (var);
2129 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2131 +-----------------+-----------------+
2132 | ...processed... | ...duplicates...|
2133 +-----------------+-----------------+
2135 +-- LEN points here.
2137 We just want the duplicates, as those are the artificial
2138 non-ignored vars that we want to keep until instantiate_decls.
2139 Move them down and truncate the array. */
2140 if (!vec_safe_is_empty (cfun->local_decls))
2141 cfun->local_decls->block_remove (0, len);
2143 /* At this point, all variables within the block tree with TREE_USED
2144 set are actually used by the optimized function. Lay them out. */
2145 expand_used_vars_for_block (outer_block, true);
2147 if (stack_vars_num > 0)
2149 add_scope_conflicts ();
2151 /* If stack protection is enabled, we don't share space between
2152 vulnerable data and non-vulnerable data. */
2153 if (flag_stack_protect != 0
2154 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2155 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2156 && lookup_attribute ("stack_protect",
2157 DECL_ATTRIBUTES (current_function_decl)))))
2158 add_stack_protection_conflicts ();
2160 /* Now that we have collected all stack variables, and have computed a
2161 minimal interference graph, attempt to save some stack space. */
2162 partition_stack_vars ();
2163 if (dump_file)
2164 dump_stack_var_partition ();
2167 switch (flag_stack_protect)
2169 case SPCT_FLAG_ALL:
2170 create_stack_guard ();
2171 break;
2173 case SPCT_FLAG_STRONG:
2174 if (gen_stack_protect_signal
2175 || cfun->calls_alloca || has_protected_decls
2176 || lookup_attribute ("stack_protect",
2177 DECL_ATTRIBUTES (current_function_decl)))
2178 create_stack_guard ();
2179 break;
2181 case SPCT_FLAG_DEFAULT:
2182 if (cfun->calls_alloca || has_protected_decls
2183 || lookup_attribute ("stack_protect",
2184 DECL_ATTRIBUTES (current_function_decl)))
2185 create_stack_guard ();
2186 break;
2188 case SPCT_FLAG_EXPLICIT:
2189 if (lookup_attribute ("stack_protect",
2190 DECL_ATTRIBUTES (current_function_decl)))
2191 create_stack_guard ();
2192 break;
2193 default:
2197 /* Assign rtl to each variable based on these partitions. */
2198 if (stack_vars_num > 0)
2200 struct stack_vars_data data;
2202 data.asan_base = NULL_RTX;
2203 data.asan_alignb = 0;
2205 /* Reorder decls to be protected by iterating over the variables
2206 array multiple times, and allocating out of each phase in turn. */
2207 /* ??? We could probably integrate this into the qsort we did
2208 earlier, such that we naturally see these variables first,
2209 and thus naturally allocate things in the right order. */
2210 if (has_protected_decls)
2212 /* Phase 1 contains only character arrays. */
2213 expand_stack_vars (stack_protect_decl_phase_1, &data);
2215 /* Phase 2 contains other kinds of arrays. */
2216 if (flag_stack_protect == SPCT_FLAG_ALL
2217 || flag_stack_protect == SPCT_FLAG_STRONG
2218 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2219 && lookup_attribute ("stack_protect",
2220 DECL_ATTRIBUTES (current_function_decl))))
2221 expand_stack_vars (stack_protect_decl_phase_2, &data);
2224 if (asan_sanitize_stack_p ())
2225 /* Phase 3, any partitions that need asan protection
2226 in addition to phase 1 and 2. */
2227 expand_stack_vars (asan_decl_phase_3, &data);
2229 /* ASAN description strings don't yet have a syntax for expressing
2230 polynomial offsets. */
2231 HOST_WIDE_INT prev_offset;
2232 if (!data.asan_vec.is_empty ()
2233 && frame_offset.is_constant (&prev_offset))
2235 HOST_WIDE_INT offset, sz, redzonesz;
2236 redzonesz = ASAN_RED_ZONE_SIZE;
2237 sz = data.asan_vec[0] - prev_offset;
2238 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2239 && data.asan_alignb <= 4096
2240 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2241 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2242 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
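/* Illustrative numbers (assuming ASAN_RED_ZONE_SIZE is 32): with
   SZ == 40 and DATA.ASAN_ALIGNB == 64, the rounding gives
   (40 + 32 + 63) & ~63 == 128, so REDZONESZ == 88 and the protected
   area ends on a 64-byte boundary with at least a full red zone.  */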
2243 /* Allocating a constant amount of space from a constant
2244 starting offset must give a constant result. */
2245 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2246 .to_constant ());
2247 data.asan_vec.safe_push (prev_offset);
2248 data.asan_vec.safe_push (offset);
2249 /* Leave space for alignment if STRICT_ALIGNMENT. */
2250 if (STRICT_ALIGNMENT)
2251 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2252 << ASAN_SHADOW_SHIFT)
2253 / BITS_PER_UNIT, 1);
2255 var_end_seq
2256 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2257 data.asan_base,
2258 data.asan_alignb,
2259 data.asan_vec.address (),
2260 data.asan_decl_vec.address (),
2261 data.asan_vec.length ());
2264 expand_stack_vars (NULL, &data);
2267 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2268 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2269 virtual_stack_vars_rtx,
2270 var_end_seq);
2272 fini_vars_expansion ();
2274 /* If there were any artificial non-ignored vars without rtl
2275 found earlier, see if deferred stack allocation hasn't assigned
2276 rtl to them. */
2277 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2279 rtx rtl = DECL_RTL_IF_SET (var);
2281 /* Keep artificial non-ignored vars in cfun->local_decls
2282 chain until instantiate_decls. */
2283 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2284 add_local_decl (cfun, var);
2287 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2288 if (STACK_ALIGNMENT_NEEDED)
2290 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2291 if (FRAME_GROWS_DOWNWARD)
2292 frame_offset = aligned_lower_bound (frame_offset, align);
2293 else
2294 frame_offset = aligned_upper_bound (frame_offset, align);
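/* E.g. with a 16-byte boundary and FRAME_GROWS_DOWNWARD, a frame_offset
   of -40 is rounded down to -48; on a target whose frame grows upward,
   an offset of 40 would instead be rounded up to 48.  */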
2297 return var_end_seq;
2301 /* If we need to produce a detailed dump, print the tree representation
2302 for STMT to the dump file. SINCE is the last RTX after which the RTL
2303 generated for STMT should have been appended. */
2305 static void
2306 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2308 if (dump_file && (dump_flags & TDF_DETAILS))
2310 fprintf (dump_file, "\n;; ");
2311 print_gimple_stmt (dump_file, stmt, 0,
2312 TDF_SLIM | (dump_flags & TDF_LINENO));
2313 fprintf (dump_file, "\n");
2315 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2319 /* Maps the blocks that do not contain tree labels to rtx labels. */
2321 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2323 /* Returns the label_rtx expression for a label starting basic block BB. */
2325 static rtx_code_label *
2326 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2328 gimple_stmt_iterator gsi;
2329 tree lab;
2331 if (bb->flags & BB_RTL)
2332 return block_label (bb);
2334 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2335 if (elt)
2336 return *elt;
2338 /* Find the tree label if it is present. */
2340 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2342 glabel *lab_stmt;
2344 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2345 if (!lab_stmt)
2346 break;
2348 lab = gimple_label_label (lab_stmt);
2349 if (DECL_NONLOCAL (lab))
2350 break;
2352 return jump_target_rtx (lab);
2355 rtx_code_label *l = gen_label_rtx ();
2356 lab_rtx_for_bb->put (bb, l);
2357 return l;
2361 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2362 of a basic block where we just expanded the conditional at the end,
2363 possibly clean up the CFG and instruction sequence. LAST is the
2364 last instruction before the just emitted jump sequence. */
2366 static void
2367 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2369 /* Special case: when jumpif decides that the condition is
2370 trivial it emits an unconditional jump (and the necessary
2371 barrier). But we still have two edges, the fallthru one is
2372 wrong. purge_dead_edges would clean this up later. Unfortunately
2373 we have to insert insns (and split edges) before
2374 find_many_sub_basic_blocks and hence before purge_dead_edges.
2375 But splitting edges might create new blocks which depend on the
2376 fact that if there are two edges there's no barrier. So the
2377 barrier would get lost and verify_flow_info would ICE. Instead
2378 of auditing all edge splitters to care for the barrier (which
2379 normally isn't there in a cleaned CFG), fix it here. */
2380 if (BARRIER_P (get_last_insn ()))
2382 rtx_insn *insn;
2383 remove_edge (e);
2384 /* Now, we have a single successor block, if we have insns to
2385 insert on the remaining edge we potentially will insert
2386 it at the end of this block (if the dest block isn't feasible)
2387 in order to avoid splitting the edge. This insertion will take
2388 place in front of the last jump. But we might have emitted
2389 multiple jumps (conditional and one unconditional) to the
2390 same destination. Inserting in front of the last one then
2391 is a problem. See PR 40021. We fix this by deleting all
2392 jumps except the last unconditional one. */
2393 insn = PREV_INSN (get_last_insn ());
2394 /* Make sure we have an unconditional jump. Otherwise we're
2395 confused. */
2396 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2397 for (insn = PREV_INSN (insn); insn != last;)
2399 insn = PREV_INSN (insn);
2400 if (JUMP_P (NEXT_INSN (insn)))
2402 if (!any_condjump_p (NEXT_INSN (insn)))
2404 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2405 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2407 delete_insn (NEXT_INSN (insn));
2413 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2414 Returns a new basic block if we've terminated the current basic
2415 block and created a new one. */
2417 static basic_block
2418 expand_gimple_cond (basic_block bb, gcond *stmt)
2420 basic_block new_bb, dest;
2421 edge true_edge;
2422 edge false_edge;
2423 rtx_insn *last2, *last;
2424 enum tree_code code;
2425 tree op0, op1;
2427 code = gimple_cond_code (stmt);
2428 op0 = gimple_cond_lhs (stmt);
2429 op1 = gimple_cond_rhs (stmt);
2430 /* We're sometimes presented with such code:
2431 D.123_1 = x < y;
2432 if (D.123_1 != 0)
2434 This would expand to two comparisons which then later might
2435 be cleaned up by combine. But some pattern matchers like if-conversion
2436 work better when there's only one compare, so make up for this
2437 here as special exception if TER would have made the same change. */
2438 if (SA.values
2439 && TREE_CODE (op0) == SSA_NAME
2440 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2441 && TREE_CODE (op1) == INTEGER_CST
2442 && ((gimple_cond_code (stmt) == NE_EXPR
2443 && integer_zerop (op1))
2444 || (gimple_cond_code (stmt) == EQ_EXPR
2445 && integer_onep (op1)))
2446 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2448 gimple *second = SSA_NAME_DEF_STMT (op0);
2449 if (gimple_code (second) == GIMPLE_ASSIGN)
2451 enum tree_code code2 = gimple_assign_rhs_code (second);
2452 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2454 code = code2;
2455 op0 = gimple_assign_rhs1 (second);
2456 op1 = gimple_assign_rhs2 (second);
2458 /* If jumps are cheap and the target does not support conditional
2459 compare, turn some more codes into jumpy sequences. */
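/* For example (illustrative): given
     _1 = a & b;   (both 1-bit values)
     if (_1 != 0) goto ...;
   the condition is rewritten as a TRUTH_ANDIF_EXPR of A and B, so
   jumpif_1 emits two cheap conditional jumps instead of materializing
   the AND result first.  */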
2460 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2461 && targetm.gen_ccmp_first == NULL)
2463 if ((code2 == BIT_AND_EXPR
2464 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2465 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2466 || code2 == TRUTH_AND_EXPR)
2468 code = TRUTH_ANDIF_EXPR;
2469 op0 = gimple_assign_rhs1 (second);
2470 op1 = gimple_assign_rhs2 (second);
2472 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2474 code = TRUTH_ORIF_EXPR;
2475 op0 = gimple_assign_rhs1 (second);
2476 op1 = gimple_assign_rhs2 (second);
2482 last2 = last = get_last_insn ();
2484 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2485 set_curr_insn_location (gimple_location (stmt));
2487 /* These flags have no purpose in RTL land. */
2488 true_edge->flags &= ~EDGE_TRUE_VALUE;
2489 false_edge->flags &= ~EDGE_FALSE_VALUE;
2491 /* We can either have a pure conditional jump with one fallthru edge or
2492 two-way jump that needs to be decomposed into two basic blocks. */
2493 if (false_edge->dest == bb->next_bb)
2495 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2496 true_edge->probability);
2497 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2498 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2499 set_curr_insn_location (true_edge->goto_locus);
2500 false_edge->flags |= EDGE_FALLTHRU;
2501 maybe_cleanup_end_of_block (false_edge, last);
2502 return NULL;
2504 if (true_edge->dest == bb->next_bb)
2506 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2507 false_edge->probability);
2508 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2509 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2510 set_curr_insn_location (false_edge->goto_locus);
2511 true_edge->flags |= EDGE_FALLTHRU;
2512 maybe_cleanup_end_of_block (true_edge, last);
2513 return NULL;
2516 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2517 true_edge->probability);
2518 last = get_last_insn ();
2519 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2520 set_curr_insn_location (false_edge->goto_locus);
2521 emit_jump (label_rtx_for_bb (false_edge->dest));
2523 BB_END (bb) = last;
2524 if (BARRIER_P (BB_END (bb)))
2525 BB_END (bb) = PREV_INSN (BB_END (bb));
2526 update_bb_for_insn (bb);
2528 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2529 dest = false_edge->dest;
2530 redirect_edge_succ (false_edge, new_bb);
2531 false_edge->flags |= EDGE_FALLTHRU;
2532 new_bb->count = false_edge->count ();
2533 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2534 add_bb_to_loop (new_bb, loop);
2535 if (loop->latch == bb
2536 && loop->header == dest)
2537 loop->latch = new_bb;
2538 make_single_succ_edge (new_bb, dest, 0);
2539 if (BARRIER_P (BB_END (new_bb)))
2540 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2541 update_bb_for_insn (new_bb);
2543 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2545 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2547 set_curr_insn_location (true_edge->goto_locus);
2548 true_edge->goto_locus = curr_insn_location ();
2551 return new_bb;
2554 /* Mark all calls that can have a transaction restart. */
2556 static void
2557 mark_transaction_restart_calls (gimple *stmt)
2559 struct tm_restart_node dummy;
2560 tm_restart_node **slot;
2562 if (!cfun->gimple_df->tm_restart)
2563 return;
2565 dummy.stmt = stmt;
2566 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2567 if (slot)
2569 struct tm_restart_node *n = *slot;
2570 tree list = n->label_or_list;
2571 rtx_insn *insn;
2573 for (insn = next_real_insn (get_last_insn ());
2574 !CALL_P (insn);
2575 insn = next_real_insn (insn))
2576 continue;
2578 if (TREE_CODE (list) == LABEL_DECL)
2579 add_reg_note (insn, REG_TM, label_rtx (list));
2580 else
2581 for (; list ; list = TREE_CHAIN (list))
2582 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2586 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2587 statement STMT. */
2589 static void
2590 expand_call_stmt (gcall *stmt)
2592 tree exp, decl, lhs;
2593 bool builtin_p;
2594 size_t i;
2596 if (gimple_call_internal_p (stmt))
2598 expand_internal_call (stmt);
2599 return;
2602 /* If this is a call to a built-in function and it has no effect other
2603 than setting the lhs, try to implement it using an internal function
2604 instead. */
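/* E.g. (illustrative) a call like x = __builtin_sqrt (y) whose only
   effect is setting X may be replaced by an internal function such as
   IFN_SQRT when the target can expand that directly.  */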
2605 decl = gimple_call_fndecl (stmt);
2606 if (gimple_call_lhs (stmt)
2607 && !gimple_has_side_effects (stmt)
2608 && (optimize || (decl && called_as_built_in (decl))))
2610 internal_fn ifn = replacement_internal_fn (stmt);
2611 if (ifn != IFN_LAST)
2613 expand_internal_call (ifn, stmt);
2614 return;
2618 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2620 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2621 builtin_p = decl && DECL_BUILT_IN (decl);
2623 /* If this is not a builtin function, the function type through which the
2624 call is made may be different from the type of the function. */
2625 if (!builtin_p)
2626 CALL_EXPR_FN (exp)
2627 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2628 CALL_EXPR_FN (exp));
2630 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2631 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2633 for (i = 0; i < gimple_call_num_args (stmt); i++)
2635 tree arg = gimple_call_arg (stmt, i);
2636 gimple *def;
2637 /* TER addresses into arguments of builtin functions so we have a
2638 chance to infer more correct alignment information. See PR39954. */
2639 if (builtin_p
2640 && TREE_CODE (arg) == SSA_NAME
2641 && (def = get_gimple_for_ssa_name (arg))
2642 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2643 arg = gimple_assign_rhs1 (def);
2644 CALL_EXPR_ARG (exp, i) = arg;
2647 if (gimple_has_side_effects (stmt))
2648 TREE_SIDE_EFFECTS (exp) = 1;
2650 if (gimple_call_nothrow_p (stmt))
2651 TREE_NOTHROW (exp) = 1;
2653 if (gimple_no_warning_p (stmt))
2654 TREE_NO_WARNING (exp) = 1;
2656 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2657 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2658 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2659 if (decl
2660 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2661 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2662 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2663 else
2664 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2665 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2666 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2667 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2668 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2670 /* Ensure RTL is created for debug args. */
2671 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2673 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2674 unsigned int ix;
2675 tree dtemp;
2677 if (debug_args)
2678 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2680 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2681 expand_debug_expr (dtemp);
2685 rtx_insn *before_call = get_last_insn ();
2686 lhs = gimple_call_lhs (stmt);
2687 if (lhs)
2688 expand_assignment (lhs, exp, false);
2689 else
2690 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2692 /* If the gimple call is an indirect call and has 'nocf_check'
2693 attribute find a generated CALL insn to mark it as no
2694 control-flow verification is needed. */
2695 if (gimple_call_nocf_check_p (stmt)
2696 && !gimple_call_fndecl (stmt))
2698 rtx_insn *last = get_last_insn ();
2699 while (!CALL_P (last)
2700 && last != before_call)
2701 last = PREV_INSN (last);
2703 if (last != before_call)
2704 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2707 mark_transaction_restart_calls (stmt);
2711 /* Generate RTL for an asm statement (explicit assembler code).
2712 STRING is a STRING_CST node containing the assembler code text,
2713 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2714 insn is volatile; don't optimize it. */
2716 static void
2717 expand_asm_loc (tree string, int vol, location_t locus)
2719 rtx body;
2721 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2722 ggc_strdup (TREE_STRING_POINTER (string)),
2723 locus);
2725 MEM_VOLATILE_P (body) = vol;
2727 /* Non-empty basic ASM implicitly clobbers memory. */
2728 if (TREE_STRING_LENGTH (string) != 0)
2730 rtx asm_op, clob;
2731 unsigned i, nclobbers;
2732 auto_vec<rtx> input_rvec, output_rvec;
2733 auto_vec<const char *> constraints;
2734 auto_vec<rtx> clobber_rvec;
2735 HARD_REG_SET clobbered_regs;
2736 CLEAR_HARD_REG_SET (clobbered_regs);
2738 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2739 clobber_rvec.safe_push (clob);
2741 if (targetm.md_asm_adjust)
2742 targetm.md_asm_adjust (output_rvec, input_rvec,
2743 constraints, clobber_rvec,
2744 clobbered_regs);
2746 asm_op = body;
2747 nclobbers = clobber_rvec.length ();
2748 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2750 XVECEXP (body, 0, 0) = asm_op;
2751 for (i = 0; i < nclobbers; i++)
2752 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
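/* The emitted body is then roughly
     (parallel [(asm_input "template")
                (clobber (mem:BLK (scratch)))
                ...any target-added clobbers...])
   so later passes see the basic asm as clobbering memory.  */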
2755 emit_insn (body);
2758 /* Return the number of times character C occurs in string S. */
2759 static int
2760 n_occurrences (int c, const char *s)
2762 int n = 0;
2763 while (*s)
2764 n += (*s++ == c);
2765 return n;
2768 /* A subroutine of expand_asm_operands. Check that all operands have
2769 the same number of alternatives. Return true if so. */
2771 static bool
2772 check_operand_nalternatives (const vec<const char *> &constraints)
2774 unsigned len = constraints.length();
2775 if (len > 0)
2777 int nalternatives = n_occurrences (',', constraints[0]);
2779 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2781 error ("too many alternatives in %<asm%>");
2782 return false;
2785 for (unsigned i = 1; i < len; ++i)
2786 if (n_occurrences (',', constraints[i]) != nalternatives)
2788 error ("operand constraints for %<asm%> differ "
2789 "in number of alternatives");
2790 return false;
2793 return true;
2796 /* Check for overlap between registers marked in CLOBBERED_REGS and
2797 anything inappropriate in T. Emit an error and return true if a
2798 conflict is found, false if everything is ok. */
2800 static bool
2801 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2803 /* Conflicts between asm-declared register variables and the clobber
2804 list are not allowed. */
2805 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2807 if (overlap)
2809 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2810 DECL_NAME (overlap));
2812 /* Reset registerness to stop multiple errors emitted for a single
2813 variable. */
2814 DECL_REGISTER (overlap) = 0;
2815 return true;
2818 return false;
2821 /* Generate RTL for an asm statement with arguments.
2822 STRING is the instruction template.
2823 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2824 Each output or input has an expression in the TREE_VALUE and
2825 a tree list in TREE_PURPOSE which in turn contains a constraint
2826 name in TREE_VALUE (or NULL_TREE) and a constraint string
2827 in TREE_PURPOSE.
2828 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2829 that is clobbered by this insn.
2831 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2832 should be the fallthru basic block of the asm goto.
2834 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2835 Some elements of OUTPUTS may be replaced with trees representing temporary
2836 values. The caller should copy those temporary values to the originally
2837 specified lvalues.
2839 VOL nonzero means the insn is volatile; don't optimize it. */
2841 static void
2842 expand_asm_stmt (gasm *stmt)
2844 class save_input_location
2846 location_t old;
2848 public:
2849 explicit save_input_location(location_t where)
2851 old = input_location;
2852 input_location = where;
2855 ~save_input_location()
2857 input_location = old;
2861 location_t locus = gimple_location (stmt);
2863 if (gimple_asm_input_p (stmt))
2865 const char *s = gimple_asm_string (stmt);
2866 tree string = build_string (strlen (s), s);
2867 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2868 return;
2871 /* There are some legacy diagnostics in here; this also avoids adding a
2872 sixth parameter to targetm.md_asm_adjust. */
2873 save_input_location s_i_l(locus);
2875 unsigned noutputs = gimple_asm_noutputs (stmt);
2876 unsigned ninputs = gimple_asm_ninputs (stmt);
2877 unsigned nlabels = gimple_asm_nlabels (stmt);
2878 unsigned i;
2880 /* ??? Diagnose during gimplification? */
2881 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2883 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2884 return;
2887 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2888 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2889 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2891 /* Copy the gimple vectors into new vectors that we can manipulate. */
2893 output_tvec.safe_grow (noutputs);
2894 input_tvec.safe_grow (ninputs);
2895 constraints.safe_grow (noutputs + ninputs);
2897 for (i = 0; i < noutputs; ++i)
2899 tree t = gimple_asm_output_op (stmt, i);
2900 output_tvec[i] = TREE_VALUE (t);
2901 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2903 for (i = 0; i < ninputs; i++)
2905 tree t = gimple_asm_input_op (stmt, i);
2906 input_tvec[i] = TREE_VALUE (t);
2907 constraints[i + noutputs]
2908 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2911 /* ??? Diagnose during gimplification? */
2912 if (! check_operand_nalternatives (constraints))
2913 return;
2915 /* Count the number of meaningful clobbered registers, ignoring what
2916 we would ignore later. */
2917 auto_vec<rtx> clobber_rvec;
2918 HARD_REG_SET clobbered_regs;
2919 CLEAR_HARD_REG_SET (clobbered_regs);
2921 if (unsigned n = gimple_asm_nclobbers (stmt))
2923 clobber_rvec.reserve (n);
2924 for (i = 0; i < n; i++)
2926 tree t = gimple_asm_clobber_op (stmt, i);
2927 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2928 int nregs, j;
2930 j = decode_reg_name_and_count (regname, &nregs);
2931 if (j < 0)
2933 if (j == -2)
2935 /* ??? Diagnose during gimplification? */
2936 error ("unknown register name %qs in %<asm%>", regname);
2938 else if (j == -4)
2940 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2941 clobber_rvec.safe_push (x);
2943 else
2945 /* Otherwise we should have -1 == empty string
2946 or -3 == cc, which is not a register. */
2947 gcc_assert (j == -1 || j == -3);
2950 else
2951 for (int reg = j; reg < j + nregs; reg++)
2953 /* Clobbering the PIC register is an error. */
2954 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2956 /* ??? Diagnose during gimplification? */
2957 error ("PIC register clobbered by %qs in %<asm%>",
2958 regname);
2959 return;
2962 SET_HARD_REG_BIT (clobbered_regs, reg);
2963 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2964 clobber_rvec.safe_push (x);
2968 unsigned nclobbers = clobber_rvec.length();
2970 /* First pass over inputs and outputs checks validity and sets
2971 mark_addressable if needed. */
2972 /* ??? Diagnose during gimplification? */
2974 for (i = 0; i < noutputs; ++i)
2976 tree val = output_tvec[i];
2977 tree type = TREE_TYPE (val);
2978 const char *constraint;
2979 bool is_inout;
2980 bool allows_reg;
2981 bool allows_mem;
2983 /* Try to parse the output constraint. If that fails, there's
2984 no point in going further. */
2985 constraint = constraints[i];
2986 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2987 &allows_mem, &allows_reg, &is_inout))
2988 return;
2990 if (! allows_reg
2991 && (allows_mem
2992 || is_inout
2993 || (DECL_P (val)
2994 && REG_P (DECL_RTL (val))
2995 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2996 mark_addressable (val);
2999 for (i = 0; i < ninputs; ++i)
3001 bool allows_reg, allows_mem;
3002 const char *constraint;
3004 constraint = constraints[i + noutputs];
3005 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3006 constraints.address (),
3007 &allows_mem, &allows_reg))
3008 return;
3010 if (! allows_reg && allows_mem)
3011 mark_addressable (input_tvec[i]);
3014 /* Second pass evaluates arguments. */
3016 /* Make sure stack is consistent for asm goto. */
3017 if (nlabels > 0)
3018 do_pending_stack_adjust ();
3019 int old_generating_concat_p = generating_concat_p;
3021 /* Vector of RTX's of evaluated output operands. */
3022 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3023 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3024 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3026 output_rvec.safe_grow (noutputs);
3028 for (i = 0; i < noutputs; ++i)
3030 tree val = output_tvec[i];
3031 tree type = TREE_TYPE (val);
3032 bool is_inout, allows_reg, allows_mem, ok;
3033 rtx op;
3035 ok = parse_output_constraint (&constraints[i], i, ninputs,
3036 noutputs, &allows_mem, &allows_reg,
3037 &is_inout);
3038 gcc_assert (ok);
3040 /* If an output operand is not a decl or indirect ref and our constraint
3041 allows a register, make a temporary to act as an intermediate.
3042 Make the asm insn write into that, then we will copy it to
3043 the real output operand. Likewise for promoted variables. */
3045 generating_concat_p = 0;
3047 if ((TREE_CODE (val) == INDIRECT_REF
3048 && allows_mem)
3049 || (DECL_P (val)
3050 && (allows_mem || REG_P (DECL_RTL (val)))
3051 && ! (REG_P (DECL_RTL (val))
3052 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3053 || ! allows_reg
3054 || is_inout)
3056 op = expand_expr (val, NULL_RTX, VOIDmode,
3057 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3058 if (MEM_P (op))
3059 op = validize_mem (op);
3061 if (! allows_reg && !MEM_P (op))
3062 error ("output number %d not directly addressable", i);
3063 if ((! allows_mem && MEM_P (op))
3064 || GET_CODE (op) == CONCAT)
3066 rtx old_op = op;
3067 op = gen_reg_rtx (GET_MODE (op));
3069 generating_concat_p = old_generating_concat_p;
3071 if (is_inout)
3072 emit_move_insn (op, old_op);
3074 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3075 emit_move_insn (old_op, op);
3076 after_rtl_seq = get_insns ();
3077 after_rtl_end = get_last_insn ();
3078 end_sequence ();
3081 else
3083 op = assign_temp (type, 0, 1);
3084 op = validize_mem (op);
3085 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3086 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3088 generating_concat_p = old_generating_concat_p;
3090 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3091 expand_assignment (val, make_tree (type, op), false);
3092 after_rtl_seq = get_insns ();
3093 after_rtl_end = get_last_insn ();
3094 end_sequence ();
3096 output_rvec[i] = op;
3098 if (is_inout)
3099 inout_opnum.safe_push (i);
3102 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3103 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3105 input_rvec.safe_grow (ninputs);
3106 input_mode.safe_grow (ninputs);
3108 generating_concat_p = 0;
3110 for (i = 0; i < ninputs; ++i)
3112 tree val = input_tvec[i];
3113 tree type = TREE_TYPE (val);
3114 bool allows_reg, allows_mem, ok;
3115 const char *constraint;
3116 rtx op;
3118 constraint = constraints[i + noutputs];
3119 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3120 constraints.address (),
3121 &allows_mem, &allows_reg);
3122 gcc_assert (ok);
3124 /* EXPAND_INITIALIZER will not generate code for valid initializer
3125 constants, but will still generate code for other types of operand.
3126 This is the behavior we want for constant constraints. */
3127 op = expand_expr (val, NULL_RTX, VOIDmode,
3128 allows_reg ? EXPAND_NORMAL
3129 : allows_mem ? EXPAND_MEMORY
3130 : EXPAND_INITIALIZER);
3132 /* Never pass a CONCAT to an ASM. */
3133 if (GET_CODE (op) == CONCAT)
3134 op = force_reg (GET_MODE (op), op);
3135 else if (MEM_P (op))
3136 op = validize_mem (op);
3138 if (asm_operand_ok (op, constraint, NULL) <= 0)
3140 if (allows_reg && TYPE_MODE (type) != BLKmode)
3141 op = force_reg (TYPE_MODE (type), op);
3142 else if (!allows_mem)
3143 warning (0, "asm operand %d probably doesn%'t match constraints",
3144 i + noutputs);
3145 else if (MEM_P (op))
3147 /* We won't recognize either volatile memory or memory
3148 with a queued address as an available memory_operand
3149 at this point. Ignore it: clearly this *is* a memory. */
3151 else
3152 gcc_unreachable ();
3154 input_rvec[i] = op;
3155 input_mode[i] = TYPE_MODE (type);
3158 /* For in-out operands, copy output rtx to input rtx. */
3159 unsigned ninout = inout_opnum.length();
3160 for (i = 0; i < ninout; i++)
3162 int j = inout_opnum[i];
3163 rtx o = output_rvec[j];
3165 input_rvec.safe_push (o);
3166 input_mode.safe_push (GET_MODE (o));
3168 char buffer[16];
3169 sprintf (buffer, "%d", j);
3170 constraints.safe_push (ggc_strdup (buffer));
3172 ninputs += ninout;
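/* E.g. an "+r" output at operand 0 is represented from here on as output 0
   plus an extra input whose constraint string is "0", i.e. a matching
   constraint referring back to that output.  */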
3174 /* Sometimes we wish to automatically clobber registers across an asm.
3175 Case in point is when the i386 backend moved from cc0 to a hard reg --
3176 maintaining source-level compatibility means automatically clobbering
3177 the flags register. */
3178 rtx_insn *after_md_seq = NULL;
3179 if (targetm.md_asm_adjust)
3180 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3181 constraints, clobber_rvec,
3182 clobbered_regs);
3184 /* Do not allow the hook to change the output and input count,
3185 lest it mess up the operand numbering. */
3186 gcc_assert (output_rvec.length() == noutputs);
3187 gcc_assert (input_rvec.length() == ninputs);
3188 gcc_assert (constraints.length() == noutputs + ninputs);
3190 /* But it certainly can adjust the clobbers. */
3191 nclobbers = clobber_rvec.length();
3193 /* Third pass checks for easy conflicts. */
3194 /* ??? Why are we doing this on trees instead of rtx? */
3196 bool clobber_conflict_found = 0;
3197 for (i = 0; i < noutputs; ++i)
3198 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3199 clobber_conflict_found = 1;
3200 for (i = 0; i < ninputs - ninout; ++i)
3201 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3202 clobber_conflict_found = 1;
3204 /* Make vectors for the expression-rtx, constraint strings,
3205 and named operands. */
3207 rtvec argvec = rtvec_alloc (ninputs);
3208 rtvec constraintvec = rtvec_alloc (ninputs);
3209 rtvec labelvec = rtvec_alloc (nlabels);
3211 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3212 : GET_MODE (output_rvec[0])),
3213 ggc_strdup (gimple_asm_string (stmt)),
3214 "", 0, argvec, constraintvec,
3215 labelvec, locus);
3216 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3218 for (i = 0; i < ninputs; ++i)
3220 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3221 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3222 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3223 constraints[i + noutputs],
3224 locus);
3227 /* Copy labels to the vector. */
3228 rtx_code_label *fallthru_label = NULL;
3229 if (nlabels > 0)
3231 basic_block fallthru_bb = NULL;
3232 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3233 if (fallthru)
3234 fallthru_bb = fallthru->dest;
3236 for (i = 0; i < nlabels; ++i)
3238 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3239 rtx_insn *r;
3240 /* If asm goto has any labels in the fallthru basic block, use
3241 a label that we emit immediately after the asm goto. Expansion
3242 may insert further instructions into the same basic block after
3243 asm goto and if we don't do this, insertion of instructions on
3244 the fallthru edge might misbehave. See PR58670. */
3245 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3247 if (fallthru_label == NULL_RTX)
3248 fallthru_label = gen_label_rtx ();
3249 r = fallthru_label;
3251 else
3252 r = label_rtx (label);
3253 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3257 /* Now, for each output, construct an rtx
3258 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3259 ARGVEC CONSTRAINTS OPNAMES))
3260 If there is more than one, put them inside a PARALLEL. */
3262 if (nlabels > 0 && nclobbers == 0)
3264 gcc_assert (noutputs == 0);
3265 emit_jump_insn (body);
3267 else if (noutputs == 0 && nclobbers == 0)
3269 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3270 emit_insn (body);
3272 else if (noutputs == 1 && nclobbers == 0)
3274 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3275 emit_insn (gen_rtx_SET (output_rvec[0], body));
3277 else
3279 rtx obody = body;
3280 int num = noutputs;
3282 if (num == 0)
3283 num = 1;
3285 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3287 /* For each output operand, store a SET. */
3288 for (i = 0; i < noutputs; ++i)
3290 rtx src, o = output_rvec[i];
3291 if (i == 0)
3293 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3294 src = obody;
3296 else
3298 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3299 ASM_OPERANDS_TEMPLATE (obody),
3300 constraints[i], i, argvec,
3301 constraintvec, labelvec, locus);
3302 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3304 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3307 /* If there are no outputs (but there are some clobbers)
3308 store the bare ASM_OPERANDS into the PARALLEL. */
3309 if (i == 0)
3310 XVECEXP (body, 0, i++) = obody;
3312 /* Store (clobber REG) for each clobbered register specified. */
3313 for (unsigned j = 0; j < nclobbers; ++j)
3315 rtx clobbered_reg = clobber_rvec[j];
3317 /* Do sanity check for overlap between clobbers and respectively
3318 input and outputs that hasn't been handled. Such overlap
3319 should have been detected and reported above. */
3320 if (!clobber_conflict_found && REG_P (clobbered_reg))
3322 /* We test the old body (obody) contents to avoid
3323 tripping over the under-construction body. */
3324 for (unsigned k = 0; k < noutputs; ++k)
3325 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3326 internal_error ("asm clobber conflict with output operand");
3328 for (unsigned k = 0; k < ninputs - ninout; ++k)
3329 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3330 internal_error ("asm clobber conflict with input operand");
3333 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3336 if (nlabels > 0)
3337 emit_jump_insn (body);
3338 else
3339 emit_insn (body);
3342 generating_concat_p = old_generating_concat_p;
3344 if (fallthru_label)
3345 emit_label (fallthru_label);
3347 if (after_md_seq)
3348 emit_insn (after_md_seq);
3349 if (after_rtl_seq)
3350 emit_insn (after_rtl_seq);
3352 free_temp_slots ();
3353 crtl->has_asm_statement = 1;
3356 /* Emit code to jump to the address
3357 specified by the pointer expression EXP. */
3359 static void
3360 expand_computed_goto (tree exp)
3362 rtx x = expand_normal (exp);
3364 do_pending_stack_adjust ();
3365 emit_indirect_jump (x);
3368 /* Generate RTL code for a `goto' statement with target label LABEL.
3369 LABEL should be a LABEL_DECL tree node that was or will later be
3370 defined with `expand_label'. */
3372 static void
3373 expand_goto (tree label)
3375 if (flag_checking)
3377 /* Check for a nonlocal goto to a containing function. Should have
3378 gotten translated to __builtin_nonlocal_goto. */
3379 tree context = decl_function_context (label);
3380 gcc_assert (!context || context == current_function_decl);
3383 emit_jump (jump_target_rtx (label));
3386 /* Output a return with no value. */
3388 static void
3389 expand_null_return_1 (void)
3391 clear_pending_stack_adjust ();
3392 do_pending_stack_adjust ();
3393 emit_jump (return_label);
3396 /* Generate RTL to return from the current function, with no value.
3397 (That is, we do not do anything about returning any value.) */
3399 void
3400 expand_null_return (void)
3402 /* If this function was declared to return a value, but we
3403 didn't, clobber the return registers so that they are not
3404 propagated live to the rest of the function. */
3405 clobber_return_register ();
3407 expand_null_return_1 ();
3410 /* Generate RTL to return from the current function, with value VAL. */
3412 static void
3413 expand_value_return (rtx val)
3415 /* Copy the value to the return location unless it's already there. */
3417 tree decl = DECL_RESULT (current_function_decl);
3418 rtx return_reg = DECL_RTL (decl);
3419 if (return_reg != val)
3421 tree funtype = TREE_TYPE (current_function_decl);
3422 tree type = TREE_TYPE (decl);
3423 int unsignedp = TYPE_UNSIGNED (type);
3424 machine_mode old_mode = DECL_MODE (decl);
3425 machine_mode mode;
3426 if (DECL_BY_REFERENCE (decl))
3427 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3428 else
3429 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3431 if (mode != old_mode)
3432 val = convert_modes (mode, old_mode, val, unsignedp);
3434 if (GET_CODE (return_reg) == PARALLEL)
3435 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3436 else
3437 emit_move_insn (return_reg, val);
3440 expand_null_return_1 ();
3443 /* Generate RTL to evaluate the expression RETVAL and return it
3444 from the current function. */
3446 static void
3447 expand_return (tree retval, tree bounds)
3449 rtx result_rtl;
3450 rtx val = 0;
3451 tree retval_rhs;
3452 rtx bounds_rtl;
3454 /* If function wants no value, give it none. */
3455 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3457 expand_normal (retval);
3458 expand_null_return ();
3459 return;
3462 if (retval == error_mark_node)
3464 /* Treat this like a return of no value from a function that
3465 returns a value. */
3466 expand_null_return ();
3467 return;
3469 else if ((TREE_CODE (retval) == MODIFY_EXPR
3470 || TREE_CODE (retval) == INIT_EXPR)
3471 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3472 retval_rhs = TREE_OPERAND (retval, 1);
3473 else
3474 retval_rhs = retval;
3476 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3478 /* Put returned bounds to the right place. */
3479 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3480 if (bounds_rtl)
3482 rtx addr = NULL;
3483 rtx bnd = NULL;
3485 if (bounds && bounds != error_mark_node)
3487 bnd = expand_normal (bounds);
3488 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3490 else if (REG_P (bounds_rtl))
3492 if (bounds)
3493 bnd = chkp_expand_zero_bounds ();
3494 else
3496 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3497 addr = gen_rtx_MEM (Pmode, addr);
3498 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3501 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3503 else
3505 int n;
3507 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3509 if (bounds)
3510 bnd = chkp_expand_zero_bounds ();
3511 else
3513 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3514 addr = gen_rtx_MEM (Pmode, addr);
3517 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3519 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3520 if (!bounds)
3522 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3523 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3524 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3526 targetm.calls.store_returned_bounds (slot, bnd);
3530 else if (chkp_function_instrumented_p (current_function_decl)
3531 && !BOUNDED_P (retval_rhs)
3532 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3533 && TREE_CODE (retval_rhs) != RESULT_DECL)
3535 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3536 addr = gen_rtx_MEM (Pmode, addr);
3538 gcc_assert (MEM_P (result_rtl));
3540 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3543 /* If we are returning the RESULT_DECL, then the value has already
3544 been stored into it, so we don't have to do anything special. */
3545 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3546 expand_value_return (result_rtl);
3548 /* If the result is an aggregate that is being returned in one (or more)
3549 registers, load the registers here. */
3551 else if (retval_rhs != 0
3552 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3553 && REG_P (result_rtl))
3555 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3556 if (val)
3558 /* Use the mode of the result value on the return register. */
3559 PUT_MODE (result_rtl, GET_MODE (val));
3560 expand_value_return (val);
3562 else
3563 expand_null_return ();
3565 else if (retval_rhs != 0
3566 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3567 && (REG_P (result_rtl)
3568 || (GET_CODE (result_rtl) == PARALLEL)))
3570 /* Compute the return value into a temporary (usually a pseudo reg). */
3571 val
3572 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3573 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3574 val = force_not_mem (val);
3575 expand_value_return (val);
3577 else
3579 /* No hard reg used; calculate value into hard return reg. */
3580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 expand_value_return (result_rtl);
3585 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3586 STMT that doesn't require special handling for outgoing edges. That
3587 is no tailcalls and no GIMPLE_COND. */
3589 static void
3590 expand_gimple_stmt_1 (gimple *stmt)
3592 tree op0;
3594 set_curr_insn_location (gimple_location (stmt));
3596 switch (gimple_code (stmt))
3598 case GIMPLE_GOTO:
3599 op0 = gimple_goto_dest (stmt);
3600 if (TREE_CODE (op0) == LABEL_DECL)
3601 expand_goto (op0);
3602 else
3603 expand_computed_goto (op0);
3604 break;
3605 case GIMPLE_LABEL:
3606 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3607 break;
3608 case GIMPLE_NOP:
3609 case GIMPLE_PREDICT:
3610 break;
3611 case GIMPLE_SWITCH:
3613 gswitch *swtch = as_a <gswitch *> (stmt);
3614 if (gimple_switch_num_labels (swtch) == 1)
3615 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3616 else
3617 expand_case (swtch);
3619 break;
3620 case GIMPLE_ASM:
3621 expand_asm_stmt (as_a <gasm *> (stmt));
3622 break;
3623 case GIMPLE_CALL:
3624 expand_call_stmt (as_a <gcall *> (stmt));
3625 break;
3627 case GIMPLE_RETURN:
3629 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3630 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3632 if (op0 && op0 != error_mark_node)
3634 tree result = DECL_RESULT (current_function_decl);
3636 /* Mark we have return statement with missing bounds. */
3637 if (!bnd
3638 && chkp_function_instrumented_p (cfun->decl)
3639 && !DECL_P (op0))
3640 bnd = error_mark_node;
3642 /* If we are not returning the current function's RESULT_DECL,
3643 build an assignment to it. */
3644 if (op0 != result)
3646 /* I believe that a function's RESULT_DECL is unique. */
3647 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3649 /* ??? We'd like to use simply expand_assignment here,
3650 but this fails if the value is of BLKmode but the return
3651 decl is a register. expand_return has special handling
3652 for this combination, which eventually should move
3653 to common code. See comments there. Until then, let's
3654 build a modify expression :-/ */
3655 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3656 result, op0);
3660 if (!op0)
3661 expand_null_return ();
3662 else
3663 expand_return (op0, bnd);
3665 break;
3667 case GIMPLE_ASSIGN:
3669 gassign *assign_stmt = as_a <gassign *> (stmt);
3670 tree lhs = gimple_assign_lhs (assign_stmt);
3672 /* Tree expand used to fiddle with |= and &= of two bitfield
3673 COMPONENT_REFs here. This can't happen with gimple, the LHS
3674 of binary assigns must be a gimple reg. */
3676 if (TREE_CODE (lhs) != SSA_NAME
3677 || get_gimple_rhs_class (gimple_expr_code (stmt))
3678 == GIMPLE_SINGLE_RHS)
3680 tree rhs = gimple_assign_rhs1 (assign_stmt);
3681 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3682 == GIMPLE_SINGLE_RHS);
3683 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3684 /* Do not put locations on possibly shared trees. */
3685 && !is_gimple_min_invariant (rhs))
3686 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3687 if (TREE_CLOBBER_P (rhs))
3688 /* This is a clobber to mark the going out of scope for
3689 this LHS. */
3691 else
3692 expand_assignment (lhs, rhs,
3693 gimple_assign_nontemporal_move_p (
3694 assign_stmt));
3696 else
3698 rtx target, temp;
3699 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3700 struct separate_ops ops;
3701 bool promoted = false;
3703 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3704 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3705 promoted = true;
3707 ops.code = gimple_assign_rhs_code (assign_stmt);
3708 ops.type = TREE_TYPE (lhs);
3709 switch (get_gimple_rhs_class (ops.code))
3711 case GIMPLE_TERNARY_RHS:
3712 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3713 /* Fallthru */
3714 case GIMPLE_BINARY_RHS:
3715 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3716 /* Fallthru */
3717 case GIMPLE_UNARY_RHS:
3718 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3719 break;
3720 default:
3721 gcc_unreachable ();
3723 ops.location = gimple_location (stmt);
3725 /* If we want to use a nontemporal store, force the value to
3726 register first. If we store into a promoted register,
3727 don't directly expand to target. */
3728 temp = nontemporal || promoted ? NULL_RTX : target;
3729 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3730 EXPAND_NORMAL);
3732 if (temp == target)
3734 else if (promoted)
3736 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3737 /* If TEMP is a VOIDmode constant, use convert_modes to make
3738 sure that we properly convert it. */
3739 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3741 temp = convert_modes (GET_MODE (target),
3742 TYPE_MODE (ops.type),
3743 temp, unsignedp);
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 GET_MODE (target), temp, unsignedp);
3748 convert_move (SUBREG_REG (target), temp, unsignedp);
3750 else if (nontemporal && emit_storent_insn (target, temp))
3752 else
3754 temp = force_operand (temp, target);
3755 if (temp != target)
3756 emit_move_insn (target, temp);
3760 break;
3762 default:
3763 gcc_unreachable ();
3767 /* Expand one gimple statement STMT and return the last RTL instruction
3768 before any of the newly generated ones.
3770 In addition to generating the necessary RTL instructions this also
3771 sets REG_EH_REGION notes if necessary and sets the current source
3772 location for diagnostics. */
3774 static rtx_insn *
3775 expand_gimple_stmt (gimple *stmt)
3777 location_t saved_location = input_location;
3778 rtx_insn *last = get_last_insn ();
3779 int lp_nr;
3781 gcc_assert (cfun);
3783 /* We need to save and restore the current source location so that errors
3784 discovered during expansion are emitted with the right location. But
3785 it would be better if the diagnostic routines used the source location
3786 embedded in the tree nodes rather than globals. */
3787 if (gimple_has_location (stmt))
3788 input_location = gimple_location (stmt);
3790 expand_gimple_stmt_1 (stmt);
3792 /* Free any temporaries used to evaluate this statement. */
3793 free_temp_slots ();
3795 input_location = saved_location;
3797 /* Mark all insns that may trap. */
3798 lp_nr = lookup_stmt_eh_lp (stmt);
3799 if (lp_nr)
3801 rtx_insn *insn;
3802 for (insn = next_real_insn (last); insn;
3803 insn = next_real_insn (insn))
3805 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3806 /* If we want exceptions for non-call insns, any
3807 may_trap_p instruction may throw. */
3808 && GET_CODE (PATTERN (insn)) != CLOBBER
3809 && GET_CODE (PATTERN (insn)) != USE
3810 && insn_could_throw_p (insn))
3811 make_reg_eh_region_note (insn, 0, lp_nr);
3815 return last;
3818 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3819 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3820 generated a tail call (something that might be denied by the ABI
3821 rules governing the call; see calls.c).
3823 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3824 can still reach the rest of BB. The case here is __builtin_sqrt,
3825 where the NaN result goes through the external function (with a
3826 tailcall) and the normal result happens via a sqrt instruction. */
3828 static basic_block
3829 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3831 rtx_insn *last2, *last;
3832 edge e;
3833 edge_iterator ei;
3834 profile_probability probability;
3836 last2 = last = expand_gimple_stmt (stmt);
3838 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3839 if (CALL_P (last) && SIBLING_CALL_P (last))
3840 goto found;
3842 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3844 *can_fallthru = true;
3845 return NULL;
3847 found:
3848 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3849 Any instructions emitted here are about to be deleted. */
3850 do_pending_stack_adjust ();
3852 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3853 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3854 EH or abnormal edges, we shouldn't have created a tail call in
3855 the first place. So it seems to me we should just be removing
3856 all edges here, or redirecting the existing fallthru edge to
3857 the exit block. */
3859 probability = profile_probability::never ();
3861 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3863 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3865 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3866 e->dest->count -= e->count ();
3867 probability += e->probability;
3868 remove_edge (e);
3870 else
3871 ei_next (&ei);
3874 /* This is somewhat ugly: the call_expr expander often emits instructions
3875 after the sibcall (to perform the function return). These confuse the
3876 find_many_sub_basic_blocks code, so we need to get rid of these. */
3877 last = NEXT_INSN (last);
3878 gcc_assert (BARRIER_P (last));
3880 *can_fallthru = false;
3881 while (NEXT_INSN (last))
3883 /* For instance the sqrt builtin expander expands an if-then-else with a
3884 sibcall in the `then` arm and a label for the `else` arm. */
3885 if (LABEL_P (NEXT_INSN (last)))
3887 *can_fallthru = true;
3888 break;
3890 delete_insn (NEXT_INSN (last));
3893 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3894 | EDGE_SIBCALL);
3895 e->probability = probability;
3896 BB_END (bb) = last;
3897 update_bb_for_insn (bb);
3899 if (NEXT_INSN (last))
3901 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3903 last = BB_END (bb);
3904 if (BARRIER_P (last))
3905 BB_END (bb) = PREV_INSN (last);
3908 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3910 return bb;
3913 /* Return the difference between the floor and the truncated result of
3914 a signed division by OP1 with remainder MOD. */
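/* Illustrative example: for OP0 = -7 and OP1 = 2, truncating division
   gives -3 with MOD = -1; MOD != 0 and OP1 / MOD = -2 < 0, so the
   adjustment is -1 and the floor result is -3 + -1 = -4.  */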
3915 static rtx
3916 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3918 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3919 return gen_rtx_IF_THEN_ELSE
3920 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3921 gen_rtx_IF_THEN_ELSE
3922 (mode, gen_rtx_LT (BImode,
3923 gen_rtx_DIV (mode, op1, mod),
3924 const0_rtx),
3925 constm1_rtx, const0_rtx),
3926 const0_rtx);
3929 /* Return the difference between the ceil and the truncated result of
3930 a signed division by OP1 with remainder MOD. */
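/* Illustrative example: for OP0 = 7 and OP1 = 2, truncating division
   gives 3 with MOD = 1; MOD != 0 and OP1 / MOD = 2 > 0, so the
   adjustment is 1 and the ceiling result is 3 + 1 = 4.  */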
3931 static rtx
3932 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3934 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3935 return gen_rtx_IF_THEN_ELSE
3936 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3937 gen_rtx_IF_THEN_ELSE
3938 (mode, gen_rtx_GT (BImode,
3939 gen_rtx_DIV (mode, op1, mod),
3940 const0_rtx),
3941 const1_rtx, const0_rtx),
3942 const0_rtx);
3945 /* Return the difference between the ceil and the truncated result of
3946 an unsigned division by OP1 with remainder MOD. */
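/* Illustrative example: for unsigned OP0 = 7 and OP1 = 2, truncating
   division gives 3 with MOD = 1; MOD != 0, so the adjustment is 1 and
   the ceiling result is 3 + 1 = 4.  */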
3947 static rtx
3948 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3950 /* (mod != 0 ? 1 : 0) */
3951 return gen_rtx_IF_THEN_ELSE
3952 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3953 const1_rtx, const0_rtx);
3956 /* Return the difference between the rounded and the truncated result
3957 of a signed division by OP1 with remainder MOD. Halfway cases are
3958 rounded away from zero, rather than to the nearest even number. */
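/* Illustrative example: for OP0 = 7 and OP1 = 4, truncating division
   gives 1 with MOD = 3; abs (MOD) = 3 >= abs (OP1) - abs (MOD) = 1 and
   OP1 / MOD = 1 > 0, so the adjustment is 1 and the rounded result is
   1 + 1 = 2.  */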
3959 static rtx
3960 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3962 /* (abs (mod) >= abs (op1) - abs (mod)
3963 ? (op1 / mod > 0 ? 1 : -1)
3964 : 0) */
3965 return gen_rtx_IF_THEN_ELSE
3966 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3967 gen_rtx_MINUS (mode,
3968 gen_rtx_ABS (mode, op1),
3969 gen_rtx_ABS (mode, mod))),
3970 gen_rtx_IF_THEN_ELSE
3971 (mode, gen_rtx_GT (BImode,
3972 gen_rtx_DIV (mode, op1, mod),
3973 const0_rtx),
3974 const1_rtx, constm1_rtx),
3975 const0_rtx);
3978 /* Return the difference between the rounded and the truncated result
3979 of an unsigned division by OP1 with remainder MOD. Halfway cases
3980 are rounded away from zero, rather than to the nearest even
3981 number. */
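/* Illustrative example: for unsigned OP0 = 7 and OP1 = 4, truncating
   division gives 1 with MOD = 3; MOD = 3 >= OP1 - MOD = 1, so the
   adjustment is 1 and the rounded result is 1 + 1 = 2.  */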
3982 static rtx
3983 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3985 /* (mod >= op1 - mod ? 1 : 0) */
3986 return gen_rtx_IF_THEN_ELSE
3987 (mode, gen_rtx_GE (BImode, mod,
3988 gen_rtx_MINUS (mode, op1, mod)),
3989 const1_rtx, const0_rtx);
3992 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3993 any rtl. */
3995 static rtx
3996 convert_debug_memory_address (scalar_int_mode mode, rtx x,
3997 addr_space_t as)
3999 #ifndef POINTERS_EXTEND_UNSIGNED
4000 gcc_assert (mode == Pmode
4001 || mode == targetm.addr_space.address_mode (as));
4002 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4003 #else
4004 rtx temp;
4006 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4008 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4009 return x;
4011 /* X must have some form of address mode already. */
4012 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4013 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4014 x = lowpart_subreg (mode, x, xmode);
4015 else if (POINTERS_EXTEND_UNSIGNED > 0)
4016 x = gen_rtx_ZERO_EXTEND (mode, x);
4017 else if (!POINTERS_EXTEND_UNSIGNED)
4018 x = gen_rtx_SIGN_EXTEND (mode, x);
4019 else
4021 switch (GET_CODE (x))
4023 case SUBREG:
4024 if ((SUBREG_PROMOTED_VAR_P (x)
4025 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4026 || (GET_CODE (SUBREG_REG (x)) == PLUS
4027 && REG_P (XEXP (SUBREG_REG (x), 0))
4028 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4029 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4030 && GET_MODE (SUBREG_REG (x)) == mode)
4031 return SUBREG_REG (x);
4032 break;
4033 case LABEL_REF:
4034 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4035 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4036 return temp;
4037 case SYMBOL_REF:
4038 temp = shallow_copy_rtx (x);
4039 PUT_MODE (temp, mode);
4040 return temp;
4041 case CONST:
4042 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4043 if (temp)
4044 temp = gen_rtx_CONST (mode, temp);
4045 return temp;
4046 case PLUS:
4047 case MINUS:
4048 if (CONST_INT_P (XEXP (x, 1)))
4050 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4051 if (temp)
4052 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4054 break;
4055 default:
4056 break;
4058 /* Don't know how to express ptr_extend as an operation in debug info. */
4059 return NULL;
4061 #endif /* POINTERS_EXTEND_UNSIGNED */
4063 return x;
4066 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4067 by avoid_deep_ter_for_debug. */
4069 static hash_map<tree, tree> *deep_ter_debug_map;
4071 /* Split overly deep TER chains for debug stmts by using debug temporaries. */
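/* For example, a TERed chain such as
     a_1 = x_0 + 1; b_2 = a_1 * 2; c_3 = b_2 - 3; ...
   that reaches more than six definitions below a debug bind statement
   would otherwise be substituted wholesale into a single debug
   expression.  Instead, a DEBUG_EXPR_DECL is bound to the deep SSA name
   right after its definition, and later debug expansion uses that
   DEBUG_EXPR in its place.  */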
4073 static void
4074 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4076 use_operand_p use_p;
4077 ssa_op_iter iter;
4078 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4080 tree use = USE_FROM_PTR (use_p);
4081 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4082 continue;
4083 gimple *g = get_gimple_for_ssa_name (use);
4084 if (g == NULL)
4085 continue;
4086 if (depth > 6 && !stmt_ends_bb_p (g))
4088 if (deep_ter_debug_map == NULL)
4089 deep_ter_debug_map = new hash_map<tree, tree>;
4091 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4092 if (vexpr != NULL)
4093 continue;
4094 vexpr = make_node (DEBUG_EXPR_DECL);
4095 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4096 DECL_ARTIFICIAL (vexpr) = 1;
4097 TREE_TYPE (vexpr) = TREE_TYPE (use);
4098 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4099 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4100 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4101 avoid_deep_ter_for_debug (def_temp, 0);
4103 else
4104 avoid_deep_ter_for_debug (g, depth + 1);
4108 /* Return an RTX equivalent to the value of the parameter DECL. */
4110 static rtx
4111 expand_debug_parm_decl (tree decl)
4113 rtx incoming = DECL_INCOMING_RTL (decl);
4115 if (incoming
4116 && GET_MODE (incoming) != BLKmode
4117 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4118 || (MEM_P (incoming)
4119 && REG_P (XEXP (incoming, 0))
4120 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4122 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4124 #ifdef HAVE_window_save
4125 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4126 If the target machine has an explicit window save instruction, the
4127 actual entry value is the corresponding OUTGOING_REGNO instead. */
4128 if (REG_P (incoming)
4129 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4130 incoming
4131 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4132 OUTGOING_REGNO (REGNO (incoming)), 0);
4133 else if (MEM_P (incoming))
4135 rtx reg = XEXP (incoming, 0);
4136 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4138 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4139 incoming = replace_equiv_address_nv (incoming, reg);
4141 else
4142 incoming = copy_rtx (incoming);
4144 #endif
4146 ENTRY_VALUE_EXP (rtl) = incoming;
4147 return rtl;
4150 if (incoming
4151 && GET_MODE (incoming) != BLKmode
4152 && !TREE_ADDRESSABLE (decl)
4153 && MEM_P (incoming)
4154 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4155 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4156 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4157 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4158 return copy_rtx (incoming);
4160 return NULL_RTX;
4163 /* Return an RTX equivalent to the value of the tree expression EXP. */
4165 static rtx
4166 expand_debug_expr (tree exp)
4168 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4169 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4170 machine_mode inner_mode = VOIDmode;
4171 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4172 addr_space_t as;
4173 scalar_int_mode op0_mode, op1_mode, addr_mode;
4175 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4177 case tcc_expression:
4178 switch (TREE_CODE (exp))
4180 case COND_EXPR:
4181 case DOT_PROD_EXPR:
4182 case SAD_EXPR:
4183 case WIDEN_MULT_PLUS_EXPR:
4184 case WIDEN_MULT_MINUS_EXPR:
4185 case FMA_EXPR:
4186 goto ternary;
4188 case TRUTH_ANDIF_EXPR:
4189 case TRUTH_ORIF_EXPR:
4190 case TRUTH_AND_EXPR:
4191 case TRUTH_OR_EXPR:
4192 case TRUTH_XOR_EXPR:
4193 goto binary;
4195 case TRUTH_NOT_EXPR:
4196 goto unary;
4198 default:
4199 break;
4201 break;
4203 ternary:
4204 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4205 if (!op2)
4206 return NULL_RTX;
4207 /* Fall through. */
4209 binary:
4210 case tcc_binary:
4211 if (mode == BLKmode)
4212 return NULL_RTX;
4213 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4214 if (!op1)
4215 return NULL_RTX;
4216 switch (TREE_CODE (exp))
4218 case LSHIFT_EXPR:
4219 case RSHIFT_EXPR:
4220 case LROTATE_EXPR:
4221 case RROTATE_EXPR:
4222 case WIDEN_LSHIFT_EXPR:
4223 /* Ensure second operand isn't wider than the first one. */
4224 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4225 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4226 && (GET_MODE_UNIT_PRECISION (mode)
4227 < GET_MODE_PRECISION (op1_mode)))
4228 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4229 break;
4230 default:
4231 break;
4233 /* Fall through. */
4235 unary:
4236 case tcc_unary:
4237 if (mode == BLKmode)
4238 return NULL_RTX;
4239 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4240 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4241 if (!op0)
4242 return NULL_RTX;
4243 break;
4245 case tcc_comparison:
4246 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4247 goto binary;
4249 case tcc_type:
4250 case tcc_statement:
4251 gcc_unreachable ();
4253 case tcc_constant:
4254 case tcc_exceptional:
4255 case tcc_declaration:
4256 case tcc_reference:
4257 case tcc_vl_exp:
4258 break;
4261 switch (TREE_CODE (exp))
4263 case STRING_CST:
4264 if (!lookup_constant_def (exp))
4266 if (strlen (TREE_STRING_POINTER (exp)) + 1
4267 != (size_t) TREE_STRING_LENGTH (exp))
4268 return NULL_RTX;
4269 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4270 op0 = gen_rtx_MEM (BLKmode, op0);
4271 set_mem_attributes (op0, exp, 0);
4272 return op0;
4274 /* Fall through. */
4276 case INTEGER_CST:
4277 case REAL_CST:
4278 case FIXED_CST:
4279 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4280 return op0;
4282 case POLY_INT_CST:
4283 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4285 case COMPLEX_CST:
4286 gcc_assert (COMPLEX_MODE_P (mode));
4287 op0 = expand_debug_expr (TREE_REALPART (exp));
4288 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4289 return gen_rtx_CONCAT (mode, op0, op1);
4291 case DEBUG_EXPR_DECL:
4292 op0 = DECL_RTL_IF_SET (exp);
4294 if (op0)
4295 return op0;
4297 op0 = gen_rtx_DEBUG_EXPR (mode);
4298 DEBUG_EXPR_TREE_DECL (op0) = exp;
4299 SET_DECL_RTL (exp, op0);
4301 return op0;
4303 case VAR_DECL:
4304 case PARM_DECL:
4305 case FUNCTION_DECL:
4306 case LABEL_DECL:
4307 case CONST_DECL:
4308 case RESULT_DECL:
4309 op0 = DECL_RTL_IF_SET (exp);
4311 /* This decl was probably optimized away. */
4312 if (!op0)
4314 if (!VAR_P (exp)
4315 || DECL_EXTERNAL (exp)
4316 || !TREE_STATIC (exp)
4317 || !DECL_NAME (exp)
4318 || DECL_HARD_REGISTER (exp)
4319 || DECL_IN_CONSTANT_POOL (exp)
4320 || mode == VOIDmode)
4321 return NULL;
4323 op0 = make_decl_rtl_for_debug (exp);
4324 if (!MEM_P (op0)
4325 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4326 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4327 return NULL;
4329 else
4330 op0 = copy_rtx (op0);
4332 if (GET_MODE (op0) == BLKmode
4333 /* If op0 is not BLKmode, but mode is, adjust_mode
4334 below would ICE. While it is likely a FE bug,
4335 try to be robust here. See PR43166. */
4336 || mode == BLKmode
4337 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4339 gcc_assert (MEM_P (op0));
4340 op0 = adjust_address_nv (op0, mode, 0);
4341 return op0;
4344 /* Fall through. */
4346 adjust_mode:
4347 case PAREN_EXPR:
4348 CASE_CONVERT:
4350 inner_mode = GET_MODE (op0);
4352 if (mode == inner_mode)
4353 return op0;
4355 if (inner_mode == VOIDmode)
4357 if (TREE_CODE (exp) == SSA_NAME)
4358 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4359 else
4360 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4361 if (mode == inner_mode)
4362 return op0;
4365 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4367 if (GET_MODE_UNIT_BITSIZE (mode)
4368 == GET_MODE_UNIT_BITSIZE (inner_mode))
4369 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4370 else if (GET_MODE_UNIT_BITSIZE (mode)
4371 < GET_MODE_UNIT_BITSIZE (inner_mode))
4372 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4373 else
4374 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4376 else if (FLOAT_MODE_P (mode))
4378 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4379 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4380 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4381 else
4382 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4384 else if (FLOAT_MODE_P (inner_mode))
4386 if (unsignedp)
4387 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4388 else
4389 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4391 else if (GET_MODE_UNIT_PRECISION (mode)
4392 == GET_MODE_UNIT_PRECISION (inner_mode))
4393 op0 = lowpart_subreg (mode, op0, inner_mode);
4394 else if (GET_MODE_UNIT_PRECISION (mode)
4395 < GET_MODE_UNIT_PRECISION (inner_mode))
4396 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4397 else if (UNARY_CLASS_P (exp)
4398 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4399 : unsignedp)
4400 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4401 else
4402 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4404 return op0;
4407 case MEM_REF:
4408 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4410 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4411 TREE_OPERAND (exp, 0),
4412 TREE_OPERAND (exp, 1));
4413 if (newexp)
4414 return expand_debug_expr (newexp);
4416 /* FALLTHROUGH */
4417 case INDIRECT_REF:
4418 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4419 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4420 if (!op0)
4421 return NULL;
4423 if (TREE_CODE (exp) == MEM_REF)
4425 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4426 || (GET_CODE (op0) == PLUS
4427 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4428 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4429 Instead just use get_inner_reference. */
4430 goto component_ref;
4432 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4433 if (!op1 || !CONST_INT_P (op1))
4434 return NULL;
4436 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4439 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4441 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4442 op0, as);
4443 if (op0 == NULL_RTX)
4444 return NULL;
4446 op0 = gen_rtx_MEM (mode, op0);
4447 set_mem_attributes (op0, exp, 0);
4448 if (TREE_CODE (exp) == MEM_REF
4449 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4450 set_mem_expr (op0, NULL_TREE);
4451 set_mem_addr_space (op0, as);
4453 return op0;
4455 case TARGET_MEM_REF:
4456 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4457 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4458 return NULL;
4460 op0 = expand_debug_expr
4461 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4462 if (!op0)
4463 return NULL;
4465 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4466 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4467 op0, as);
4468 if (op0 == NULL_RTX)
4469 return NULL;
4471 op0 = gen_rtx_MEM (mode, op0);
4473 set_mem_attributes (op0, exp, 0);
4474 set_mem_addr_space (op0, as);
4476 return op0;
4478 component_ref:
4479 case ARRAY_REF:
4480 case ARRAY_RANGE_REF:
4481 case COMPONENT_REF:
4482 case BIT_FIELD_REF:
4483 case REALPART_EXPR:
4484 case IMAGPART_EXPR:
4485 case VIEW_CONVERT_EXPR:
4487 machine_mode mode1;
4488 poly_int64 bitsize, bitpos;
4489 tree offset;
4490 int reversep, volatilep = 0;
4491 tree tem
4492 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4493 &unsignedp, &reversep, &volatilep);
4494 rtx orig_op0;
4496 if (known_eq (bitsize, 0))
4497 return NULL;
4499 orig_op0 = op0 = expand_debug_expr (tem);
4501 if (!op0)
4502 return NULL;
4504 if (offset)
4506 machine_mode addrmode, offmode;
4508 if (!MEM_P (op0))
4509 return NULL;
4511 op0 = XEXP (op0, 0);
4512 addrmode = GET_MODE (op0);
4513 if (addrmode == VOIDmode)
4514 addrmode = Pmode;
4516 op1 = expand_debug_expr (offset);
4517 if (!op1)
4518 return NULL;
4520 offmode = GET_MODE (op1);
4521 if (offmode == VOIDmode)
4522 offmode = TYPE_MODE (TREE_TYPE (offset));
4524 if (addrmode != offmode)
4525 op1 = lowpart_subreg (addrmode, op1, offmode);
4527 /* Don't use offset_address here; we don't need a
4528 recognizable address, and we don't want to generate
4529 code. */
4530 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4531 op0, op1));
4534 if (MEM_P (op0))
4536 if (mode1 == VOIDmode)
4537 /* Bitfield. */
4538 mode1 = smallest_int_mode_for_size (bitsize);
4539 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4540 if (maybe_ne (bytepos, 0))
4542 op0 = adjust_address_nv (op0, mode1, bytepos);
4543 bitpos = num_trailing_bits (bitpos);
4545 else if (known_eq (bitpos, 0)
4546 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4547 op0 = adjust_address_nv (op0, mode, 0);
4548 else if (GET_MODE (op0) != mode1)
4549 op0 = adjust_address_nv (op0, mode1, 0);
4550 else
4551 op0 = copy_rtx (op0);
4552 if (op0 == orig_op0)
4553 op0 = shallow_copy_rtx (op0);
4554 set_mem_attributes (op0, exp, 0);
4557 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4558 return op0;
4560 if (maybe_lt (bitpos, 0))
4561 return NULL;
4563 if (GET_MODE (op0) == BLKmode)
4564 return NULL;
4566 poly_int64 bytepos;
4567 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4568 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4570 machine_mode opmode = GET_MODE (op0);
4572 if (opmode == VOIDmode)
4573 opmode = TYPE_MODE (TREE_TYPE (tem));
4575 /* This condition may hold if we're expanding the address
4576 right past the end of an array that turned out not to
4577 be addressable (i.e., the address was only computed in
4578 debug stmts). The gen_subreg below would rightfully
4579 crash, and the address doesn't really exist, so just
4580 drop it. */
4581 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4582 return NULL;
4584 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4585 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4588 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4589 && TYPE_UNSIGNED (TREE_TYPE (exp))
4590 ? SIGN_EXTRACT
4591 : ZERO_EXTRACT, mode,
4592 GET_MODE (op0) != VOIDmode
4593 ? GET_MODE (op0)
4594 : TYPE_MODE (TREE_TYPE (tem)),
4595 op0, gen_int_mode (bitsize, word_mode),
4596 gen_int_mode (bitpos, word_mode));
4599 case ABS_EXPR:
4600 return simplify_gen_unary (ABS, mode, op0, mode);
4602 case NEGATE_EXPR:
4603 return simplify_gen_unary (NEG, mode, op0, mode);
4605 case BIT_NOT_EXPR:
4606 return simplify_gen_unary (NOT, mode, op0, mode);
4608 case FLOAT_EXPR:
4609 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4610 0)))
4611 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4612 inner_mode);
4614 case FIX_TRUNC_EXPR:
4615 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4616 inner_mode);
4618 case POINTER_PLUS_EXPR:
4619 /* For the rare target where pointers are not the same size as
4620 size_t, we need to check for mismatched modes and correct
4621 the addend. */
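/* For example, on a hypothetical target with 64-bit pointers but a
   32-bit sizetype, the 32-bit addend is sign-extended to the 64-bit
   pointer mode before forming the PLUS below.  */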
4622 if (op0 && op1
4623 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4624 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4625 && op0_mode != op1_mode)
4627 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4628 /* If OP0 is a partial mode, then we must truncate, even
4629 if it has the same bitsize as OP1, as GCC's
4630 representation of partial modes is opaque. */
4631 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4632 && (GET_MODE_BITSIZE (op0_mode)
4633 == GET_MODE_BITSIZE (op1_mode))))
4634 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4635 else
4636 /* We always sign-extend, regardless of the signedness of
4637 the operand, because the operand is always unsigned
4638 here even if the original C expression is signed. */
4639 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4641 /* Fall through. */
4642 case PLUS_EXPR:
4643 return simplify_gen_binary (PLUS, mode, op0, op1);
4645 case MINUS_EXPR:
4646 case POINTER_DIFF_EXPR:
4647 return simplify_gen_binary (MINUS, mode, op0, op1);
4649 case MULT_EXPR:
4650 return simplify_gen_binary (MULT, mode, op0, op1);
4652 case RDIV_EXPR:
4653 case TRUNC_DIV_EXPR:
4654 case EXACT_DIV_EXPR:
4655 if (unsignedp)
4656 return simplify_gen_binary (UDIV, mode, op0, op1);
4657 else
4658 return simplify_gen_binary (DIV, mode, op0, op1);
4660 case TRUNC_MOD_EXPR:
4661 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4663 case FLOOR_DIV_EXPR:
4664 if (unsignedp)
4665 return simplify_gen_binary (UDIV, mode, op0, op1);
4666 else
4668 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4669 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4670 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4671 return simplify_gen_binary (PLUS, mode, div, adj);
4674 case FLOOR_MOD_EXPR:
4675 if (unsignedp)
4676 return simplify_gen_binary (UMOD, mode, op0, op1);
4677 else
4679 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4680 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4681 adj = simplify_gen_unary (NEG, mode,
4682 simplify_gen_binary (MULT, mode, adj, op1),
4683 mode);
4684 return simplify_gen_binary (PLUS, mode, mod, adj);
4687 case CEIL_DIV_EXPR:
4688 if (unsignedp)
4690 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4691 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4692 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4693 return simplify_gen_binary (PLUS, mode, div, adj);
4695 else
4697 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4698 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4699 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4700 return simplify_gen_binary (PLUS, mode, div, adj);
4703 case CEIL_MOD_EXPR:
4704 if (unsignedp)
4706 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4707 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4708 adj = simplify_gen_unary (NEG, mode,
4709 simplify_gen_binary (MULT, mode, adj, op1),
4710 mode);
4711 return simplify_gen_binary (PLUS, mode, mod, adj);
4713 else
4715 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4716 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4717 adj = simplify_gen_unary (NEG, mode,
4718 simplify_gen_binary (MULT, mode, adj, op1),
4719 mode);
4720 return simplify_gen_binary (PLUS, mode, mod, adj);
4723 case ROUND_DIV_EXPR:
4724 if (unsignedp)
4726 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4727 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4728 rtx adj = round_udiv_adjust (mode, mod, op1);
4729 return simplify_gen_binary (PLUS, mode, div, adj);
4731 else
4733 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4734 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4735 rtx adj = round_sdiv_adjust (mode, mod, op1);
4736 return simplify_gen_binary (PLUS, mode, div, adj);
4739 case ROUND_MOD_EXPR:
4740 if (unsignedp)
4742 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4743 rtx adj = round_udiv_adjust (mode, mod, op1);
4744 adj = simplify_gen_unary (NEG, mode,
4745 simplify_gen_binary (MULT, mode, adj, op1),
4746 mode);
4747 return simplify_gen_binary (PLUS, mode, mod, adj);
4749 else
4751 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4752 rtx adj = round_sdiv_adjust (mode, mod, op1);
4753 adj = simplify_gen_unary (NEG, mode,
4754 simplify_gen_binary (MULT, mode, adj, op1),
4755 mode);
4756 return simplify_gen_binary (PLUS, mode, mod, adj);
4759 case LSHIFT_EXPR:
4760 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4762 case RSHIFT_EXPR:
4763 if (unsignedp)
4764 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4765 else
4766 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4768 case LROTATE_EXPR:
4769 return simplify_gen_binary (ROTATE, mode, op0, op1);
4771 case RROTATE_EXPR:
4772 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4774 case MIN_EXPR:
4775 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4777 case MAX_EXPR:
4778 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4780 case BIT_AND_EXPR:
4781 case TRUTH_AND_EXPR:
4782 return simplify_gen_binary (AND, mode, op0, op1);
4784 case BIT_IOR_EXPR:
4785 case TRUTH_OR_EXPR:
4786 return simplify_gen_binary (IOR, mode, op0, op1);
4788 case BIT_XOR_EXPR:
4789 case TRUTH_XOR_EXPR:
4790 return simplify_gen_binary (XOR, mode, op0, op1);
4792 case TRUTH_ANDIF_EXPR:
4793 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4795 case TRUTH_ORIF_EXPR:
4796 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4798 case TRUTH_NOT_EXPR:
4799 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4801 case LT_EXPR:
4802 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4803 op0, op1);
4805 case LE_EXPR:
4806 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4807 op0, op1);
4809 case GT_EXPR:
4810 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4811 op0, op1);
4813 case GE_EXPR:
4814 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4815 op0, op1);
4817 case EQ_EXPR:
4818 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4820 case NE_EXPR:
4821 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4823 case UNORDERED_EXPR:
4824 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4826 case ORDERED_EXPR:
4827 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4829 case UNLT_EXPR:
4830 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4832 case UNLE_EXPR:
4833 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4835 case UNGT_EXPR:
4836 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4838 case UNGE_EXPR:
4839 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4841 case UNEQ_EXPR:
4842 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4844 case LTGT_EXPR:
4845 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4847 case COND_EXPR:
4848 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4850 case COMPLEX_EXPR:
4851 gcc_assert (COMPLEX_MODE_P (mode));
4852 if (GET_MODE (op0) == VOIDmode)
4853 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4854 if (GET_MODE (op1) == VOIDmode)
4855 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4856 return gen_rtx_CONCAT (mode, op0, op1);
4858 case CONJ_EXPR:
4859 if (GET_CODE (op0) == CONCAT)
4860 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4861 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4862 XEXP (op0, 1),
4863 GET_MODE_INNER (mode)));
4864 else
4866 scalar_mode imode = GET_MODE_INNER (mode);
4867 rtx re, im;
4869 if (MEM_P (op0))
4871 re = adjust_address_nv (op0, imode, 0);
4872 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4874 else
4876 scalar_int_mode ifmode;
4877 scalar_int_mode ihmode;
4878 rtx halfsize;
4879 if (!int_mode_for_mode (mode).exists (&ifmode)
4880 || !int_mode_for_mode (imode).exists (&ihmode))
4881 return NULL;
4882 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4883 re = op0;
4884 if (mode != ifmode)
4885 re = gen_rtx_SUBREG (ifmode, re, 0);
4886 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4887 if (imode != ihmode)
4888 re = gen_rtx_SUBREG (imode, re, 0);
4889 im = copy_rtx (op0);
4890 if (mode != ifmode)
4891 im = gen_rtx_SUBREG (ifmode, im, 0);
4892 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4893 if (imode != ihmode)
4894 im = gen_rtx_SUBREG (imode, im, 0);
4896 im = gen_rtx_NEG (imode, im);
4897 return gen_rtx_CONCAT (mode, re, im);
4900 case ADDR_EXPR:
4901 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4902 if (!op0 || !MEM_P (op0))
4904 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4905 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4906 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4907 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4908 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4909 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4911 if (handled_component_p (TREE_OPERAND (exp, 0)))
4913 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4914 bool reverse;
4915 tree decl
4916 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4917 &bitsize, &maxsize, &reverse);
4918 if ((VAR_P (decl)
4919 || TREE_CODE (decl) == PARM_DECL
4920 || TREE_CODE (decl) == RESULT_DECL)
4921 && (!TREE_ADDRESSABLE (decl)
4922 || target_for_debug_bind (decl))
4923 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4924 && known_gt (bitsize, 0)
4925 && known_eq (bitsize, maxsize))
4927 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4928 return plus_constant (mode, base, byteoffset);
4932 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4933 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4934 == ADDR_EXPR)
4936 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4937 0));
4938 if (op0 != NULL
4939 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4940 || (GET_CODE (op0) == PLUS
4941 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4942 && CONST_INT_P (XEXP (op0, 1)))))
4944 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4945 1));
4946 if (!op1 || !CONST_INT_P (op1))
4947 return NULL;
4949 return plus_constant (mode, op0, INTVAL (op1));
4953 return NULL;
4956 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4957 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4958 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4960 return op0;
4962 case VECTOR_CST:
4964 unsigned HOST_WIDE_INT i, nelts;
4966 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4967 return NULL;
4969 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4971 for (i = 0; i < nelts; ++i)
4973 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4974 if (!op1)
4975 return NULL;
4976 XVECEXP (op0, 0, i) = op1;
4979 return op0;
4982 case CONSTRUCTOR:
4983 if (TREE_CLOBBER_P (exp))
4984 return NULL;
4985 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4987 unsigned i;
4988 unsigned HOST_WIDE_INT nelts;
4989 tree val;
4991 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
4992 goto flag_unsupported;
4994 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4998 op1 = expand_debug_expr (val);
4999 if (!op1)
5000 return NULL;
5001 XVECEXP (op0, 0, i) = op1;
5004 if (i < nelts)
5006 op1 = expand_debug_expr
5007 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5009 if (!op1)
5010 return NULL;
5012 for (; i < nelts; i++)
5013 XVECEXP (op0, 0, i) = op1;
5016 return op0;
5018 else
5019 goto flag_unsupported;
5021 case CALL_EXPR:
5022 /* ??? Maybe handle some builtins? */
5023 return NULL;
5025 case SSA_NAME:
5027 gimple *g = get_gimple_for_ssa_name (exp);
5028 if (g)
5030 tree t = NULL_TREE;
5031 if (deep_ter_debug_map)
5033 tree *slot = deep_ter_debug_map->get (exp);
5034 if (slot)
5035 t = *slot;
5037 if (t == NULL_TREE)
5038 t = gimple_assign_rhs_to_tree (g);
5039 op0 = expand_debug_expr (t);
5040 if (!op0)
5041 return NULL;
5043 else
5045 /* If this is a reference to an incoming value of a
5046 parameter that is never used in the code, or where the
5047 incoming value is never used in the code, use the
5048 PARM_DECL's DECL_RTL if set. */
5049 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5050 && SSA_NAME_VAR (exp)
5051 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5052 && has_zero_uses (exp))
5054 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5055 if (op0)
5056 goto adjust_mode;
5057 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5058 if (op0)
5059 goto adjust_mode;
5062 int part = var_to_partition (SA.map, exp);
5064 if (part == NO_PARTITION)
5065 return NULL;
5067 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5069 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5071 goto adjust_mode;
5074 case ERROR_MARK:
5075 return NULL;
5077 /* Vector stuff. For most of the codes we don't have rtl codes. */
5078 case REALIGN_LOAD_EXPR:
5079 case VEC_COND_EXPR:
5080 case VEC_PACK_FIX_TRUNC_EXPR:
5081 case VEC_PACK_SAT_EXPR:
5082 case VEC_PACK_TRUNC_EXPR:
5083 case VEC_UNPACK_FLOAT_HI_EXPR:
5084 case VEC_UNPACK_FLOAT_LO_EXPR:
5085 case VEC_UNPACK_HI_EXPR:
5086 case VEC_UNPACK_LO_EXPR:
5087 case VEC_WIDEN_MULT_HI_EXPR:
5088 case VEC_WIDEN_MULT_LO_EXPR:
5089 case VEC_WIDEN_MULT_EVEN_EXPR:
5090 case VEC_WIDEN_MULT_ODD_EXPR:
5091 case VEC_WIDEN_LSHIFT_HI_EXPR:
5092 case VEC_WIDEN_LSHIFT_LO_EXPR:
5093 case VEC_PERM_EXPR:
5094 case VEC_DUPLICATE_EXPR:
5095 case VEC_SERIES_EXPR:
5096 return NULL;
5098 /* Misc codes. */
5099 case ADDR_SPACE_CONVERT_EXPR:
5100 case FIXED_CONVERT_EXPR:
5101 case OBJ_TYPE_REF:
5102 case WITH_SIZE_EXPR:
5103 case BIT_INSERT_EXPR:
5104 return NULL;
5106 case DOT_PROD_EXPR:
5107 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5108 && SCALAR_INT_MODE_P (mode))
5111 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5112 0)))
5113 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5114 inner_mode);
5116 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5117 1)))
5118 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5119 inner_mode);
5120 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5121 return simplify_gen_binary (PLUS, mode, op0, op2);
5123 return NULL;
5125 case WIDEN_MULT_EXPR:
5126 case WIDEN_MULT_PLUS_EXPR:
5127 case WIDEN_MULT_MINUS_EXPR:
5128 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5129 && SCALAR_INT_MODE_P (mode))
5131 inner_mode = GET_MODE (op0);
5132 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5133 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5134 else
5135 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5136 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5137 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5138 else
5139 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5140 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5141 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5142 return op0;
5143 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5144 return simplify_gen_binary (PLUS, mode, op0, op2);
5145 else
5146 return simplify_gen_binary (MINUS, mode, op2, op0);
5148 return NULL;
5150 case MULT_HIGHPART_EXPR:
5151 /* ??? Similar to the above. */
5152 return NULL;
5154 case WIDEN_SUM_EXPR:
5155 case WIDEN_LSHIFT_EXPR:
5156 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5157 && SCALAR_INT_MODE_P (mode))
5160 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5161 0)))
5162 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5163 inner_mode);
5164 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5165 ? ASHIFT : PLUS, mode, op0, op1);
5167 return NULL;
5169 case FMA_EXPR:
5170 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5172 default:
5173 flag_unsupported:
5174 if (flag_checking)
5176 debug_tree (exp);
5177 gcc_unreachable ();
5179 return NULL;
5183 /* Return an RTX equivalent to the source bind value of the tree expression
5184 EXP. */
5186 static rtx
5187 expand_debug_source_expr (tree exp)
5189 rtx op0 = NULL_RTX;
5190 machine_mode mode = VOIDmode, inner_mode;
5192 switch (TREE_CODE (exp))
5194 case PARM_DECL:
5196 mode = DECL_MODE (exp);
5197 op0 = expand_debug_parm_decl (exp);
5198 if (op0)
5199 break;
5200 /* See if this isn't an argument that has been completely
5201 optimized out. */
5202 if (!DECL_RTL_SET_P (exp)
5203 && !DECL_INCOMING_RTL (exp)
5204 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5206 tree aexp = DECL_ORIGIN (exp);
5207 if (DECL_CONTEXT (aexp)
5208 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5210 vec<tree, va_gc> **debug_args;
5211 unsigned int ix;
5212 tree ddecl;
5213 debug_args = decl_debug_args_lookup (current_function_decl);
5214 if (debug_args != NULL)
5216 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5217 ix += 2)
5218 if (ddecl == aexp)
5219 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5223 break;
5225 default:
5226 break;
5229 if (op0 == NULL_RTX)
5230 return NULL_RTX;
5232 inner_mode = GET_MODE (op0);
5233 if (mode == inner_mode)
5234 return op0;
5236 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5238 if (GET_MODE_UNIT_BITSIZE (mode)
5239 == GET_MODE_UNIT_BITSIZE (inner_mode))
5240 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5241 else if (GET_MODE_UNIT_BITSIZE (mode)
5242 < GET_MODE_UNIT_BITSIZE (inner_mode))
5243 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5244 else
5245 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5247 else if (FLOAT_MODE_P (mode))
5248 gcc_unreachable ();
5249 else if (FLOAT_MODE_P (inner_mode))
5251 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5252 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5253 else
5254 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5256 else if (GET_MODE_UNIT_PRECISION (mode)
5257 == GET_MODE_UNIT_PRECISION (inner_mode))
5258 op0 = lowpart_subreg (mode, op0, inner_mode);
5259 else if (GET_MODE_UNIT_PRECISION (mode)
5260 < GET_MODE_UNIT_PRECISION (inner_mode))
5261 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5262 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5263 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5264 else
5265 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5267 return op0;
5270 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5271 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5272 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
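/* For example, in a location such as
     (plus (mult (minus (ashift (plus (reg A) (reg B)) (const_int 2))
                        (reg C))
                 (reg D))
           (reg E))
   the innermost (plus (reg A) (reg B)) sits at nesting depth 4; it is
   replaced by a fresh DEBUG_EXPR, and a VAR_LOCATION debug insn binding
   that DEBUG_EXPR to the replaced subexpression is emitted just before
   INSN.  */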
5274 static void
5275 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5277 rtx exp = *exp_p;
5279 if (exp == NULL_RTX)
5280 return;
5282 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5283 return;
5285 if (depth == 4)
5287 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5288 rtx dval = make_debug_expr_from_rtl (exp);
5290 /* Emit a debug bind insn before INSN. */
5291 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5292 DEBUG_EXPR_TREE_DECL (dval), exp,
5293 VAR_INIT_STATUS_INITIALIZED);
5295 emit_debug_insn_before (bind, insn);
5296 *exp_p = dval;
5297 return;
5300 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5301 int i, j;
5302 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5303 switch (*format_ptr++)
5305 case 'e':
5306 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5307 break;
5309 case 'E':
5310 case 'V':
5311 for (j = 0; j < XVECLEN (exp, i); j++)
5312 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5313 break;
5315 default:
5316 break;
5320 /* Expand the _LOCs in debug insns. We run this after expanding all
5321 regular insns, so that any variables referenced in the function
5322 will have their DECL_RTLs set. */
5324 static void
5325 expand_debug_locations (void)
5327 rtx_insn *insn;
5328 rtx_insn *last = get_last_insn ();
5329 int save_strict_alias = flag_strict_aliasing;
5331 /* New alias sets while setting up memory attributes cause
5332 -fcompare-debug failures, even though they don't bring about any
5333 codegen changes. */
5334 flag_strict_aliasing = 0;
5336 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5337 if (DEBUG_BIND_INSN_P (insn))
5339 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5340 rtx val;
5341 rtx_insn *prev_insn, *insn2;
5342 machine_mode mode;
5344 if (value == NULL_TREE)
5345 val = NULL_RTX;
5346 else
5348 if (INSN_VAR_LOCATION_STATUS (insn)
5349 == VAR_INIT_STATUS_UNINITIALIZED)
5350 val = expand_debug_source_expr (value);
5351 /* The avoid_deep_ter_for_debug function inserts
5352 debug bind stmts after SSA_NAME definition, with the
5353 SSA_NAME as the whole bind location. Temporarily disable
5354 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5355 being defined in this DEBUG_INSN. */
5356 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5358 tree *slot = deep_ter_debug_map->get (value);
5359 if (slot)
5361 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5362 *slot = NULL_TREE;
5363 else
5364 slot = NULL;
5366 val = expand_debug_expr (value);
5367 if (slot)
5368 *slot = INSN_VAR_LOCATION_DECL (insn);
5370 else
5371 val = expand_debug_expr (value);
5372 gcc_assert (last == get_last_insn ());
5375 if (!val)
5376 val = gen_rtx_UNKNOWN_VAR_LOC ();
5377 else
5379 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5381 gcc_assert (mode == GET_MODE (val)
5382 || (GET_MODE (val) == VOIDmode
5383 && (CONST_SCALAR_INT_P (val)
5384 || GET_CODE (val) == CONST_FIXED
5385 || GET_CODE (val) == LABEL_REF)));
5388 INSN_VAR_LOCATION_LOC (insn) = val;
5389 prev_insn = PREV_INSN (insn);
5390 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5391 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5394 flag_strict_aliasing = save_strict_alias;
5397 /* Swap the operands of commutative operations so that the more
5398 expensive operand is expanded first. */
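/* For example, given c_3 = a_1 + b_2 where the statement chain defining
   b_2 is estimated (via estimate_num_insns, accumulated over TERed
   definitions) to be more expensive than the chain defining a_1, the
   operands are swapped so that b_2 becomes the first operand and is
   expanded first.  */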
5400 static void
5401 reorder_operands (basic_block bb)
5403 unsigned int *lattice; /* Hold cost of each statement. */
5404 unsigned int i = 0, n = 0;
5405 gimple_stmt_iterator gsi;
5406 gimple_seq stmts;
5407 gimple *stmt;
5408 bool swap;
5409 tree op0, op1;
5410 ssa_op_iter iter;
5411 use_operand_p use_p;
5412 gimple *def0, *def1;
5414 /* Compute cost of each statement using estimate_num_insns. */
5415 stmts = bb_seq (bb);
5416 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5418 stmt = gsi_stmt (gsi);
5419 if (!is_gimple_debug (stmt))
5420 gimple_set_uid (stmt, n++);
5422 lattice = XNEWVEC (unsigned int, n);
5423 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5425 unsigned cost;
5426 stmt = gsi_stmt (gsi);
5427 if (is_gimple_debug (stmt))
5428 continue;
5429 cost = estimate_num_insns (stmt, &eni_size_weights);
5430 lattice[i] = cost;
5431 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5433 tree use = USE_FROM_PTR (use_p);
5434 gimple *def_stmt;
5435 if (TREE_CODE (use) != SSA_NAME)
5436 continue;
5437 def_stmt = get_gimple_for_ssa_name (use);
5438 if (!def_stmt)
5439 continue;
5440 lattice[i] += lattice[gimple_uid (def_stmt)];
5442 i++;
5443 if (!is_gimple_assign (stmt)
5444 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5445 continue;
5446 op0 = gimple_op (stmt, 1);
5447 op1 = gimple_op (stmt, 2);
5448 if (TREE_CODE (op0) != SSA_NAME
5449 || TREE_CODE (op1) != SSA_NAME)
5450 continue;
5451 /* Swap operands if the second one is more expensive. */
5452 def0 = get_gimple_for_ssa_name (op0);
5453 def1 = get_gimple_for_ssa_name (op1);
5454 if (!def1)
5455 continue;
5456 swap = false;
5457 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5458 swap = true;
5459 if (swap)
5461 if (dump_file && (dump_flags & TDF_DETAILS))
5463 fprintf (dump_file, "Swap operands in stmt:\n");
5464 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5465 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5466 def0 ? lattice[gimple_uid (def0)] : 0,
5467 lattice[gimple_uid (def1)]);
5469 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5470 gimple_assign_rhs2_ptr (stmt));
5473 XDELETE (lattice);
5476 /* Expand basic block BB from GIMPLE trees to RTL. */
5478 static basic_block
5479 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5481 gimple_stmt_iterator gsi;
5482 gimple_seq stmts;
5483 gimple *stmt = NULL;
5484 rtx_note *note = NULL;
5485 rtx_insn *last;
5486 edge e;
5487 edge_iterator ei;
5489 if (dump_file)
5490 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5491 bb->index);
5493 /* Note that since we are now transitioning from GIMPLE to RTL, we
5494 cannot use the gsi_*_bb() routines because they expect the basic
5495 block to be in GIMPLE, instead of RTL. Therefore, we need to
5496 access the BB sequence directly. */
5497 if (optimize)
5498 reorder_operands (bb);
5499 stmts = bb_seq (bb);
5500 bb->il.gimple.seq = NULL;
5501 bb->il.gimple.phi_nodes = NULL;
5502 rtl_profile_for_bb (bb);
5503 init_rtl_bb_info (bb);
5504 bb->flags |= BB_RTL;
5506 /* Remove the RETURN_EXPR if we may fall through to the exit
5507 instead. */
5508 gsi = gsi_last (stmts);
5509 if (!gsi_end_p (gsi)
5510 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5512 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5514 gcc_assert (single_succ_p (bb));
5515 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5517 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5518 && !gimple_return_retval (ret_stmt))
5520 gsi_remove (&gsi, false);
5521 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5525 gsi = gsi_start (stmts);
5526 if (!gsi_end_p (gsi))
5528 stmt = gsi_stmt (gsi);
5529 if (gimple_code (stmt) != GIMPLE_LABEL)
5530 stmt = NULL;
5533 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5535 if (stmt || elt)
5537 gcc_checking_assert (!note);
5538 last = get_last_insn ();
5540 if (stmt)
5542 expand_gimple_stmt (stmt);
5543 gsi_next (&gsi);
5546 if (elt)
5547 emit_label (*elt);
5549 BB_HEAD (bb) = NEXT_INSN (last);
5550 if (NOTE_P (BB_HEAD (bb)))
5551 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5552 gcc_assert (LABEL_P (BB_HEAD (bb)));
5553 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5555 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5557 else
5558 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5560 if (note)
5561 NOTE_BASIC_BLOCK (note) = bb;
5563 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5565 basic_block new_bb;
5567 stmt = gsi_stmt (gsi);
5569 /* If this statement is a non-debug one, and we generate debug
5570 insns, then this one might be the last real use of a TERed
5571 SSA_NAME, but where there are still some debug uses further
5572 down. Expanding the current SSA name in such further debug
5573 uses by their RHS might lead to wrong debug info, as coalescing
5574 might make the operands of such RHS be placed into the same
5575 pseudo as something else. Like so:
5576 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5577 use(a_1);
5578 a_2 = ...
5579 #DEBUG ... => a_1
5580 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5581 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5582 the write to a_2 would actually have clobbered the place which
5583 formerly held a_0.
5585 So, instead of that, we recognize the situation, and generate
5586 debug temporaries at the last real use of TERed SSA names:
5587 a_1 = a_0 + 1;
5588 #DEBUG #D1 => a_1
5589 use(a_1);
5590 a_2 = ...
5591 #DEBUG ... => #D1
5593 if (MAY_HAVE_DEBUG_BIND_INSNS
5594 && SA.values
5595 && !is_gimple_debug (stmt))
5597 ssa_op_iter iter;
5598 tree op;
5599 gimple *def;
5601 location_t sloc = curr_insn_location ();
5603 /* Look for SSA names that have their last use here (TERed
5604 names always have only one real use). */
5605 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5606 if ((def = get_gimple_for_ssa_name (op)))
5608 imm_use_iterator imm_iter;
5609 use_operand_p use_p;
5610 bool have_debug_uses = false;
5612 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5614 if (gimple_debug_bind_p (USE_STMT (use_p)))
5616 have_debug_uses = true;
5617 break;
5621 if (have_debug_uses)
5623 /* OP is a TERed SSA name, with DEF its defining
5624 statement, and where OP is used in further debug
5625 instructions. Generate a debug temporary, and
5626 replace all uses of OP in debug insns with that
5627 temporary. */
5628 gimple *debugstmt;
5629 tree value = gimple_assign_rhs_to_tree (def);
5630 tree vexpr = make_node (DEBUG_EXPR_DECL);
5631 rtx val;
5632 machine_mode mode;
5634 set_curr_insn_location (gimple_location (def));
5636 DECL_ARTIFICIAL (vexpr) = 1;
5637 TREE_TYPE (vexpr) = TREE_TYPE (value);
5638 if (DECL_P (value))
5639 mode = DECL_MODE (value);
5640 else
5641 mode = TYPE_MODE (TREE_TYPE (value));
5642 SET_DECL_MODE (vexpr, mode);
5644 val = gen_rtx_VAR_LOCATION
5645 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5647 emit_debug_insn (val);
5649 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5651 if (!gimple_debug_bind_p (debugstmt))
5652 continue;
5654 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5655 SET_USE (use_p, vexpr);
5657 update_stmt (debugstmt);
5661 set_curr_insn_location (sloc);
5664 currently_expanding_gimple_stmt = stmt;
5666 /* Expand this statement, then evaluate the resulting RTL and
5667 fixup the CFG accordingly. */
5668 if (gimple_code (stmt) == GIMPLE_COND)
5670 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5671 if (new_bb)
5672 return new_bb;
5674 else if (is_gimple_debug (stmt))
5676 location_t sloc = curr_insn_location ();
5677 gimple_stmt_iterator nsi = gsi;
5679 for (;;)
5681 tree var;
5682 tree value = NULL_TREE;
5683 rtx val = NULL_RTX;
5684 machine_mode mode;
5686 if (!gimple_debug_nonbind_marker_p (stmt))
5688 if (gimple_debug_bind_p (stmt))
5690 var = gimple_debug_bind_get_var (stmt);
5692 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5693 && TREE_CODE (var) != LABEL_DECL
5694 && !target_for_debug_bind (var))
5695 goto delink_debug_stmt;
5697 if (DECL_P (var))
5698 mode = DECL_MODE (var);
5699 else
5700 mode = TYPE_MODE (TREE_TYPE (var));
5702 if (gimple_debug_bind_has_value_p (stmt))
5703 value = gimple_debug_bind_get_value (stmt);
5705 val = gen_rtx_VAR_LOCATION
5706 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5708 else if (gimple_debug_source_bind_p (stmt))
5710 var = gimple_debug_source_bind_get_var (stmt);
5712 value = gimple_debug_source_bind_get_value (stmt);
5714 mode = DECL_MODE (var);
5716 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5717 VAR_INIT_STATUS_UNINITIALIZED);
5719 else
5720 gcc_unreachable ();
5722 /* If this function was first compiled with markers
5723 enabled, but they're now disabled (e.g. LTO), drop
5724 them on the floor. */
5725 else if (gimple_debug_nonbind_marker_p (stmt)
5726 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5727 goto delink_debug_stmt;
5728 else if (gimple_debug_begin_stmt_p (stmt))
5729 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5730 else
5731 gcc_unreachable ();
5733 last = get_last_insn ();
5735 set_curr_insn_location (gimple_location (stmt));
5737 emit_debug_insn (val);
5739 if (dump_file && (dump_flags & TDF_DETAILS))
5741 /* We can't dump the insn with a TREE where an RTX
5742 is expected. */
5743 if (GET_CODE (val) == VAR_LOCATION)
5745 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5746 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5748 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5749 if (GET_CODE (val) == VAR_LOCATION)
5750 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5753 delink_debug_stmt:
5754 /* In order not to generate too many debug temporaries,
5755 we delink all uses of debug statements we already expanded.
5756 Therefore debug statements between definition and real
5757 use of TERed SSA names will continue to use the SSA name,
5758 and not be replaced with debug temps. */
5759 delink_stmt_imm_use (stmt);
5761 gsi = nsi;
5762 gsi_next (&nsi);
5763 if (gsi_end_p (nsi))
5764 break;
5765 stmt = gsi_stmt (nsi);
5766 if (!is_gimple_debug (stmt))
5767 break;
5770 set_curr_insn_location (sloc);
5772 else
5774 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5775 if (call_stmt
5776 && gimple_call_tail_p (call_stmt)
5777 && disable_tail_calls)
5778 gimple_call_set_tail (call_stmt, false);
5780 if (call_stmt && gimple_call_tail_p (call_stmt))
5782 bool can_fallthru;
5783 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5784 if (new_bb)
5786 if (can_fallthru)
5787 bb = new_bb;
5788 else
5789 return new_bb;
5792 else
5794 def_operand_p def_p;
5795 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5797 if (def_p != NULL)
5799 /* Ignore this stmt if it is in the list of
5800 replaceable expressions. */
5801 if (SA.values
5802 && bitmap_bit_p (SA.values,
5803 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5804 continue;
5806 last = expand_gimple_stmt (stmt);
5807 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5812 currently_expanding_gimple_stmt = NULL;
5814 /* Expand implicit goto and convert goto_locus. */
5815 FOR_EACH_EDGE (e, ei, bb->succs)
5817 if (e->goto_locus != UNKNOWN_LOCATION)
5818 set_curr_insn_location (e->goto_locus);
5819 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5821 emit_jump (label_rtx_for_bb (e->dest));
5822 e->flags &= ~EDGE_FALLTHRU;
5826 /* Expanded RTL can create a jump in the last instruction of the block.
5827 This might later be assumed to be a jump to the successor and break edge insertion.
5828 We need to insert a dummy move to prevent this. PR41440. */
5829 if (single_succ_p (bb)
5830 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5831 && (last = get_last_insn ())
5832 && (JUMP_P (last)
5833 || (DEBUG_INSN_P (last)
5834 && JUMP_P (prev_nondebug_insn (last)))))
5836 rtx dummy = gen_reg_rtx (SImode);
5837 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5840 do_pending_stack_adjust ();
5842 /* Find the block tail. The last insn in the block is the insn
5843 before a barrier and/or table jump insn. */
5844 last = get_last_insn ();
5845 if (BARRIER_P (last))
5846 last = PREV_INSN (last);
5847 if (JUMP_TABLE_DATA_P (last))
5848 last = PREV_INSN (PREV_INSN (last));
5849 BB_END (bb) = last;
5851 update_bb_for_insn (bb);
5853 return bb;
5857 /* Create a basic block for initialization code. */
5859 static basic_block
5860 construct_init_block (void)
5862 basic_block init_block, first_block;
5863 edge e = NULL;
5864 int flags;
5866 /* Multiple entry points not supported yet. */
5867 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5868 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5869 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5870 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5871 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5873 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5875 /* When the entry edge points to the first basic block, we don't need a jump;
5876 otherwise we have to jump to the proper target. */
5877 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5879 tree label = gimple_block_label (e->dest);
5881 emit_jump (jump_target_rtx (label));
5882 flags = 0;
5884 else
5885 flags = EDGE_FALLTHRU;
5887 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5888 get_last_insn (),
5889 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5890 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5891 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5892 if (e)
5894 first_block = e->dest;
5895 redirect_edge_succ (e, init_block);
5896 e = make_single_succ_edge (init_block, first_block, flags);
5898 else
5899 e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5900 EDGE_FALLTHRU);
5902 update_bb_for_insn (init_block);
5903 return init_block;
5906 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5907 found in the block tree. */
5909 static void
5910 set_block_levels (tree block, int level)
5912 while (block)
5914 BLOCK_NUMBER (block) = level;
5915 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5916 block = BLOCK_CHAIN (block);
5920 /* Create a block containing landing pads and similar stuff. */
5922 static void
5923 construct_exit_block (void)
5925 rtx_insn *head = get_last_insn ();
5926 rtx_insn *end;
5927 basic_block exit_block;
5928 edge e, e2;
5929 unsigned ix;
5930 edge_iterator ei;
5931 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5932 rtx_insn *orig_end = BB_END (prev_bb);
5934 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5936 /* Make sure the locus is set to the end of the function, so that
5937 epilogue line numbers and warnings are set properly. */
5938 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5939 input_location = cfun->function_end_locus;
5941 /* Generate rtl for function exit. */
5942 expand_function_end ();
5944 end = get_last_insn ();
5945 if (head == end)
5946 return;
5947 /* While emitting the function end we could move the end of the last basic
5948 block. */
5949 BB_END (prev_bb) = orig_end;
5950 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5951 head = NEXT_INSN (head);
5952 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5953 BB count accounting will be confused. Any instructions before that
5954 label are emitted for the case where PREV_BB falls through into the
5955 exit block, so append those instructions to prev_bb in that case. */
5956 if (NEXT_INSN (head) != return_label)
5958 while (NEXT_INSN (head) != return_label)
5960 if (!NOTE_P (NEXT_INSN (head)))
5961 BB_END (prev_bb) = NEXT_INSN (head);
5962 head = NEXT_INSN (head);
5965 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5966 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5967 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5969 ix = 0;
5970 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5972 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5973 if (!(e->flags & EDGE_ABNORMAL))
5974 redirect_edge_succ (e, exit_block);
5975 else
5976 ix++;
5979 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5980 EDGE_FALLTHRU);
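  /* Any edges not redirected above (the abnormal ones) still reach the exit
     block directly, so remove their counts from the newly created block.  */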
5981 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5982 if (e2 != e)
5984 exit_block->count -= e2->count ();
5986 update_bb_for_insn (exit_block);
5989 /* Helper function for discover_nonconstant_array_refs.
5990 Look for ARRAY_REF nodes with non-constant indexes and mark them
5991 addressable. */
5993 static tree
5994 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5995 void *data ATTRIBUTE_UNUSED)
5997 tree t = *tp;
5999 if (IS_TYPE_OR_DECL_P (t))
6000 *walk_subtrees = 0;
6001 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6003 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6004 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6005 && (!TREE_OPERAND (t, 2)
6006 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6007 || (TREE_CODE (t) == COMPONENT_REF
6008 && (!TREE_OPERAND (t,2)
6009 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6010 || TREE_CODE (t) == BIT_FIELD_REF
6011 || TREE_CODE (t) == REALPART_EXPR
6012 || TREE_CODE (t) == IMAGPART_EXPR
6013 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6014 || CONVERT_EXPR_P (t))
6015 t = TREE_OPERAND (t, 0);
6017 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6019 t = get_base_address (t);
6020 if (t && DECL_P (t)
6021 && DECL_MODE (t) != BLKmode)
6022 TREE_ADDRESSABLE (t) = 1;
6025 *walk_subtrees = 0;
6028 return NULL_TREE;
6031 /* RTL expansion is not able to compile array references with variable
6032 offsets for arrays stored in a single register. Discover such
6033 expressions and mark variables as addressable to avoid this
6034 scenario. */
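/* An illustrative case (added comment, not in the original source): an
   array-like decl whose DECL_MODE is a single register -- for example a GNU
   vector variable -- indexed by a variable subscript has to live in memory,
   so it gets marked addressable here.  */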
6036 static void
6037 discover_nonconstant_array_refs (void)
6039 basic_block bb;
6040 gimple_stmt_iterator gsi;
6042 FOR_EACH_BB_FN (bb, cfun)
6043 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6045 gimple *stmt = gsi_stmt (gsi);
6046 if (!is_gimple_debug (stmt))
6047 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6051 /* This function sets crtl->args.internal_arg_pointer to a virtual
6052 register if DRAP is needed. The local register allocator will replace
6053 virtual_incoming_args_rtx with the virtual register. */
6055 static void
6056 expand_stack_alignment (void)
6058 rtx drap_rtx;
6059 unsigned int preferred_stack_boundary;
6061 if (! SUPPORTS_STACK_ALIGNMENT)
6062 return;
6064 if (cfun->calls_alloca
6065 || cfun->has_nonlocal_label
6066 || crtl->has_nonlocal_goto)
6067 crtl->need_drap = true;
6069 /* Call update_stack_boundary here again to update incoming stack
6070 boundary. It may set incoming stack alignment to a different
6071 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6072 use the minimum incoming stack alignment to check if it is OK
6073 to perform sibcall optimization since sibcall optimization will
6074 only align the outgoing stack to incoming stack boundary. */
6075 if (targetm.calls.update_stack_boundary)
6076 targetm.calls.update_stack_boundary ();
6078 /* The incoming stack frame has to be aligned at least at
6079 parm_stack_boundary. */
6080 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6082 /* Update crtl->stack_alignment_estimated and use it later to align
6083 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6084 exceptions since callgraph doesn't collect incoming stack alignment
6085 in this case. */
6086 if (cfun->can_throw_non_call_exceptions
6087 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6088 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6089 else
6090 preferred_stack_boundary = crtl->preferred_stack_boundary;
6091 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6092 crtl->stack_alignment_estimated = preferred_stack_boundary;
6093 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6094 crtl->stack_alignment_needed = preferred_stack_boundary;
6096 gcc_assert (crtl->stack_alignment_needed
6097 <= crtl->stack_alignment_estimated);
6099 crtl->stack_realign_needed
6100 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6101 crtl->stack_realign_tried = crtl->stack_realign_needed;
6103 crtl->stack_realign_processed = true;
6105 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6106 alignment. */
6107 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6108 drap_rtx = targetm.calls.get_drap_rtx ();
6110 /* stack_realign_drap and drap_rtx must match. */
6111 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6113 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6114 if (drap_rtx != NULL)
6116 crtl->args.internal_arg_pointer = drap_rtx;
6118 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6119 needed. */
6120 fixup_tail_calls ();
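/* Arrange for `__main' to be called at the start of `main' on targets that
   lack .init-section support, so that global constructors and similar
   initialization code are run.  */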
6125 static void
6126 expand_main_function (void)
6128 #if (defined(INVOKE__main) \
6129 || (!defined(HAS_INIT_SECTION) \
6130 && !defined(INIT_SECTION_ASM_OP) \
6131 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6132 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6133 #endif
6137 /* Expand code to initialize the stack_protect_guard. This is invoked at
6138 the beginning of a function to be protected. */
6140 static void
6141 stack_protect_prologue (void)
6143 tree guard_decl = targetm.stack_protect_guard ();
6144 rtx x, y;
6146 x = expand_normal (crtl->stack_protect_guard);
6147 if (guard_decl)
6148 y = expand_normal (guard_decl);
6149 else
6150 y = const0_rtx;
6152 /* Allow the target to copy from Y to X without leaking Y into a
6153 register. */
6154 if (targetm.have_stack_protect_set ())
6155 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6157 emit_insn (insn);
6158 return;
6161 /* Otherwise do a straight move. */
6162 emit_move_insn (x, y);
6165 /* Translate the intermediate representation contained in the CFG
6166 from GIMPLE trees to RTL.
6168 We do conversion per basic block and preserve/update the tree CFG.
6169 This implies we have to do some magic as the CFG can simultaneously
6170 consist of basic blocks containing RTL and GIMPLE trees. This can
6171 confuse the CFG hooks, so be careful to not manipulate CFG during
6172 the expansion. */
6174 namespace {
6176 const pass_data pass_data_expand =
6178 RTL_PASS, /* type */
6179 "expand", /* name */
6180 OPTGROUP_NONE, /* optinfo_flags */
6181 TV_EXPAND, /* tv_id */
6182 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6183 | PROP_gimple_lcx
6184 | PROP_gimple_lvec
6185 | PROP_gimple_lva), /* properties_required */
6186 PROP_rtl, /* properties_provided */
6187 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6188 0, /* todo_flags_start */
6189 0, /* todo_flags_finish */
6192 class pass_expand : public rtl_opt_pass
6194 public:
6195 pass_expand (gcc::context *ctxt)
6196 : rtl_opt_pass (pass_data_expand, ctxt)
6199 /* opt_pass methods: */
6200 virtual unsigned int execute (function *);
6202 }; // class pass_expand
6204 unsigned int
6205 pass_expand::execute (function *fun)
6207 basic_block bb, init_block;
6208 edge_iterator ei;
6209 edge e;
6210 rtx_insn *var_seq, *var_ret_seq;
6211 unsigned i;
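  /* Take the function out of SSA form, coalescing SSA names into partitions;
     partition_to_pseudo will record the RTL assigned to each partition.  */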
6213 timevar_push (TV_OUT_OF_SSA);
6214 rewrite_out_of_ssa (&SA);
6215 timevar_pop (TV_OUT_OF_SSA);
6216 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
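  /* When TER is enabled, keep debug bind statements from referencing deeply
     nested replaceable expressions, so debug temporaries stay manageable.  */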
6218 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6220 gimple_stmt_iterator gsi;
6221 FOR_EACH_BB_FN (bb, cfun)
6222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6223 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6224 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6227 /* Make sure all values used by the optimization passes have sane
6228 defaults. */
6229 reg_renumber = 0;
6231 /* Some backends want to know that we are expanding to RTL. */
6232 currently_expanding_to_rtl = 1;
6233 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6234 free_dominance_info (CDI_DOMINATORS);
6236 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6238 if (chkp_function_instrumented_p (current_function_decl))
6239 chkp_reset_rtl_bounds ();
6241 insn_locations_init ();
6242 if (!DECL_IS_BUILTIN (current_function_decl))
6244 /* Eventually, all FEs should explicitly set function_start_locus. */
6245 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6246 set_curr_insn_location
6247 (DECL_SOURCE_LOCATION (current_function_decl));
6248 else
6249 set_curr_insn_location (fun->function_start_locus);
6251 else
6252 set_curr_insn_location (UNKNOWN_LOCATION);
6253 prologue_location = curr_insn_location ();
6255 #ifdef INSN_SCHEDULING
6256 init_sched_attrs ();
6257 #endif
6259 /* Make sure the first insn is a note, even if we don't want line numbers.
6260 This makes sure the first insn will never be deleted.
6261 Also, final expects a note to appear there. */
6262 emit_note (NOTE_INSN_DELETED);
6264 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6265 discover_nonconstant_array_refs ();
6267 targetm.expand_to_rtl_hook ();
6268 crtl->init_stack_alignment ();
6269 fun->cfg->max_jumptable_ents = 0;
6271 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6272 of the function section at expansion time to predict the distance of calls. */
6273 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6275 /* Expand the variables recorded during gimple lowering. */
6276 timevar_push (TV_VAR_EXPAND);
6277 start_sequence ();
6279 var_ret_seq = expand_used_vars ();
6281 var_seq = get_insns ();
6282 end_sequence ();
6283 timevar_pop (TV_VAR_EXPAND);
6285 /* Honor stack protection warnings. */
6286 if (warn_stack_protect)
6288 if (fun->calls_alloca)
6289 warning (OPT_Wstack_protector,
6290 "stack protector not protecting local variables: "
6291 "variable length buffer");
6292 if (has_short_buffer && !crtl->stack_protect_guard)
6293 warning (OPT_Wstack_protector,
6294 "stack protector not protecting function: "
6295 "all local arrays are less than %d bytes long",
6296 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6299 /* Set up parameters and prepare for return, for the function. */
6300 expand_function_start (current_function_decl);
6302 /* If we emitted any instructions for setting up the variables,
6303 emit them before the FUNCTION_START note. */
6304 if (var_seq)
6306 emit_insn_before (var_seq, parm_birth_insn);
6308 /* In expand_function_end we'll insert the alloca save/restore
6309 before parm_birth_insn. We've just inserted an alloca call.
6310 Adjust the pointer to match. */
6311 parm_birth_insn = var_seq;
6314 /* Now propagate the RTL assignment of each partition to the
6315 underlying var of each SSA_NAME. */
6316 tree name;
6318 FOR_EACH_SSA_NAME (i, name, cfun)
6320 /* We might have generated new SSA names in
6321 update_alias_info_with_stack_vars. They will have a NULL
6322 defining statement, and won't be part of the partitioning,
6323 so ignore those. */
6324 if (!SSA_NAME_DEF_STMT (name))
6325 continue;
6327 adjust_one_expanded_partition_var (name);
6330 /* Clean up RTL of variables that straddle across multiple
6331 partitions, and check that the rtl of any PARM_DECLs that are not
6332 cleaned up is that of their default defs. */
6333 FOR_EACH_SSA_NAME (i, name, cfun)
6335 int part;
6337 /* We might have generated new SSA names in
6338 update_alias_info_with_stack_vars. They will have a NULL
6339 defining statement, and won't be part of the partitioning,
6340 so ignore those. */
6341 if (!SSA_NAME_DEF_STMT (name))
6342 continue;
6343 part = var_to_partition (SA.map, name);
6344 if (part == NO_PARTITION)
6345 continue;
6347 /* If this decl was marked as living in multiple places, reset
6348 this now to NULL. */
6349 tree var = SSA_NAME_VAR (name);
6350 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6351 SET_DECL_RTL (var, NULL);
6352 /* Check that the pseudos chosen by assign_parms are those of
6353 the corresponding default defs. */
6354 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6355 && (TREE_CODE (var) == PARM_DECL
6356 || TREE_CODE (var) == RESULT_DECL))
6358 rtx in = DECL_RTL_IF_SET (var);
6359 gcc_assert (in);
6360 rtx out = SA.partition_to_pseudo[part];
6361 gcc_assert (in == out);
6363 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6364 those expected by debug backends for each parm and for
6365 the result. This is particularly important for stabs,
6366 whose register elimination from parm's DECL_RTL may cause
6367 -fcompare-debug differences as SET_DECL_RTL changes reg's
6368 attrs. So, make sure the RTL already has the parm as the
6369 EXPR, so that it won't change. */
6370 SET_DECL_RTL (var, NULL_RTX);
6371 if (MEM_P (in))
6372 set_mem_attributes (in, var, true);
6373 SET_DECL_RTL (var, in);
6377 /* If this function is `main', emit a call to `__main'
6378 to run global initializers, etc. */
6379 if (DECL_NAME (current_function_decl)
6380 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6381 && DECL_FILE_SCOPE_P (current_function_decl))
6382 expand_main_function ();
6384 /* Initialize the stack_protect_guard field. This must happen after the
6385 call to __main (if any) so that the external decl is initialized. */
6386 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6387 stack_protect_prologue ();
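  /* Eliminate the PHI nodes by queueing copies on the incoming edges; the
     queued insns are committed when edge insertions are processed below.  */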
6389 expand_phi_nodes (&SA);
6391 /* Release any stale SSA redirection data. */
6392 redirect_edge_var_map_empty ();
6394 /* Register rtl specific functions for cfg. */
6395 rtl_register_cfg_hooks ();
6397 init_block = construct_init_block ();
6399 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6400 remaining edges later. */
6401 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6402 e->flags &= ~EDGE_EXECUTABLE;
6404 /* If the function has too many markers, drop them while expanding. */
6405 if (cfun->debug_marker_count
6406 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6407 cfun->debug_nonbind_markers = false;
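  /* Map each GIMPLE basic block to the RTL code label used as its jump
     target during expansion (see label_rtx_for_bb).  */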
6409 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6410 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6411 next_bb)
6412 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6414 if (MAY_HAVE_DEBUG_BIND_INSNS)
6415 expand_debug_locations ();
6417 if (deep_ter_debug_map)
6419 delete deep_ter_debug_map;
6420 deep_ter_debug_map = NULL;
6423 /* Free stuff we no longer need after GIMPLE optimizations. */
6424 free_dominance_info (CDI_DOMINATORS);
6425 free_dominance_info (CDI_POST_DOMINATORS);
6426 delete_tree_cfg_annotations (fun);
6428 timevar_push (TV_OUT_OF_SSA);
6429 finish_out_of_ssa (&SA);
6430 timevar_pop (TV_OUT_OF_SSA);
6432 timevar_push (TV_POST_EXPAND);
6433 /* We are no longer in SSA form. */
6434 fun->gimple_df->in_ssa_p = false;
6435 loops_state_clear (LOOP_CLOSED_SSA);
6437 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6438 conservatively to true until they are all profile aware. */
6439 delete lab_rtx_for_bb;
6440 free_histograms (fun);
6442 construct_exit_block ();
6443 insn_locations_finalize ();
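  /* If expand_used_vars deferred a sequence for the return path, emit it
     just after the return label, past a basic-block note if one follows.  */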
6445 if (var_ret_seq)
6447 rtx_insn *after = return_label;
6448 rtx_insn *next = NEXT_INSN (after);
6449 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6450 after = next;
6451 emit_insn_after (var_ret_seq, after);
6454 /* Zap the tree EH table. */
6455 set_eh_throw_stmt_table (fun, NULL);
6457 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6458 to split edges, which edge insertions might do. */
6459 rebuild_jump_labels (get_insns ());
6461 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6462 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6464 edge e;
6465 edge_iterator ei;
6466 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6468 if (e->insns.r)
6470 rebuild_jump_labels_chain (e->insns.r);
6471 /* Put insns after parm birth, but before
6472 NOTE_INSN_FUNCTION_BEG. */
6473 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6474 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6476 rtx_insn *insns = e->insns.r;
6477 e->insns.r = NULL;
6478 if (NOTE_P (parm_birth_insn)
6479 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6480 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6481 else
6482 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6484 else
6485 commit_one_edge_insertion (e);
6487 else
6488 ei_next (&ei);
6492 /* We're done expanding trees to RTL. */
6493 currently_expanding_to_rtl = 0;
6495 flush_mark_addressable_queue ();
6497 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6498 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6500 edge e;
6501 edge_iterator ei;
6502 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6504 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6505 e->flags &= ~EDGE_EXECUTABLE;
6507 /* At the moment not all abnormal edges match the RTL
6508 representation. It is safe to remove them here as
6509 find_many_sub_basic_blocks will rediscover them.
6510 In the future we should get this fixed properly. */
6511 if ((e->flags & EDGE_ABNORMAL)
6512 && !(e->flags & EDGE_SIBCALL))
6513 remove_edge (e);
6514 else
6515 ei_next (&ei);
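  /* Expansion may have left jumps and labels in the middle of blocks; split
     all blocks into proper sub-basic-blocks and drop edges that became dead.  */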
6519 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6520 bitmap_ones (blocks);
6521 find_many_sub_basic_blocks (blocks);
6522 purge_all_dead_edges ();
6524 expand_stack_alignment ();
6526 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6527 function. */
6528 if (crtl->tail_call_emit)
6529 fixup_tail_calls ();
6531 /* After initial rtl generation, call back to finish generating
6532 exception support code. We need to do this before cleaning up
6533 the CFG as the code does not expect dead landing pads. */
6534 if (fun->eh->region_tree != NULL)
6535 finish_eh_generation ();
6537 /* BB subdivision may have created basic blocks that are only reachable
6538 from unlikely bbs but not marked as such in the profile. */
6539 if (optimize)
6540 propagate_unlikely_bbs_forward ();
6542 /* Remove unreachable blocks, otherwise we cannot compute dominators
6543 which are needed for loop state verification. As a side-effect
6544 this also compacts blocks.
6545 ??? We cannot remove trivially dead insns here as for example
6546 the DRAP reg on i?86 is not magically live at this point.
6547 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6548 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6550 checking_verify_flow_info ();
6552 /* Initialize pseudos allocated for hard registers. */
6553 emit_initial_value_sets ();
6555 /* And finally unshare all RTL. */
6556 unshare_all_rtl ();
6558 /* There's no need to defer outputting this function any more; we
6559 know we want to output it. */
6560 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6562 /* Now that we're done expanding trees to RTL, we shouldn't have any
6563 more CONCATs anywhere. */
6564 generating_concat_p = 0;
6566 if (dump_file)
6568 fprintf (dump_file,
6569 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6570 /* And the pass manager will dump RTL for us. */
6573 /* If we're emitting a nested function, make sure its parent gets
6574 emitted as well. Doing otherwise confuses debug info. */
6576 tree parent;
6577 for (parent = DECL_CONTEXT (current_function_decl);
6578 parent != NULL_TREE;
6579 parent = get_containing_scope (parent))
6580 if (TREE_CODE (parent) == FUNCTION_DECL)
6581 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6584 TREE_ASM_WRITTEN (current_function_decl) = 1;
6586 /* After expanding, the return labels are no longer needed. */
6587 return_label = NULL;
6588 naked_return_label = NULL;
6590 /* After expanding, the tm_restart map is no longer needed. */
6591 if (fun->gimple_df->tm_restart)
6592 fun->gimple_df->tm_restart = NULL;
6594 /* Tag the blocks with a depth number so that change_scope can find
6595 the common parent easily. */
6596 set_block_levels (DECL_INITIAL (fun->decl), 0);
6597 default_rtl_profile ();
6599 /* For -dx discard loops now, otherwise IL verify in clean_state will
6600 ICE. */
6601 if (rtl_dump_and_exit)
6603 cfun->curr_properties &= ~PROP_loops;
6604 loop_optimizer_finalize ();
6607 timevar_pop (TV_POST_EXPAND);
6609 return 0;
6612 } // anon namespace
6614 rtl_opt_pass *
6615 make_pass_expand (gcc::context *ctxt)
6617 return new pass_expand (ctxt);