/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "expmed.h"
#include "profile-count.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "common/common-target.h"
#include "output.h"
#include "params.h"

static rtx break_out_memory_refs (rtx);
static void anti_adjust_stack_and_probe_stack_clash (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  scalar_mode smode = as_a <scalar_mode> (mode);
  int width = GET_MODE_PRECISION (smode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (smode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */
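  /* The masking and XOR/subtract below implement the standard
     two's-complement trick: mask C to WIDTH bits, then (c ^ sign) - sign
     copies bit WIDTH-1 into all higher bits.  If the sign bit is clear
     the XOR and the subtraction cancel; if it is set, the subtraction
     borrows through the bits above it.  */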

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Likewise for polynomial values, using the sign-extended representation
   for each individual coefficient.  */

poly_int64
trunc_int_for_mode (poly_int64 x, machine_mode mode)
{
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
  return x;
}

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (known_eq (c, 0))
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  rtx cst = get_pool_constant (XEXP (x, 0));

	  if (GET_CODE (cst) == CONST_VECTOR
	      && GET_MODE_INNER (GET_MODE (cst)) == mode)
	    {
	      cst = gen_lowpart (mode, cst);
	      gcc_assert (cst);
	    }
	  if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
	    {
	      tem = plus_constant (mode, cst, c);
	      tem = force_const_mem (GET_MODE (x), tem);
	      /* Targets may disallow some constants in the constant pool, thus
		 force_const_mem may return NULL_RTX.  */
	      if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
		return tem;
	    }
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for constant term in the sum and combine with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      if (CONST_POLY_INT_P (x))
	return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      break;
    }

  if (maybe_ne (c, 0))
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
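
/* For example, adding 8 to (reg sp) yields (plus (reg sp) (const_int 8)),
   while adding 8 to (symbol_ref "x") yields
   (const (plus (symbol_ref "x") (const_int 8))), since ALL_CONSTANT is set
   for symbolic operands and the sum is rewrapped as a CONST.  */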

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
					   XEXP (x, 1))) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
					   *constptr, tem)) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
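
/* For example, given X = (plus (plus (reg) (const_int 4)) (const_int 8))
   and *CONSTPTR = (const_int 0), eliminate_constant_term returns (reg)
   and leaves *CONSTPTR = (const_int 12).  */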

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

static rtx
convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
				     bool in_const ATTRIBUTE_UNUSED,
				     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  scalar_int_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
						  true, no_emit);
      return temp ? gen_rtx_CONST (to_mode, temp) : temp;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  Inside a CONST RTL, this is safe for both pointers
	 zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
		  || XEXP (x, 1) == convert_memory_address_addr_space_1
				      (to_mode, XEXP (x, 1), as, in_const,
				       no_emit)
		  || POINTERS_EXTEND_UNSIGNED < 0)))
	{
	  temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
						      as, in_const, no_emit);
	  return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
					 temp, XEXP (x, 1))
		       : temp);
	}
      break;

    default:
      break;
    }

  if (no_emit)
    return NULL_RTX;

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
				   addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
}

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
				   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
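
/* For example, force_reg (SImode, GEN_INT (42)) emits a move of 42 into a
   fresh pseudo and returns that pseudo.  The REG_EQUAL note above is only
   added when the emitted move does not already have X as its source.  */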

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
						  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
  scalar_mode smode;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
      /* Values of these types always have scalar mode.  */
      smode = as_a <scalar_mode> (mode);
      PROMOTE_MODE (smode, unsignedp, type);
      *punsignedp = unsignedp;
      return smode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 1);
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

/* Return the promoted mode for name.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of same type as the SSA_NAME, if we had created
   one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.c.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      if (mode != BLKmode)
	return mode;
    }

  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);

  /* Bypass TYPE_MODE when it maps vector modes to BLKmode.  */
  if (mode == BLKmode)
    {
      gcc_assert (VECTOR_TYPE_P (type));
      mode = type->type_common.mode;
    }

  machine_mode pmode = promote_mode (type, mode, &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}

/* Controls the behavior of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_args_size_note (insn, stack_pointer_delta);
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta -= const_adjust;

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta += const_adjust;

  adjust_stack_1 (adjust, true);
}
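
/* For example, on a downward-growing stack, anti_adjust_stack (GEN_INT (16))
   emits SP := SP - 16, pushing 16 bytes, and records the adjustment in
   stack_pointer_delta.  */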

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
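  /* For example, with ALIGN == 16 this computes
     SIZE = (SIZE + 15) / 16 * 16, which rounds 1..16 up to 16.  */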
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
	fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
	fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
	fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
	fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
	fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
	fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}

/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */

static rtx
align_dynamic_address (rtx target, unsigned required_align)
{
  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  target = expand_binop (Pmode, add_optab, target,
			 gen_int_mode (required_align / BITS_PER_UNIT - 1,
				       Pmode),
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			  gen_int_mode (required_align / BITS_PER_UNIT,
					Pmode),
			  NULL_RTX, 1);
  target = expand_mult (Pmode, target,
			gen_int_mode (required_align / BITS_PER_UNIT,
				      Pmode),
			NULL_RTX, 1);

  return target;
}

/* Return an rtx through *PSIZE, representing the size of an area of memory to
   be dynamically pushed on the stack.

   *PSIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
   the additional size returned.  */

void
get_dynamic_stack_size (rtx *psize, unsigned size_align,
			unsigned required_align,
			HOST_WIDE_INT *pstack_usage_size)
{
  rtx size = *psize;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;
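      /* LSB now holds the lowest set bit of SIZE, i.e. the largest power
	 of two that divides SIZE, which bounds the alignment that SIZE
	 is known to guarantee.  */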

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
	size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
	size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  At this point in the compilation, we don't always
     know the final value of the STACK_DYNAMIC_OFFSET used in function.c
     (it might depend on the size of the outgoing parameter lists, for
     example), so we must preventively align the value.  We leave space
     in SIZE for the hole that might result from the alignment operation.  */

  unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
  if (known_align == 0)
    known_align = BITS_PER_UNIT;
  if (required_align > known_align)
    {
      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);
      if (size_align > known_align)
	size_align = known_align;

      if (flag_stack_usage_info && pstack_usage_size)
	*pstack_usage_size += extra;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info && pstack_usage_size)
	{
	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
	  *pstack_usage_size =
	    (*pstack_usage_size + align - 1) / align * align;
	}
    }

  *psize = size;
}

/* Return the number of bytes to "protect" on the stack for -fstack-check.

   "protect" in the context of -fstack-check means how many bytes we
   should always ensure are available on the stack.  More importantly
   this is how many bytes are skipped when probing the stack.

   On some targets we want to reuse the -fstack-check prologue support
   to give a degree of protection against stack clashing style attacks.

   In that scenario we do not want to skip bytes before probing as that
   would render the stack clash protections useless.

   So we never use STACK_CHECK_PROTECT directly.  Instead we indirect
   through this helper, which allows us to provide different values for
   -fstack-check and -fstack-clash-protection.  */

HOST_WIDE_INT
get_stack_check_protect (void)
{
  if (flag_stack_clash_protection)
    return 0;
  return STACK_CHECK_PROTECT;
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
   no such upper bound is known.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align,
			      HOST_WIDE_INT max_size,
			      bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx_insn *insn;
	  rtx set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, try the maximum size.  */
      if (stack_usage_size < 0)
	stack_usage_size = max_size;

      /* If the size is still not constant, we can't say anything.  */
      if (stack_usage_size < 0)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  do_pending_stack_adjust ();

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (targetm.gen_split_stack_space_check
		     (size, available_label));
	}

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	ask = expand_binop (Pmode, add_optab, size,
			    gen_int_mode (required_align / BITS_PER_UNIT - 1,
					  Pmode),
			    NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       ask, Pmode);

      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (multiple_p (stack_pointer_delta,
			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (get_stack_check_protect (), size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      poly_int64 saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx_code_label *space_available = gen_label_rtx ();
	  if (STACK_GROWS_DOWNWARD)
	    available = expand_binop (Pmode, sub_optab,
				      stack_pointer_rtx, stack_limit_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);
	  else
	    available = expand_binop (Pmode, sub_optab,
				      stack_limit_rtx, stack_pointer_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
	  if (targetm.have_trap ())
	    emit_insn (targetm.gen_trap ());
	  else
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else if (flag_stack_clash_protection)
	anti_adjust_stack_and_probe_stack_clash (size);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}

/* Return an rtx representing the address of an area of memory already
   statically pushed onto the stack in the virtual stack vars area.  (It is
   assumed that the area is allocated in the function prologue.)

   Any required stack pointer alignment is preserved.

   OFFSET is the offset of the area into the virtual stack vars area.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.  */

rtx
get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
{
  rtx target;

  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  target = gen_reg_rtx (Pmode);
  emit_move_insn (target, virtual_stack_vars_rtx);
  target = expand_binop (Pmode, add_optab, target,
			 gen_int_mode (offset, Pmode),
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  if (targetm.have_probe_stack_address ())
    {
      struct expand_operand ops[1];
      insn_code icode = targetm.code_for_probe_stack_address;
      create_address_operand (ops, address);
      maybe_legitimize_operands (icode, 0, 1, ops);
      expand_insn (icode, 1, ops);
    }
  else
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;
      memref = validize_mem (memref);

      /* See if we have an insn to probe the stack.  */
      if (targetm.have_probe_stack ())
	emit_insn (targetm.gen_probe_stack (memref));
      else
	emit_move_insn (memref, const0_rtx);
    }
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
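
/* For example, on a downward-growing stack a probe at offset OFF from the
   stack pointer is addressed as (minus sp OFF), so STACK_GROW_OFF (OFF)
   yields -OFF for constant displacements.  */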

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
			 addr, Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
  else if (targetm.have_check_stack ())
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
      gcc_assert (success);
    }

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  addr = memory_address (Pmode,
				 plus_constant (Pmode, stack_pointer_rtx,
						STACK_GROW_OFF (first + i)));
	  emit_stack_probe (addr);
	}

      addr = memory_address (Pmode,
			     plus_constant (Pmode, stack_pointer_rtx,
					    STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
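      /* The AND rounds down because PROBE_INTERVAL is a power of two,
	 so -PROBE_INTERVAL is a mask that clears its low-order bits.  */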
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 gen_int_mode (first, Pmode)),
				 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 test_addr,
						 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

	 while (TEST_ADDR != LAST_ADDR)
	   {
	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	     probe at TEST_ADDR
	   }

	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
			       end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
			   1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  rtx addr;

	  if (CONST_INT_P (temp))
	    {
	      /* Use [base + disp] addressing mode if supported.  */
	      HOST_WIDE_INT offset = INTVAL (temp);
	      addr = memory_address (Pmode,
				     plus_constant (Pmode, last_addr,
						    STACK_GROW_OFF (offset)));
	    }
	  else
	    {
	      /* Manual CSE if the difference is not known at compile-time.  */
	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	      addr = memory_address (Pmode,
				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						     last_addr, temp));
	    }

	  emit_stack_probe (addr);
	}
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}

/* Compute parameters for stack clash probing a dynamic stack
   allocation of SIZE bytes.

   We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.

   Additionally we conditionally dump the type of probing that will
   be needed given the values computed.  */

void
compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
					  rtx *residual,
					  HOST_WIDE_INT *probe_interval,
					  rtx size)
{
  /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL.  */
  *probe_interval
    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
  *rounded_size = simplify_gen_binary (AND, Pmode, size,
				       GEN_INT (-*probe_interval));
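  /* E.g. with the (typical) 4kB probe interval this is SIZE & -4096.  */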
1847 /* Compute the value of the stack pointer for the last iteration.
1848 It's just SP + ROUNDED_SIZE. */
1849 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
1850 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1851 stack_pointer_rtx,
1852 rounded_size_op),
1853 NULL_RTX);
1855 /* Compute any residuals not allocated by the loop above. Residuals
1856 are just the ROUNDED_SIZE - SIZE. */
1857 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
1859 /* Dump key information to make writing tests easy. */
1860 if (dump_file)
1862 if (*rounded_size == CONST0_RTX (Pmode))
1863 fprintf (dump_file,
1864 "Stack clash skipped dynamic allocation and probing loop.\n");
1865 else if (CONST_INT_P (*rounded_size)
1866 && INTVAL (*rounded_size) <= 4 * *probe_interval)
1867 fprintf (dump_file,
1868 "Stack clash dynamic allocation and probing inline.\n");
1869 else if (CONST_INT_P (*rounded_size))
1870 fprintf (dump_file,
1871 "Stack clash dynamic allocation and probing in "
1872 "rotated loop.\n");
1873 else
1874 fprintf (dump_file,
1875 "Stack clash dynamic allocation and probing in loop.\n");
1877 if (*residual != CONST0_RTX (Pmode))
1878 fprintf (dump_file,
1879 "Stack clash dynamic allocation and probing residuals.\n");
1880 else
1881 fprintf (dump_file,
1882 "Stack clash skipped dynamic allocation and "
1883 "probing residuals.\n");
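
/* As an illustrative example (values assumed for exposition): with the
   default 4 KiB probe interval and SIZE == 10000, this computes
   ROUNDED_SIZE == 8192, LAST_ADDR == SP + 8192 (in the direction of stack
   growth) and RESIDUAL == 1808; since 8192 <= 4 * 4096 the dump above would
   report the inline probing strategy.  */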

/* Emit the start of an allocate/probe loop for stack
   clash protection.

   LOOP_LAB and END_LAB are returned for use when we emit the
   end of the loop.

   LAST_ADDR is the value for SP which stops the loop.  */

void
emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
                                              rtx *end_lab,
                                              rtx last_addr,
                                              bool rotated)
{
  /* Essentially we want to emit any setup code, the top of loop
     label and the comparison at the top of the loop.  */
  *loop_lab = gen_label_rtx ();
  *end_lab = gen_label_rtx ();

  emit_label (*loop_lab);
  if (!rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                             Pmode, 1, *end_lab);
}

/* Emit the end of a stack clash probing loop.

   This consists of just the jump back to LOOP_LAB and
   emitting END_LOOP after the loop.  */

void
emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
                                            rtx last_addr, bool rotated)
{
  if (rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
                             Pmode, 1, loop_lab);
  else
    emit_jump (loop_lab);

  emit_label (end_loop);
}
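
/* Taken together, the two helpers above emit one of two loop shapes
   (a sketch of the generated control flow, not additional code).
   Non-rotated, for a trip count unknown at compile time:

     loop:  if (SP == LAST_ADDR) goto end;
            ...allocate and probe...
            goto loop;
     end:

   Rotated, when the loop is known to run at least once:

     loop:  ...allocate and probe...
            if (SP != LAST_ADDR) goto loop;
     end:

   i.e. the rotated form moves the exit test to the bottom and drops the
   initial comparison.  */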

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.

   This is subtly different from anti_adjust_stack_and_probe to try and
   prevent stack-clash attacks:

     1. It must assume no knowledge of the probing state, any allocation
        must probe.

        Consider the case of a 1 byte alloca in a loop.  If the sum of the
        allocations is large, then this could be used to jump the guard if
        probes were not emitted.

     2. It never skips probes, whereas anti_adjust_stack_and_probe will
        skip probes on the first couple of PROBE_INTERVALs on the assumption
        they're done elsewhere.

     3. It only allocates and probes SIZE bytes, it does not need to
        allocate/probe beyond that because this probing style does not
        guarantee signal handling capability if the guard is hit.  */

static void
anti_adjust_stack_and_probe_stack_clash (rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can get here with a constant size on some targets.  */
  rtx rounded_size, last_addr, residual;
  HOST_WIDE_INT probe_interval, probe_range;
  bool target_probe_range_p = false;
  compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
                                            &residual, &probe_interval, size);

  /* Get the back-end specific probe ranges.  */
  probe_range = targetm.stack_clash_protection_alloca_probe_range ();
  target_probe_range_p = probe_range != 0;
  gcc_assert (probe_range >= 0);

  /* If no back-end specific range defined, default to the top of the
     newly allocated range.  */
  if (probe_range == 0)
    probe_range = probe_interval - GET_MODE_SIZE (word_mode);
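
  /* For example (illustrative, not mandated by any ABI): with a 4 KiB probe
     interval and 8-byte words, the default PROBE_RANGE is 4088, so each
     probe below lands in the last word of the interval just allocated.  */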

  if (rounded_size != CONST0_RTX (Pmode))
    {
      if (CONST_INT_P (rounded_size)
          && INTVAL (rounded_size) <= 4 * probe_interval)
        {
          for (HOST_WIDE_INT i = 0;
               i < INTVAL (rounded_size);
               i += probe_interval)
            {
              anti_adjust_stack (GEN_INT (probe_interval));

              /* The prologue does not probe residuals.  Thus the offset
                 here is to probe just beyond what the prologue had already
                 allocated.  */
              emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
                                               probe_range));
            }

          emit_insn (gen_blockage ());
        }
      else
        {
          rtx loop_lab, end_loop;
          bool rotate_loop = CONST_INT_P (rounded_size);
          emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
                                                        last_addr, rotate_loop);

          anti_adjust_stack (GEN_INT (probe_interval));

          /* The prologue does not probe residuals.  Thus the offset here
             is to probe just beyond what the prologue had already
             allocated.  */
          emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
                                           probe_range));

          emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
                                                      last_addr, rotate_loop);
          emit_insn (gen_blockage ());
        }
    }

  if (residual != CONST0_RTX (Pmode))
    {
      rtx label = NULL_RTX;
      /* RESIDUAL could be zero at runtime and in that case *sp could
         hold live data.  Furthermore, we do not want to probe into the
         red zone.

         If TARGET_PROBE_RANGE_P then the target has promised it's safe to
         probe at offset 0, in which case we no longer have to check for
         RESIDUAL == 0.  However we still need to probe at the right offset
         when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.

         If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at
         *sp on RESIDUAL != 0 at runtime if RESIDUAL is not a compile-time
         constant.  */
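
      /* Concretely, the probe offsets from the post-adjustment SP emitted
         below are (a summary of the code that follows, not extra behavior):

           RESIDUAL unknown, TARGET_PROBE_RANGE_P:
             0, then PROBE_RANGE (skipped when RESIDUAL < PROBE_RANGE).
           RESIDUAL unknown, !TARGET_PROBE_RANGE_P:
             RESIDUAL - word size (skipped when RESIDUAL == 0).
           RESIDUAL constant, TARGET_PROBE_RANGE_P:
             word size if RESIDUAL <= PROBE_RANGE, else PROBE_RANGE.
           RESIDUAL constant, !TARGET_PROBE_RANGE_P:
             RESIDUAL - word size.  */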
      anti_adjust_stack (residual);

      if (!CONST_INT_P (residual))
        {
          label = gen_label_rtx ();
          rtx_code op = target_probe_range_p ? LT : EQ;
          rtx probe_cmp_value = target_probe_range_p
            ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
            : CONST0_RTX (GET_MODE (residual));

          if (target_probe_range_p)
            emit_stack_probe (stack_pointer_rtx);

          emit_cmp_and_jump_insns (residual, probe_cmp_value,
                                   op, NULL_RTX, Pmode, 1, label);
        }

      rtx x = NULL_RTX;

      /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe
         up by the ABI defined safe value.  */
      if (!CONST_INT_P (residual) && target_probe_range_p)
        x = GEN_INT (probe_range);
      /* If RESIDUAL is a constant but smaller than the ABI defined safe
         value, we still want to probe up, but the safest amount is a
         word.  */
      else if (target_probe_range_p)
        {
          if (INTVAL (residual) <= probe_range)
            x = GEN_INT (GET_MODE_SIZE (word_mode));
          else
            x = GEN_INT (probe_range);
        }
      else
        /* If nothing else, probe at the top of the new allocation.  */
        x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));

      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));

      emit_insn (gen_blockage ());
      if (!CONST_INT_P (residual))
        emit_label (label);
    }
}
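
/* As an illustrative example (assuming a 4 KiB probe interval, 8-byte words
   and no target-specific probe range, so PROBE_RANGE == 4088): a compile-time
   alloca of 10000 bytes takes the inline path above (ROUNDED_SIZE == 8192
   <= 4 * 4096) and emits roughly

     SP -= 4096;  probe at SP + 4088;
     SP -= 4096;  probe at SP + 4088;
     SP -= 1808;  probe at SP + 1800;

   so every page of the allocation is probed and no probe lands below the
   space actually allocated.  */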

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
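
  /* To make the accounting concrete (illustrative values only): with
     PROBE_INTERVAL == 4096, dope == 32 and SIZE == 10000, the loop runs for
     i == 4096 and i == 8192, adjusting by 2*4096 + 32 and then 4096, and the
     final adjustment is 10000 + 4096 - 12288 == 1808, for a total of
     SIZE + PROBE_INTERVAL + dope bytes, exactly the skipped first interval
     and dope beyond SIZE.  */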

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

           while (SP != LAST_ADDR)
             {
               SP = SP + PROBE_INTERVAL
               probe at SP
             }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
      opt_scalar_int_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */
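
      /* For example (illustrative, target-dependent): a 6-byte aggregate
         returned in a BLKmode register is retagged below with the first
         integer mode of at least 6 bytes, typically DImode on a 64-bit
         target.  */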

      FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
            break;
        }

      PUT_MODE (val, tmpmode.require ());
    }

  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for real_arithmetic.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
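
/* For instance, rtx_to_tree_code (MULT) yields (int) MULT_EXPR, while an
   rtx code with no tree counterpart here, such as UDIV, maps to
   LAST_AND_UNUSED_TREE_CODE, which callers can treat as "not supported".  */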

#include "gt-explow.h"