/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "expmed.h"
#include "profile-count.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "stringpool.h"
#include "common/common-target.h"
#include "output.h"
static rtx break_out_memory_refs (rtx);
/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  scalar_mode smode = as_a <scalar_mode> (mode);
  int width = GET_MODE_PRECISION (smode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (smode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
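
/* A quick illustration (hypothetical values, not used elsewhere in this
   file): trunc_int_for_mode (0x1ff, QImode) masks to the low 8 bits and
   then sign-extends, yielding (HOST_WIDE_INT) -1, while
   trunc_int_for_mode (0x34, QImode) comes back unchanged as 0x34.  */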
/* Likewise for polynomial values, using the sign-extended representation
   for each individual coefficient.  */

poly_int64
trunc_int_for_mode (poly_int64 x, machine_mode mode)
{
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
  return x;
}
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (known_eq (c, 0))
    return x;

 restart:
  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          rtx cst = get_pool_constant (XEXP (x, 0));

          if (GET_CODE (cst) == CONST_VECTOR
              && GET_MODE_INNER (GET_MODE (cst)) == mode)
            {
              cst = gen_lowpart (mode, cst);
              gcc_assert (cst);
            }
          else if (GET_MODE (cst) == VOIDmode
                   && get_pool_mode (XEXP (x, 0)) != mode)
            break;
          if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
            {
              tem = plus_constant (mode, cst, c);
              tem = force_const_mem (GET_MODE (x), tem);
              /* Targets may disallow some constants in the constant pool, thus
                 force_const_mem may return NULL_RTX.  */
              if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
                {
                  x = tem;
                  break;
                }
            }
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
        inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
          if (term == const0_rtx)
            x = XEXP (x, 0);
          else if (inplace)
            XEXP (x, 1) = term;
          else
            x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
          c = 0;
        }
      else if (rtx *const_loc = find_constant_term_loc (&y))
        {
          if (!inplace)
            {
              /* We need to be careful since X may be shared and we can't
                 modify it in place.  */
              x = copy_rtx (x);
              const_loc = find_constant_term_loc (&x);
            }
          *const_loc = plus_constant (mode, *const_loc, c, true);
          c = 0;
        }
      break;

    default:
      if (CONST_POLY_INT_P (x))
        return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      break;
    }

  if (maybe_ne (c, 0))
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
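
/* An illustrative use (BASE_REG is a hypothetical pseudo): given
   ADDR = (plus BASE_REG (const_int 8)), the call
   plus_constant (Pmode, ADDR, -8, false) folds the inner sum to
   const0_rtx and hands back BASE_REG itself, so callers never see a
   degenerate "reg + 0" address.  */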
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                           XEXP (x, 1))) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                           *constptr, tem)) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
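
/* For example: with X = (plus (reg R) (const_int 12)) and *CONSTPTR
   initialized to const0_rtx, this returns (reg R) and stores
   (const_int 12) through CONSTPTR; memory_address_addr_space below uses
   exactly this split to rebuild "register + offset" addresses.  */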
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

rtx
convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
                                     bool in_const ATTRIBUTE_UNUSED,
                                     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  scalar_int_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      {
        auto *last = no_emit ? nullptr : get_last_insn ();
        temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
                                                    true, no_emit);
        if (temp && (no_emit || last == get_last_insn ()))
          return gen_rtx_CONST (to_mode, temp);
        return NULL_RTX;
      }

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const,
                                       no_emit)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        {
          temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
                                                      as, in_const, no_emit);
          return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                                         temp, XEXP (x, 1))
                       : temp);
        }
      break;

    case UNSPEC:
      /* Assume that all UNSPECs in a constant address can be converted
         operand-by-operand.  We could add a target hook if some targets
         require different behavior.  */
      if (in_const && GET_MODE (x) == from_mode)
        {
          unsigned int n = XVECLEN (x, 0);
          rtvec v = gen_rtvec (n);
          for (unsigned int i = 0; i < n; ++i)
            {
              rtx op = XVECEXP (x, 0, i);
              if (GET_MODE (op) == from_mode)
                op = convert_memory_address_addr_space_1 (to_mode, op, as,
                                                          in_const, no_emit);
              RTVEC_ELT (v, i) = op;
            }
          return gen_rtx_UNSPEC (to_mode, v, XINT (x, 1));
        }
      break;

    default:
      break;
    }

  if (no_emit)
    return NULL_RTX;

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
                                   addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
}
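
/* A sketch of the effect, assuming a POINTERS_EXTEND_UNSIGNED target
   whose pointer mode is SImode and address mode DImode: converting
   (symbol_ref:SI "x") to DImode hits the SYMBOL_REF case in the helper
   above and just returns a copy with its mode changed to DImode; no
   extension instruction is emitted.  */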
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}
/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
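
/* Sketch of the rewrite (register names hypothetical): a reference such
   as (mem (const (plus (symbol_ref "a") (const_int 4)))) can become
   (mem (plus (reg ANCHOR) (const_int OFF))), where ANCHOR holds the
   section anchor chosen above and OFF is the distance from that anchor
   to "a" plus the original offset of 4.  */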
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}
/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
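
/* Minimal usage sketch (the symbol name is made up):

     rtx r = force_reg (Pmode, gen_rtx_SYMBOL_REF (Pmode, "var"));

   emits a single move, attaches a REG_EQUAL note recording the constant
   source, and marks R as a pointer with the alignment derived above.  */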
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}
/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
  scalar_mode smode;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      /* Values of these types always have scalar mode.  */
      smode = as_a <scalar_mode> (mode);
      PROMOTE_MODE (smode, unsignedp, type);
      *punsignedp = unsignedp;
      return smode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 1);
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
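
/* Illustration, assuming a PROMOTE_MODE target that widens sub-word
   integers (as most RISC ports do): for a PARM_DECL of type "short",
   promote_decl_mode returns SImode rather than HImode, with *PUNSIGNEDP
   reflecting how the target extends such arguments.  */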
/* Return the promoted mode for name.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of same type as the SSA_NAME, if we had created
   one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.cc.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      if (mode != BLKmode)
        return mode;
    }

  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}
/* Controls the behavior of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_args_size_note (insn, stack_pointer_delta);
}
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta -= const_adjust;

  adjust_stack_1 (adjust, false);
}
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta += const_adjust;

  adjust_stack_1 (adjust, true);
}
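
/* The two entry points above are used in matched pairs; a sketch:

     anti_adjust_stack (GEN_INT (32));   allocate: SP -= 32 if the
                                         stack grows downward
     ...
     adjust_stack (GEN_INT (32));        free: SP += 32 again

   with stack_pointer_delta tracking the net adjustment in between.  */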
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
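
/* Worked example: if the preferred boundary is fixed at 128 bits, ALIGN
   is 16 bytes and a constant SIZE of 40 rounds to (40 + 15) / 16 * 16
   = 48; the non-constant path emits the same add/divide/multiply
   sequence to do that computation at run time.  */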
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
        fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
        fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
        fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
        fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
        fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
        fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}
/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */

static rtx
align_dynamic_address (rtx target, unsigned required_align)
{
  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  target = expand_binop (Pmode, add_optab, target,
                         gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                       Pmode),
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                          gen_int_mode (required_align / BITS_PER_UNIT,
                                        Pmode),
                          NULL_RTX, 1);
  target = expand_mult (Pmode, target,
                        gen_int_mode (required_align / BITS_PER_UNIT,
                                      Pmode),
                        NULL_RTX, 1);

  return target;
}
/* Return an rtx through *PSIZE, representing the size of an area of memory to
   be dynamically pushed on the stack.

   *PSIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
   the additional size returned.  */

static void
get_dynamic_stack_size (rtx *psize, unsigned size_align,
                        unsigned required_align,
                        HOST_WIDE_INT *pstack_usage_size)
{
  rtx size = *psize;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this function.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  At this point in the compilation, we don't always
     know the final value of the STACK_DYNAMIC_OFFSET used in function.cc
     (it might depend on the size of the outgoing parameter lists, for
     example), so we must preventively align the value.  We leave space
     in SIZE for the hole that might result from the alignment operation.  */

  unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
  if (known_align == 0)
    known_align = BITS_PER_UNIT;
  if (required_align > known_align)
    {
      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);
      if (size_align > known_align)
        size_align = known_align;

      if (flag_stack_usage_info && pstack_usage_size)
        *pstack_usage_size += extra;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info && pstack_usage_size)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          *pstack_usage_size =
            (*pstack_usage_size + align - 1) / align * align;
        }
    }

  *psize = size;
}
/* Return the number of bytes to "protect" on the stack for -fstack-check.

   "protect" in the context of -fstack-check means how many bytes we need
   to always ensure are available on the stack; as a consequence, this is
   also how many bytes are first skipped when probing the stack.

   On some targets we want to reuse the -fstack-check prologue support
   to give a degree of protection against stack clashing style attacks.

   In that scenario we do not want to skip bytes before probing as that
   would render the stack clash protections useless.

   So we never use STACK_CHECK_PROTECT directly.  Instead we indirectly
   use it through this helper, which lets us provide different values
   for -fstack-check and -fstack-clash-protection.  */

HOST_WIDE_INT
get_stack_check_protect (void)
{
  if (flag_stack_clash_protection)
    return 0;

  return STACK_CHECK_PROTECT;
}
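
/* For instance, if a target defines STACK_CHECK_PROTECT as 12 KiB, then
   under -fstack-check the first probe lands 12 KiB beyond the stack
   pointer, but under -fstack-clash-protection this returns 0 so probing
   starts at the new allocation itself; skipping bytes there would
   defeat the protection.  */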
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
   no such upper bound is known.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align,
                              HOST_WIDE_INT max_size,
                              bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx_insn *insn;
          rtx set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, try the maximum size.  */
      if (stack_usage_size < 0)
        stack_usage_size = max_size;

      /* If the size is still not constant, we can't say anything.  */
      if (stack_usage_size < 0)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  get_dynamic_stack_size (&size, size_align, required_align,
                          &stack_usage_size);

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  do_pending_stack_adjust ();

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (targetm.gen_split_stack_space_check
                     (size, available_label));
        }

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        ask = expand_binop (Pmode, add_optab, size,
                            gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                          Pmode),
                            NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (multiple_p (stack_pointer_delta,
                          PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (get_stack_check_protect (), size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      class expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      poly_int64 saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx_code_label *space_available = gen_label_rtx ();
          if (STACK_GROWS_DOWNWARD)
            available = expand_binop (Pmode, sub_optab,
                                      stack_pointer_rtx, stack_limit_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);
          else
            available = expand_binop (Pmode, sub_optab,
                                      stack_limit_rtx, stack_pointer_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);

          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
          if (targetm.have_trap ())
            emit_insn (targetm.gen_trap ());
          else
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;

      /* If stack checking or stack clash protection is requested,
         then probe the stack while allocating space from it.  */
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else if (flag_stack_clash_protection)
        anti_adjust_stack_and_probe_stack_clash (size);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
/* Return an rtx representing the address of an area of memory already
   statically pushed onto the stack in the virtual stack vars area.  (It is
   assumed that the area is allocated in the function prologue.)

   Any required stack pointer alignment is preserved.

   OFFSET is the offset of the area into the virtual stack vars area.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   BASE is the rtx of the base of this virtual stack vars area.
   The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
   on the stack.  */

rtx
get_dynamic_stack_base (poly_int64 offset, unsigned required_align, rtx base)
{
  rtx target;

  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  target = gen_reg_rtx (Pmode);
  emit_move_insn (target, base);
  target = expand_binop (Pmode, add_optab, target,
                         gen_int_mode (offset, Pmode),
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  return target;
}
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
  tree ptype
    = Pmode == ptr_mode
      ? ptr_type_node
      : lang_hooks.types.type_for_mode (Pmode, 1);
  tree ftype
    = build_function_type_list (void_type_node, ptype, NULL_TREE);
  tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                          get_identifier (libfunc_name), ftype);
  DECL_EXTERNAL (decl) = 1;
  SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
}
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  if (targetm.have_probe_stack_address ())
    {
      class expand_operand ops[1];
      insn_code icode = targetm.code_for_probe_stack_address;
      create_address_operand (ops, address);
      maybe_legitimize_operands (icode, 0, 1, ops);
      expand_insn (icode, 1, ops);
    }
  else
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;
      memref = validize_mem (memref);

      /* See if we have an insn to probe the stack.  */
      if (targetm.have_probe_stack ())
        emit_insn (targetm.gen_probe_stack (memref));
      else
        emit_move_insn (memref, const0_rtx);
    }
}
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
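
/* So on a downward-growing stack the expression used below,

     plus_constant (Pmode, stack_pointer_rtx, STACK_GROW_OFF (first + i))

   forms the address SP - (first + i), while on an upward-growing stack
   the identical source forms SP + (first + i).  */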
void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
                         addr, Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
  else if (targetm.have_check_stack ())
    {
      class expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
      gcc_assert (success);
    }

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 gen_int_mode (first, Pmode)),
                                 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
/* Compute parameters for stack clash probing a dynamic stack
   allocation of SIZE bytes.

   We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.

   Additionally we conditionally dump the type of probing that will
   be needed given the values computed.  */

void
compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
                                          rtx *residual,
                                          HOST_WIDE_INT *probe_interval,
                                          rtx size)
{
  /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL.  */
  *probe_interval
    = 1 << param_stack_clash_protection_probe_interval;
  *rounded_size = simplify_gen_binary (AND, Pmode, size,
                                       GEN_INT (-*probe_interval));

  /* Compute the value of the stack pointer for the last iteration.
     It's just SP + ROUNDED_SIZE.  */
  rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
  *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                              stack_pointer_rtx,
                                              rounded_size_op),
                              NULL_RTX);

  /* Compute any residuals not allocated by the loop above.  Residuals
     are just the ROUNDED_SIZE - SIZE.  */
  *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);

  /* Dump key information to make writing tests easy.  */
  if (dump_file)
    {
      if (*rounded_size == CONST0_RTX (Pmode))
        fprintf (dump_file,
                 "Stack clash skipped dynamic allocation and probing loop.\n");
      else if (CONST_INT_P (*rounded_size)
               && INTVAL (*rounded_size) <= 4 * *probe_interval)
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing inline.\n");
      else if (CONST_INT_P (*rounded_size))
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing in "
                 "rotated loop.\n");
      else
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing in loop.\n");

      if (*residual != CONST0_RTX (Pmode))
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing residuals.\n");
      else
        fprintf (dump_file,
                 "Stack clash skipped dynamic allocation and "
                 "probing residuals.\n");
    }
}
/* Emit the start of an allocate/probe loop for stack
   clash protection.

   LOOP_LAB and END_LAB are returned for use when we emit the
   end of the loop.

   LAST_ADDR is the value for SP which stops the loop.  */
void
emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
                                              rtx *end_lab,
                                              rtx last_addr,
                                              bool rotated)
{
  /* Essentially we want to emit any setup code, the top of loop
     label and the comparison at the top of the loop.  */
  *loop_lab = gen_label_rtx ();
  *end_lab = gen_label_rtx ();

  emit_label (*loop_lab);
  if (!rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                             Pmode, 1, *end_lab);
}
/* Emit the end of a stack clash probing loop.

   This consists of just the jump back to LOOP_LAB and
   emitting END_LOOP after the loop.  */

void
emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
                                            rtx last_addr, bool rotated)
{
  if (rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
                             Pmode, 1, loop_lab);
  else
    emit_jump (loop_lab);

  emit_label (end_loop);
}
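
/* Together the two helpers above bracket a loop of this shape
   (pseudo-rtl, non-rotated form):

       loop_lab:
         if (SP == LAST_ADDR) goto end_lab;
         ... allocate and probe one PROBE_INTERVAL ...
         goto loop_lab;
       end_lab:

   in the rotated form the test instead sits at the bottom as
   "if (SP != LAST_ADDR) goto loop_lab".  */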
1968 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1969 while probing it. This pushes when SIZE is positive. SIZE need not
1972 This is subtly different than anti_adjust_stack_and_probe to try and
1973 prevent stack-clash attacks
1975 1. It must assume no knowledge of the probing state, any allocation
1978 Consider the case of a 1 byte alloca in a loop. If the sum of the
1979 allocations is large, then this could be used to jump the guard if
1980 probes were not emitted.
1982 2. It never skips probes, whereas anti_adjust_stack_and_probe will
1983 skip the probe on the first PROBE_INTERVAL on the assumption it
1984 was already done in the prologue and in previous allocations.
1986 3. It only allocates and probes SIZE bytes, it does not need to
1987 allocate/probe beyond that because this probing style does not
1988 guarantee signal handling capability if the guard is hit. */
1991 anti_adjust_stack_and_probe_stack_clash (rtx size
)
1993 /* First ensure SIZE is Pmode. */
1994 if (GET_MODE (size
) != VOIDmode
&& GET_MODE (size
) != Pmode
)
1995 size
= convert_to_mode (Pmode
, size
, 1);
1997 /* We can get here with a constant size on some targets. */
1998 rtx rounded_size
, last_addr
, residual
;
1999 HOST_WIDE_INT probe_interval
, probe_range
;
2000 bool target_probe_range_p
= false;
2001 compute_stack_clash_protection_loop_data (&rounded_size
, &last_addr
,
2002 &residual
, &probe_interval
, size
);
2004 /* Get the back-end specific probe ranges. */
2005 probe_range
= targetm
.stack_clash_protection_alloca_probe_range ();
2006 target_probe_range_p
= probe_range
!= 0;
2007 gcc_assert (probe_range
>= 0);
2009 /* If no back-end specific range defined, default to the top of the newly
2011 if (probe_range
== 0)
2012 probe_range
= probe_interval
- GET_MODE_SIZE (word_mode
);
  if (rounded_size != CONST0_RTX (Pmode))
    {
      if (CONST_INT_P (rounded_size)
	  && INTVAL (rounded_size) <= 4 * probe_interval)
	{
	  for (HOST_WIDE_INT i = 0;
	       i < INTVAL (rounded_size);
	       i += probe_interval)
	    {
	      anti_adjust_stack (GEN_INT (probe_interval));
	      /* The prologue does not probe residuals.  Thus the offset
		 here probes just beyond what the prologue had already
		 allocated.  */
	      emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
					       (probe_interval
						- GET_MODE_SIZE (word_mode))));
	      emit_insn (gen_blockage ());
	    }
	}
      else
	{
	  rtx loop_lab, end_loop;
	  bool rotate_loop = CONST_INT_P (rounded_size);
	  emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
							last_addr, rotate_loop);

	  anti_adjust_stack (GEN_INT (probe_interval));

	  /* The prologue does not probe residuals.  Thus the offset here
	     probes just beyond what the prologue had already allocated.  */
	  emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
					   (probe_interval
					    - GET_MODE_SIZE (word_mode))));

	  emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
						      last_addr, rotate_loop);
	  emit_insn (gen_blockage ());
	}
    }
  if (residual != CONST0_RTX (Pmode))
    {
      rtx label = NULL_RTX;
      /* RESIDUAL could be zero at runtime and in that case *sp could
	 hold live data.  Furthermore, we do not want to probe into the
	 red zone.

	 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
	 probe at offset 0, in which case we no longer have to check for
	 RESIDUAL == 0.  However we still need to probe at the right offset
	 when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.

	 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at
	 *sp on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time
	 constant.  */
      anti_adjust_stack (residual);
      if (!CONST_INT_P (residual))
	{
	  label = gen_label_rtx ();
	  rtx_code op = target_probe_range_p ? LT : EQ;
	  rtx probe_cmp_value
	    = target_probe_range_p
	      ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
	      : CONST0_RTX (GET_MODE (residual));

	  if (target_probe_range_p)
	    emit_stack_probe (stack_pointer_rtx);

	  emit_cmp_and_jump_insns (residual, probe_cmp_value,
				   op, NULL_RTX, Pmode, 1, label);
	}
      rtx x = NULL_RTX;

      /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe
	 up by the ABI defined safe value.  */
      if (!CONST_INT_P (residual) && target_probe_range_p)
	x = GEN_INT (probe_range);
      /* If RESIDUAL is a constant but smaller than the ABI defined safe
	 value, we still want to probe up, but the safest amount is a
	 word.  */
      else if (target_probe_range_p)
	{
	  if (INTVAL (residual) <= probe_range)
	    x = GEN_INT (GET_MODE_SIZE (word_mode));
	  else
	    x = GEN_INT (probe_range);
	}
      else
	/* If nothing else, probe at the top of the new allocation.  */
	x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));

      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));

      emit_insn (gen_blockage ());
      if (!CONST_INT_P (residual))
	emit_label (label);
    }
}
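
/* An illustrative sketch of what the function above emits for a
   compile-time constant request, assuming a 4 kB probe interval, an
   8-byte word and SIZE = 9000 (so ROUNDED_SIZE = 8192, RESIDUAL = 808):

     SP -= 4096;  probe at SP + 4088;   -- first interval
     SP -= 4096;  probe at SP + 4088;   -- second interval
     SP -= 808;   probe at SP + 800;    -- residual, one word below its top

   Every allocation is immediately followed by a probe, so no
   attacker-controlled sequence of allocas can jump the guard page.  */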
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */
void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;
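
  /* For instance, with 8-byte words the dope is 4 * 8 = 32 bytes, so the
     first PROBE_INTERVAL + 32 bytes are allocated without a probe.  */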
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);
  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it exceeds SIZE.  If only one probe is
	 needed, this will not generate any code.  Then adjust and probe
	 to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  if (first_probe)
	    {
	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
	      first_probe = false;
	    }
	  else
	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
	  emit_stack_probe (stack_pointer_rtx);
	}

      if (first_probe)
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
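
  /* As a worked example (assuming PROBE_INTERVAL is 4096 and dope is 32):
     for SIZE = 12000 the loop above runs for i = 4096 and 8192, emitting

       SP -= 8224;  probe at SP;   -- i = 4096 (2 * PROBE_INTERVAL + dope)
       SP -= 4096;  probe at SP;   -- i = 8192
       SP -= 3808;  probe at SP;   -- tail: SIZE + PROBE_INTERVAL - 12288

     for a total adjustment of SIZE + PROBE_INTERVAL + dope bytes.  */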
  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();
      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);
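
      /* For example, with PROBE_INTERVAL = 4096, a SIZE of 10000 yields
	 10000 & -4096 = 8192: the AND clears the low bits and rounds
	 down to a multiple of the probe interval.  */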
      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 rounded_size_op), NULL_RTX);
      /* Step 3: the loop

	   while (SP != LAST_ADDR)
	     {
	       SP = SP + PROBE_INTERVAL
	       probe at SP
	     }

	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			       Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);
      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  /* Manual CSE if the difference is not known at compile-time.  */
	  if (GET_CODE (temp) != CONST_INT)
	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	  anti_adjust_stack (temp);
	  emit_stack_probe (stack_pointer_rtx);
	}
    }
  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */
rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
      opt_scalar_int_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
	    break;
	}

      PUT_MODE (val, tmpmode.require ());
    }
  return val;
}
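
/* For illustration: a 6-byte BLKmode return value scans MODE_INT from
   narrowest to widest and stops at the first mode of at least 6 bytes,
   typically the 8-byte DImode, which then replaces BLKmode on VAL.  */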
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */
rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for real_arithmetic.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */
int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
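
/* Illustrative use (mirroring the real_arithmetic callers, e.g. in
   simplify-rtx.cc):

     REAL_VALUE_TYPE value;
     bool inexact = real_arithmetic (&value, rtx_to_tree_code (code),
				     &opr0, &opr1);

   A PLUS rtx code maps to (int) PLUS_EXPR; codes with no real-arithmetic
   counterpart map to LAST_AND_UNUSED_TREE_CODE.  */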
#include "gt-explow.h"