/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"
static rtx break_out_memory_refs (rtx);
/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode)
	      || POINTER_BOUNDS_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
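
/* Example (illustrative, not from the original sources): QImode has a
   precision of 8 bits, so trunc_int_for_mode (0xff, QImode) truncates
   0xff to 8 bits and sign-extends, yielding -1.  */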
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c,
	       bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
				   mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
	  tem = force_const_mem (GET_MODE (x), tem);
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for constant term in the sum and combine with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
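
/* Usage sketch (illustrative): a caller building the address BASE + 12
   in Pmode would typically write

     rtx addr = plus_constant (Pmode, base, 12, false);

   and rely on the constant being folded into any constant term already
   present in BASE instead of a second PLUS being stacked on top.  */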
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
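
/* Example (illustrative): for X = (plus (plus (reg R) (const_int 4))
   (const_int 8)) and *CONSTPTR = const0_rtx, the function returns
   (reg R) and leaves (const_int 12) in *CONSTPTR.  */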
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
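
/* Example (illustrative): for a fixed-size object such as an array of
   four 32-bit ints, int_expr_size returns 16; for a variable-length
   type, whose size cannot be expressed as a HOST_WIDE_INT constant,
   it returns -1.  */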
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
				   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address_addr_space
			      (to_mode, XEXP (x, 0), as));

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
	 addition operation only if one operand is a constant and
	 converting the constant does not change it or if one operand
	 is a constant and we are using a ptr_extend instruction
	 (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
	 may overflow/underflow.  We relax the condition to include
	 zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
	 parts of the compiler depend on it.  See PR 49721.

	 We can always safely permute them if we are making the address
	 narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && (POINTERS_EXTEND_UNSIGNED != 0
		  || XEXP (x, 1) == convert_memory_address_addr_space
				      (to_mode, XEXP (x, 1), as))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address_addr_space
				 (to_mode, XEXP (x, 0), as),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
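
/* Sketch (illustrative): on a target where ptr_mode is SImode, Pmode
   is DImode and POINTERS_EXTEND_UNSIGNED > 0, converting
   (plus:SI (reg:SI R) (const_int 4)) to DImode commutes the extension
   over the addition, producing roughly
   (plus:DI (zero_extend:DI (reg:SI R)) (const_int 4)) instead of
   extending the whole sum.  */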
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
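
/* Usage sketch (illustrative): callers normally reach this function
   through the memory_address macro from expr.h, e.g.

     rtx addr = memory_address (SImode, x);

   which supplies ADDR_SPACE_GENERIC and yields an address the target
   accepts for SImode memory accesses.  */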
/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
				   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}
/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  enum machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
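
/* Example (illustrative): a reference (mem (symbol_ref "x")) to an
   object placed in a block may be rewritten as
   (mem (plus (reg A) (const_int O))), where A holds the block's
   section anchor and O is x's offset from that anchor.  */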
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}
/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  ca = sa;
	else
	  ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	align = MIN (sa, ca);
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
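
/* Usage sketch (illustrative):

     rtx r = force_reg (Pmode, gen_rtx_SYMBOL_REF (Pmode, "sym"));

   emits a move of the symbol into a fresh pseudo, attaches a REG_EQUAL
   note so that optimizers may substitute the constant, and marks the
   pseudo as a pointer with the symbol's known alignment.  */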
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}
/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
						  funtype, for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
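
/* Example (illustrative): on a target whose PROMOTE_MODE widens QImode
   and HImode integers to SImode, promote_decl_mode on a `short'
   VAR_DECL returns SImode; PARM_DECLs and RESULT_DECLs instead go
   through the promote_function_mode hook as shown above.  */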
/* Controls the behaviour of {anti_,}adjust_stack.  */

static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}
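
/* Example (illustrative): on a downward-growing stack,
   anti_adjust_stack (GEN_INT (16)) emits SP := SP - 16 to push a
   16-byte block and adjust_stack (GEN_INT (16)) pops it again; both
   keep stack_pointer_delta in sync for constant adjustments.  */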
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
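
/* Example (illustrative): with a settled 16-byte preferred boundary, a
   constant SIZE of 40 is rounded to (40 + 15) / 16 * 16 = 48; for a
   variable SIZE the same add, divide and multiply sequence is emitted
   as RTL instead.  */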
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
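
/* Usage sketch (illustrative) of how the save/restore pair is meant
   to bracket a region that moves the stack pointer:

     rtx save = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save);
     ...allocate and use dynamic stack space...
     emit_stack_restore (SAVE_BLOCK, save);  */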
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx insn, set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
	size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
	size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
	extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
	extra_align = STACK_BOUNDARY;
      else
	extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
	stack_usage_size += extra;

      if (extra && size_align > extra_align)
	size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
	{
	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
	  stack_usage_size = (stack_usage_size + align - 1) / align * align;
	}
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (gen_split_stack_space_check (size, available_label));
	}
#endif

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	{
	  ask = expand_binop (Pmode, add_optab, size,
			      gen_int_mode (required_align / BITS_PER_UNIT - 1,
					    Pmode),
			      NULL_RTX, 1, OPTAB_LIB_WIDEN);
	  must_align = true;
	}

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       1, ask, Pmode);

      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (!(stack_pointer_delta
		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     gen_int_mode (required_align / BITS_PER_UNIT - 1,
					   Pmode),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      gen_int_mode (required_align / BITS_PER_UNIT,
					    Pmode),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    gen_int_mode (required_align / BITS_PER_UNIT,
					  Pmode),
			    NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
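
/* Usage sketch (illustrative): the expansion of __builtin_alloca
   reaches this function along the lines of

     rtx addr = allocate_dynamic_stack_space (size_rtx, 0,
					      BIGGEST_ALIGNMENT, true);

   where the zero SIZE_ALIGN lets the alignment be derived from
   SIZE_RTX itself when it is constant.  */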
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));
  else
#endif
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
	emit_insn (gen_probe_stack (memref));
      else
#endif
	emit_move_insn (memref, const0_rtx);
    }
}
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  addr = memory_address (Pmode,
				 plus_constant (Pmode, stack_pointer_rtx,
						STACK_GROW_OFF (first + i)));
	  emit_stack_probe (addr);
	}

      addr = memory_address (Pmode,
			     plus_constant (Pmode, stack_pointer_rtx,
					    STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 gen_int_mode (first, Pmode)),
				 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 test_addr,
						 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

	 while (TEST_ADDR != LAST_ADDR)
	   {
	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	     probe at TEST_ADDR
	   }

	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
			       end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
			   1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  rtx addr;

	  if (CONST_INT_P (temp))
	    {
	      /* Use [base + disp] addressing mode if supported.  */
	      HOST_WIDE_INT offset = INTVAL (temp);
	      addr = memory_address (Pmode,
				     plus_constant (Pmode, last_addr,
						    STACK_GROW_OFF (offset)));
	    }
	  else
	    {
	      /* Manual CSE if the difference is not known at compile-time.  */
	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	      addr = memory_address (Pmode,
				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						     last_addr, temp));
	    }

	  emit_stack_probe (addr);
	}
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
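
/* Example (illustrative): with PROBE_INTERVAL = 4096, FIRST = 0 and a
   constant SIZE of 10000, the constant case above emits probes at
   offsets 4096, 8192 and 10000 from the stack pointer.  */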
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it exceeds SIZE.  If only one probe is
	 needed, this will not generate any code.  Then adjust and probe
	 to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  if (first_probe)
	    {
	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
	      first_probe = false;
	    }
	  else
	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
	  emit_stack_probe (stack_pointer_rtx);
	}

      if (first_probe)
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

	 while (SP != LAST_ADDR)
	   {
	     SP = SP + PROBE_INTERVAL
	     probe at SP
	   }

	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			       Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  /* Manual CSE if the difference is not known at compile-time.  */
	  if (GET_CODE (temp) != CONST_INT)
	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	  anti_adjust_stack (temp);
	  emit_stack_probe (stack_pointer_rtx);
	}
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
#include "gt-explow.h"