/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

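/* A minimal illustrative sketch (not part of GCC): the sign-extension
   trick above, restated on plain integers for a hypothetical 8-bit mode.
   Masking keeps the narrow value; flipping the sign bit and subtracting
   it back re-creates the two's complement value.  */
#if 0
static long
trunc_to_8_bits (long c)
{
  long sign = 1L << 7;          /* Sign bit of the narrow mode.  */
  c &= (sign << 1) - 1;         /* Keep only the low 8 bits.  */
  c ^= sign;                    /* Flip the sign bit...  */
  c -= sign;                    /* ...and subtract it back out.  */
  return c;                     /* 0xFF -> -1, 0x7F -> 127.  */
}
#endif
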
/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

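/* Illustrative use (BASE_REG is hypothetical): plus_constant keeps sums
   canonical by folding the constant into an existing constant term
   instead of nesting PLUS expressions.  */
#if 0
  rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
  rtx addr2 = plus_constant (addr, 3);  /* (plus base_reg (const_int 7))  */
  rtx same = plus_constant (addr, 0);   /* C == 0: ADDR returned unchanged  */
#endif
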
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

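/* Illustrative use (X and R1 are hypothetical): for
   X = (plus (plus r1 (const_int 8)) (const_int -2)) the call below
   strips both constants, returns the sum without them, and leaves the
   combined (const_int 6) in CONST_TERM.  */
#if 0
  rtx const_term = const0_rtx;
  rtx stripped = eliminate_constant_term (x, &const_term);
#endif
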
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

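/* Illustrative use (EXP is hypothetical): prefer the compile-time size
   when it is known, and fall back to the run-time rtx for
   variable-sized values.  */
#if 0
  HOST_WIDE_INT csize = int_expr_size (exp);    /* e.g. 40 for int[10]  */
  rtx rsize = csize == -1 ? expr_size (exp) : GEN_INT (csize);
#endif
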
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

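/* Illustrative effect (X is hypothetical): for
   X = (plus:SI (mem:SI A) (mem:SI B)), the call emits loads of A and B
   into fresh pseudos and returns (plus:SI (reg) (reg)); an X with no
   memory references or symbolic constants comes back unchanged.  */
#if 0
  rtx flat = break_out_memory_refs (x);
#endif
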
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

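/* Illustrative use (NARROW_PTR is hypothetical): on a target where
   ptr_mode is narrower than Pmode, widen a pointer value so it can be
   used in address arithmetic in the generic address space.  */
#if 0
  rtx wide = convert_memory_address_addr_space (Pmode, narrow_ptr,
                                                ADDR_SPACE_GENERIC);
#endif
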
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}

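/* Worked example of the offset bookkeeping above (hypothetical numbers):
   for a reference to sym+4 where sym sits at block offset 40 and the
   chosen anchor at block offset 32, the result is (anchor + 12).  */
#if 0
  HOST_WIDE_INT off = 4;        /* Offset within the original address.  */
  off += 40;                    /* SYMBOL_REF_BLOCK_OFFSET of the symbol.  */
  /* ...get_section_anchor picks an anchor at block offset 32...  */
  off -= 32;                    /* Offset from the anchor: 12.  */
#endif
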
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

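/* Worked example of the alignment arithmetic above (hypothetical
   numbers): for (const (plus (symbol_ref s) (const_int 12))) with
   DECL_ALIGN (s) == 128 bits, ctz_hwi (12) == 2, so the offset
   conservatively contributes 2 * BITS_PER_UNIT == 16 bits and the
   pointer is marked MIN (128, 16) == 16-bit aligned.  */
#if 0
  unsigned sa = 128;                    /* DECL_ALIGN of the symbol.  */
  unsigned ca = ctz_hwi (12) * 8;       /* == 16 on an 8-bit-unit target.  */
  unsigned align = MIN (sa, ca);        /* == 16.  */
#endif
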
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

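/* Illustrative use (PARM is hypothetical): ask how a parameter will be
   promoted.  On targets whose PROMOTE_MODE widens sub-word integers, a
   'char' parameter typically comes back as a word-sized mode, with
   UNSIGNEDP saying which extension applies.  */
#if 0
  int unsignedp;
  enum machine_mode pmode = promote_decl_mode (parm, &unsignedp);
#endif
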
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

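/* Illustrative pairing of the two helpers above: anti_adjust_stack
   allocates stack space, adjust_stack releases it again.  */
#if 0
  anti_adjust_stack (GEN_INT (32));     /* Push a 32-byte scratch area.  */
  adjust_stack (GEN_INT (32));          /* Pop it again.  */
#endif
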
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}

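/* The CEIL rounding used above, on plain integers: adding ALIGN - 1
   before a truncating divide-and-multiply rounds up without the
   overflow worries of CEIL_DIV_EXPR.  */
#if 0
  int align = 16;
  int size = 40;
  int rounded = (size + align - 1) / align * align;     /* == 48  */
#endif
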
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */
  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
#endif

  if (must_align)
    {
      unsigned extra, extra_align;

      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
      extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (cfun->calls_setjmp)
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* The above dynamic offset cannot be computed statically at this
         point, but it will be possible to do so after RTL expansion is
         done.  Record how many times we will need to add it.  */
      if (flag_stack_usage)
        current_function_dynamic_alloc_count++;

      /* ??? Can we infer a minimum of STACK_BOUNDARY here?  */
      size_align = BITS_PER_UNIT;
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, size, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);
      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

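/* The add/divide/multiply sequence above is the same CEIL rounding
   applied to the returned address: with REQUIRED_ALIGN == 64 bits
   (8 bytes), an address of 0x1005 becomes (0x1005 + 7) / 8 * 8
   == 0x1008.  A plain-integer restatement:  */
#if 0
  unsigned long addr = 0x1005;
  unsigned long aligned = (addr + 8 - 1) / 8 * 8;       /* == 0x1008  */
#endif
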
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      insn_operand_predicate_fn pred
        = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && !((*pred) (addr, Pmode)))
        addr = copy_to_mode_reg (Pmode, addr);

      emit_insn (gen_check_stack (addr));
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}

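/* Worked example of the probing schedule above (hypothetical numbers):
   with PROBE_INTERVAL == 4096, FIRST == 1024 and SIZE == 10000, the
   constant case probes at offsets 5120 and 9216 from SP, then at 11024;
   the variable case rounds SIZE down for the loop and issues the final
   probe separately.  */
#if 0
  HOST_WIDE_INT first = 1024, size = 10000, interval = 4096;
  HOST_WIDE_INT rounded = size & -interval;     /* == 8192  */
#endif
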
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:  tcode = PLUS_EXPR;  break;
    case MINUS: tcode = MINUS_EXPR; break;
    case MULT:  tcode = MULT_EXPR;  break;
    case DIV:   tcode = RDIV_EXPR;  break;
    case SMIN:  tcode = MIN_EXPR;   break;
    case SMAX:  tcode = MAX_EXPR;   break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"