/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif
static rtx break_out_memory_refs	PARAMS ((rtx));
static void emit_stack_probe		PARAMS ((rtx));
/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);
  /* We clear out all bits that don't belong in MODE, unless they and our
     sign bit are all one.  So we get either a reasonable negative
     value or a reasonable unsigned value.  */

  if (width < HOST_BITS_PER_WIDE_INT
      && ((c & ((HOST_WIDE_INT) (-1) << (width - 1)))
	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
    c &= ((HOST_WIDE_INT) 1 << width) - 1;
  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend.  */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
      && BITS_PER_WORD == width
      && (c & ((HOST_WIDE_INT) 1 << (width - 1))))
    c |= ((HOST_WIDE_INT) (-1) << width);

  return c;
}
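
/* Worked example (illustrative, not from the original source): with a
   16-bit MODE on a host where HOST_BITS_PER_WIDE_INT is 64,
   trunc_int_for_mode (0x12345, mode) clears the bits above bit 15 and
   returns 0x2345.  A value whose excess bits already match its sign
   bit, such as -32768, is left alone by the masking step, and when
   BITS_PER_WORD equals the mode width the sign bit is additionally
   propagated so that host and target agree on the representation.  */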
/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);
    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  /* Any rtl we create here must go in a saveable obstack, since
	     we might have been called from within combine.  */
	  push_obstacks_nochange ();
	  rtl_in_saveable_obstack ();
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  pop_obstacks ();

	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;
    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;
    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 0)))
	{
	  x = gen_rtx_PLUS (mode,
			    plus_constant (XEXP (x, 0), c),
			    XEXP (x, 1));
	  c = 0;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode,
			    XEXP (x, 0),
			    plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));
  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
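
/* Worked examples (illustrative, not from the original source):
   plus_constant (GEN_INT (4), 3) folds to (const_int 7).  Applied to
   (plus (reg) (const_int 4)) with C == 3, the PLUS case combines the
   constant terms and yields (plus (reg) (const_int 7)).  Applied to a
   SYMBOL_REF, the result is wrapped as
   (const (plus (symbol_ref ...) (const_int 3))) so that it is still
   recognized as a constant.  */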
/* This is the same as `plus_constant', except that it handles LO_SUM.

   This function should be used via the `plus_constant_for_output' macro.  */

rtx
plus_constant_for_output_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register enum machine_mode mode = GET_MODE (x);

  if (GET_CODE (x) == LO_SUM)
    return gen_rtx_LO_SUM (mode, XEXP (x, 0),
			   plus_constant_for_output (XEXP (x, 1), c));
  else
    return plus_constant (x, c);
}
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
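
/* Worked example (illustrative, not from the original source): given
   X = (plus (plus (reg) (const_int 8)) (const_int -3)) and *CONSTPTR
   == const0_rtx, the constant terms are folded level by level; the
   function returns (reg) and leaves (const_int 5) in *CONSTPTR, while
   X itself is unchanged.  */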
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	continue;
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (any_uncondjump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
    }
  return 0;
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
		      EXPAND_MEMORY_USE_BAD);
}
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}
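
/* Worked example (illustrative, not from the original source): applied
   to (plus (mem (reg)) (const_int 4)), break_out_memory_refs loads the
   MEM into a fresh pseudo R via force_reg and returns
   (plus (reg R) (const_int 4)); the CONST_INT is left in place because
   its mode is VOIDmode.  */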
#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition, if the second operand is a small constant, we can
	 safely permute the conversion and addition operation.  We can
	 always safely permute them if we are making the address narrower.
	 In addition, always permute the operations if this is a
	 constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (INTVAL (XEXP (x, 1)) + 20000 < 40000
		  || CONSTANT_P (XEXP (x, 0)))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
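
/* Worked example (illustrative, not from the original source): with a
   64-bit Pmode and a 32-bit ptr_mode, converting
   (plus:SI (reg) (const_int 4)) to Pmode satisfies the small-constant
   test above, so the conversion is pushed into the operands: the
   register is widened through convert_modes and the constant is
   carried over, giving (plus:DI (widened reg) (const_int 4)).  */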
/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
	  )
	x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
	   && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
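
/* Worked example (illustrative; the addressing constraints are
   hypothetical): on a machine that accepts reg+reg and reg+offset but
   not reg+reg+offset, memory_address applied to
   (plus (plus (reg A) (reg B)) (const_int 12)) splits off the constant
   with eliminate_constant_term, computes A+B into a new register T
   with copy_to_reg, and returns the valid address
   (plus (reg T) (const_int 12)).  */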
/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;

  return val;
}
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
}
/* Given REF, either a MEM or a REG, and T, either the type of X or
   the expression corresponding to REF, set RTX_UNCHANGING_P if
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
     has the same value.  Currently we simplify this to PARM_DECLs in the
     first case, and decls with TREE_CONSTANT initializers in the second.  */
  if ((TREE_READONLY (t) && DECL_P (t)
       && (TREE_CODE (t) == PARM_DECL
	   || DECL_INITIAL (t) == NULL_TREE
	   || TREE_CONSTANT (DECL_INITIAL (t))))
      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
    RTX_UNCHANGING_P (ref) = 1;
}
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and then copy bits from the type.  */

  /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
     here, because, in C and C++, the fact that a location is accessed
     through a const expression does not mean that the value there can
     never change.  */
  MEM_ALIAS_SET (ref) = get_alias_set (t);
  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);

  /* If we are making an object of this type, we know that it is a scalar if
     the type is not an aggregate.  */
  if (objectp && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* If T is a type, this is all we can do.  Otherwise, we may be able
     to deduce some more information about the expression.  */
  if (TYPE_P (t))
    return;

  maybe_set_unchanging (ref, t);
  if (TREE_THIS_VOLATILE (t))
    MEM_VOLATILE_P (ref) = 1;

  /* Now see if we can say more about whether it's an aggregate or
     scalar.  If we already know it's an aggregate, don't bother.  */
  if (MEM_IN_STRUCT_P (ref))
    return;

  /* Now remove any NOPs: they don't change what the underlying object is.
     Likewise for SAVE_EXPR.  */
  while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	 || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Since we already know the type isn't an aggregate, if this is a decl,
     it must be a scalar.  Or if it is a reference into an aggregate,
     this is part of an aggregate.  Otherwise we don't know.  */
  if (DECL_P (t))
    MEM_SCALAR_P (ref) = 1;
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  register rtx addr;

  if (GET_CODE (x) != MEM)
    return x;

  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      rtx temp = force_reg (Pmode, copy_all_regs (addr));
      rtx mem = gen_rtx_MEM (GET_MODE (x), temp);

      MEM_COPY_ATTRIBUTES (mem, x);
      return mem;
    }
  return x;
}
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}
/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  temp = gen_reg_rtx (mode);

  if (! general_operand (x, mode))
    x = force_operand (x, NULL_RTX);

  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
	XEXP (note, 0) = x;
      else
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;

  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}
/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
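
/* Worked example (illustrative; the promotion rule is hypothetical):
   on a target whose PROMOTE_MODE widens HImode integers to SImode,
   promote_mode returns SImode for a 16-bit integer type and updates
   *PUNSIGNEDP to the signedness the extension should use; pointer
   types come back as Pmode with POINTERS_EXTEND_UNSIGNED for the
   signedness.  */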
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  return size;
}
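
/* Worked example (illustrative, not from the original source): with
   PREFERRED_STACK_BOUNDARY == 64, ALIGN is 8 bytes, so a constant SIZE
   of 13 is rounded to (13 + 7) / 8 * 8 == 16; the non-constant path
   emits the equivalent add, truncating-divide and multiply in RTL.  */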
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx sa;
     rtx after;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now the only supported port with a stack that grows
		 upward is the HPPA, and it does not define
		 SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		REG_NOTES (insn) = XEXP (note, 1);
	      else
		{
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort ();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }

	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
#ifdef PREFERRED_STACK_BOUNDARY
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
#endif

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).

     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif
  if (MUST_ALIGN)
    {
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size)
			+ (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
	size = expand_binop (Pmode, add_optab, size,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	/* If anyone creates a target with these characteristics, let them
	   know that our optimization cannot work correctly in such a case.  */
	abort ();
#endif

	if (GET_CODE (size) == CONST_INT)
	  {
	    int new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }

	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */
  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought to
     be aligned properly.  */
#ifdef PREFERRED_STACK_BOUNDARY
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();
#endif

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);
  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[0].predicate;
      if (pred && ! ((*pred) (target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
	target = convert_memory_address (Pmode, target);
#else
	target = copy_to_mode_reg (Pmode, target);
#endif

      if (mode == VOIDmode)
	mode = Pmode;

      size = convert_modes (mode, ptr_mode, size, 1);
      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();

#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   0, space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
{
  stack_check_libfunc = libfunc;
}
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif
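
/* Worked example (illustrative, not from the original source): with
   STACK_CHECK_PROBE_INTERVAL == 4096, FIRST == 0 and a constant SIZE
   of 10000, the constant case below probes at offsets 4096 and 8192
   from the stack pointer and then emits the final probe at offset
   10000.  */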
void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    emit_library_call (stack_check_libfunc, 0, VOIDmode, 1,
		       memory_address (QImode,
				       gen_rtx (STACK_GROW_OP, Pmode,
						stack_pointer_rtx,
						plus_constant (size, first))),
		       Pmode);

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at FIRST + SIZE.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }
  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first
						  + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
	abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, 0, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
	abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}