1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "flags.h"
30 #include "function.h"
31 #include "expr.h"
32 #include "optabs.h"
33 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "ggc.h"
36 #include "recog.h"
38 static rtx break_out_memory_refs PARAMS ((rtx));
39 static void emit_stack_probe PARAMS ((rtx));
42 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
44 HOST_WIDE_INT
45 trunc_int_for_mode (c, mode)
46 HOST_WIDE_INT c;
47 enum machine_mode mode;
49 int width = GET_MODE_BITSIZE (mode);
51 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
52 if (mode == BImode)
53 return c & 1 ? STORE_FLAG_VALUE : 0;
55 /* Sign-extend for the requested mode. */
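   /* The statements below sign-extend the low WIDTH bits of C with a
      mask/xor/subtract idiom: mask C to WIDTH bits, XOR with the sign
      bit, then subtract the sign bit.  For example, with WIDTH == 8 and
      C == 0x1ff, the mask gives 0xff, the XOR gives 0x7f, and subtracting
      0x80 yields -1, the signed value of the 8-bit pattern 0xff.  */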
57 if (width < HOST_BITS_PER_WIDE_INT)
59 HOST_WIDE_INT sign = 1;
60 sign <<= width - 1;
61 c &= (sign << 1) - 1;
62 c ^= sign;
63 c -= sign;
66 return c;
69 /* Return an rtx for the sum of X and the integer C.
71 This function should be used via the `plus_constant' macro. */
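/* As an illustration, plus_constant ((symbol_ref "x"), 4) yields
   (const (plus (symbol_ref "x") (const_int 4))), while adding 4 to
   (plus (reg) (const_int 6)) folds the constants and yields
   (plus (reg) (const_int 10)).  */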
73 rtx
74 plus_constant_wide (x, c)
75 rtx x;
76 HOST_WIDE_INT c;
78 RTX_CODE code;
79 rtx y;
80 enum machine_mode mode;
81 rtx tem;
82 int all_constant = 0;
84 if (c == 0)
85 return x;
87 restart:
89 code = GET_CODE (x);
90 mode = GET_MODE (x);
91 y = x;
93 switch (code)
95 case CONST_INT:
96 return GEN_INT (INTVAL (x) + c);
98 case CONST_DOUBLE:
100 unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
101 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
102 unsigned HOST_WIDE_INT l2 = c;
103 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
104 unsigned HOST_WIDE_INT lv;
105 HOST_WIDE_INT hv;
107 add_double (l1, h1, l2, h2, &lv, &hv);
109 return immed_double_const (lv, hv, VOIDmode);
112 case MEM:
113 /* If this is a reference to the constant pool, try replacing it with
114 a reference to a new constant. If the resulting address isn't
115 valid, don't return it because we have no way to validize it. */
116 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
117 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
120 = force_const_mem (GET_MODE (x),
121 plus_constant (get_pool_constant (XEXP (x, 0)),
122 c));
123 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
124 return tem;
126 break;
128 case CONST:
129 /* If adding to something entirely constant, set a flag
130 so that we can add a CONST around the result. */
131 x = XEXP (x, 0);
132 all_constant = 1;
133 goto restart;
135 case SYMBOL_REF:
136 case LABEL_REF:
137 all_constant = 1;
138 break;
140 case PLUS:
141 /* The interesting case is adding the integer to a sum.
142 Look for constant term in the sum and combine
143 with C. For an integer constant term, we make a combined
144 integer. For a constant term that is not an explicit integer,
145 we cannot really combine, but group them together anyway.
147 Restart or use a recursive call in case the remaining operand is
148 something that we handle specially, such as a SYMBOL_REF.
150 We may not immediately return from the recursive call here, lest
151 all_constant be lost. */
153 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
155 c += INTVAL (XEXP (x, 1));
157 if (GET_MODE (x) != VOIDmode)
158 c = trunc_int_for_mode (c, GET_MODE (x));
160 x = XEXP (x, 0);
161 goto restart;
163 else if (CONSTANT_P (XEXP (x, 1)))
165 x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
166 c = 0;
168 else if (find_constant_term_loc (&y))
170 /* We need to be careful since X may be shared and we can't
171 modify it in place. */
172 rtx copy = copy_rtx (x);
173 rtx *const_loc = find_constant_term_loc (&copy);
175 *const_loc = plus_constant (*const_loc, c);
176 x = copy;
177 c = 0;
179 break;
181 default:
182 break;
185 if (c != 0)
186 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
188 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
189 return x;
190 else if (all_constant)
191 return gen_rtx_CONST (mode, x);
192 else
193 return x;
196 /* If X is a sum, return a new sum like X but lacking any constant terms.
197 Add all the removed constant terms into *CONSTPTR.
198 X itself is not altered. The result != X if and only if
199 it is not isomorphic to X. */
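/* For example, with *CONSTPTR initially const0_rtx, eliminating the
   constant terms of (plus (plus (reg) (const_int 8)) (const_int -3))
   returns just (reg) and leaves (const_int 5) in *CONSTPTR.  */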
202 eliminate_constant_term (x, constptr)
203 rtx x;
204 rtx *constptr;
206 rtx x0, x1;
207 rtx tem;
209 if (GET_CODE (x) != PLUS)
210 return x;
212 /* First handle constants appearing at this level explicitly. */
213 if (GET_CODE (XEXP (x, 1)) == CONST_INT
214 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
215 XEXP (x, 1)))
216 && GET_CODE (tem) == CONST_INT)
218 *constptr = tem;
219 return eliminate_constant_term (XEXP (x, 0), constptr);
222 tem = const0_rtx;
223 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
224 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
225 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
226 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
227 *constptr, tem))
228 && GET_CODE (tem) == CONST_INT)
230 *constptr = tem;
231 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
234 return x;
237 /* Return the insn that next references REG after INSN, or 0
238 if REG is clobbered before it is next referenced or we cannot find
239 an insn that references REG in a straight-line piece of code. */
242 find_next_ref (reg, insn)
243 rtx reg;
244 rtx insn;
246 rtx next;
248 for (insn = NEXT_INSN (insn); insn; insn = next)
250 next = NEXT_INSN (insn);
251 if (GET_CODE (insn) == NOTE)
252 continue;
253 if (GET_CODE (insn) == CODE_LABEL
254 || GET_CODE (insn) == BARRIER)
255 return 0;
256 if (GET_CODE (insn) == INSN
257 || GET_CODE (insn) == JUMP_INSN
258 || GET_CODE (insn) == CALL_INSN)
260 if (reg_set_p (reg, insn))
261 return 0;
262 if (reg_mentioned_p (reg, PATTERN (insn)))
263 return insn;
264 if (GET_CODE (insn) == JUMP_INSN)
266 if (any_uncondjump_p (insn))
267 next = JUMP_LABEL (insn);
268 else
269 return 0;
271 if (GET_CODE (insn) == CALL_INSN
272 && REGNO (reg) < FIRST_PSEUDO_REGISTER
273 && call_used_regs[REGNO (reg)])
274 return 0;
276 else
277 abort ();
279 return 0;
282 /* Return an rtx for the size in bytes of the value of EXP. */
285 expr_size (exp)
286 tree exp;
288 tree size;
290 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
291 && DECL_SIZE_UNIT (exp) != 0)
292 size = DECL_SIZE_UNIT (exp);
293 else
294 size = size_in_bytes (TREE_TYPE (exp));
296 if (TREE_CODE (size) != INTEGER_CST
297 && contains_placeholder_p (size))
298 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
300 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
304 /* Return a copy of X in which all memory references
305 and all constants that involve symbol refs
306 have been replaced with new temporary registers.
307 Also emit code to load the memory locations and constants
308 into those registers.
310 If X contains no such constants or memory references,
311 X itself (not a copy) is returned.
313 If a constant is found in the address that is not a legitimate constant
314 in an insn, it is left alone in the hope that it might be valid in the
315 address.
317 X may contain no arithmetic except addition, subtraction and multiplication.
318 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
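/* As an illustration, an address such as (plus (mem:SI (reg A)) (const_int 4))
   comes back as (plus (reg B) (const_int 4)), with a load of the MEM into
   the new pseudo B emitted as a side effect; the CONST_INT, having VOIDmode,
   is left in place.  (A and B here are just placeholder names.)  */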
320 static rtx
321 break_out_memory_refs (x)
322 rtx x;
324 if (GET_CODE (x) == MEM
325 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
326 && GET_MODE (x) != VOIDmode))
327 x = force_reg (GET_MODE (x), x);
328 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
329 || GET_CODE (x) == MULT)
331 rtx op0 = break_out_memory_refs (XEXP (x, 0));
332 rtx op1 = break_out_memory_refs (XEXP (x, 1));
334 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
335 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
338 return x;
341 #ifdef POINTERS_EXTEND_UNSIGNED
343 /* Given X, a memory address in ptr_mode, convert it to an address
344 in Pmode, or vice versa (TO_MODE says which way). We take advantage of
345 the fact that pointers are not allowed to overflow by commuting arithmetic
346 operations over conversions so that address arithmetic insns can be
347 used. */
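/* For instance, on a target whose ptr_mode is SImode, whose Pmode is DImode
   and which extends pointers unsigned, (plus:SI (reg:SI R) (const_int 4))
   is converted by extending only the register operand and rebuilding the
   PLUS in DImode, since converting (const_int 4) leaves it unchanged.  */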
350 convert_memory_address (to_mode, x)
351 enum machine_mode to_mode;
352 rtx x;
354 enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
355 rtx temp;
356 enum rtx_code code;
358 /* Here we handle some special cases. If none of them apply, fall through
359 to the default case. */
360 switch (GET_CODE (x))
362 case CONST_INT:
363 case CONST_DOUBLE:
364 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
365 code = TRUNCATE;
366 else if (POINTERS_EXTEND_UNSIGNED < 0)
367 break;
368 else if (POINTERS_EXTEND_UNSIGNED > 0)
369 code = ZERO_EXTEND;
370 else
371 code = SIGN_EXTEND;
372 temp = simplify_unary_operation (code, to_mode, x, from_mode);
373 if (temp)
374 return temp;
375 break;
377 case SUBREG:
378 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
379 && GET_MODE (SUBREG_REG (x)) == to_mode)
380 return SUBREG_REG (x);
381 break;
383 case LABEL_REF:
384 temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
385 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
386 return temp;
387 break;
389 case SYMBOL_REF:
390 temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
391 SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
392 CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
393 STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
394 return temp;
395 break;
397 case CONST:
398 return gen_rtx_CONST (to_mode,
399 convert_memory_address (to_mode, XEXP (x, 0)));
400 break;
402 case PLUS:
403 case MULT:
404 /* For addition we can safely permute the conversion and addition
405 operation if one operand is a constant and converting the constant
406 does not change it. We can always safely permute them if we are
407 making the address narrower. */
408 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
409 || (GET_CODE (x) == PLUS
410 && GET_CODE (XEXP (x, 1)) == CONST_INT
411 && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
412 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
413 convert_memory_address (to_mode, XEXP (x, 0)),
414 XEXP (x, 1));
415 break;
417 default:
418 break;
421 return convert_modes (to_mode, from_mode,
422 x, POINTERS_EXTEND_UNSIGNED);
424 #endif
426 /* Given a memory address or facsimile X, construct a new address,
427 currently equivalent, that is stable: future stores won't change it.
429 X must be composed of constants, register and memory references
430 combined with addition, subtraction and multiplication:
431 in other words, just what you can get from expand_expr if sum_ok is 1.
433 Works by making copies of all regs and memory locations used
434 by X and combining them the same way X does.
435 You could also stabilize the reference to this address
436 by copying the address to a register with copy_to_reg;
437 but then you wouldn't get indexed addressing in the reference. */
440 copy_all_regs (x)
441 rtx x;
443 if (GET_CODE (x) == REG)
445 if (REGNO (x) != FRAME_POINTER_REGNUM
446 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
447 && REGNO (x) != HARD_FRAME_POINTER_REGNUM
448 #endif
450 x = copy_to_reg (x);
452 else if (GET_CODE (x) == MEM)
453 x = copy_to_reg (x);
454 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
455 || GET_CODE (x) == MULT)
457 rtx op0 = copy_all_regs (XEXP (x, 0));
458 rtx op1 = copy_all_regs (XEXP (x, 1));
459 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
460 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
462 return x;
465 /* Return something equivalent to X but valid as a memory address
466 for something of mode MODE. When X is not itself valid, this
467 works by copying X or subexpressions of it into registers. */
470 memory_address (mode, x)
471 enum machine_mode mode;
472 rtx x;
474 rtx oldx = x;
476 if (GET_CODE (x) == ADDRESSOF)
477 return x;
479 #ifdef POINTERS_EXTEND_UNSIGNED
480 if (GET_MODE (x) != Pmode)
481 x = convert_memory_address (Pmode, x);
482 #endif
484 /* By passing constant addresses thru registers
485 we get a chance to cse them. */
486 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
487 x = force_reg (Pmode, x);
489 /* Accept a QUEUED that refers to a REG
490 even though that isn't a valid address.
491 On attempting to put this in an insn we will call protect_from_queue
492 which will turn it into a REG, which is valid. */
493 else if (GET_CODE (x) == QUEUED
494 && GET_CODE (QUEUED_VAR (x)) == REG)
497 /* We get better cse by rejecting indirect addressing at this stage.
498 Let the combiner create indirect addresses where appropriate.
499 For now, generate the code so that the subexpressions useful to share
500 are visible. But not if cse won't be done! */
501 else
503 if (! cse_not_expected && GET_CODE (x) != REG)
504 x = break_out_memory_refs (x);
506 /* At this point, any valid address is accepted. */
507 GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
509 /* If it was valid before but breaking out memory refs invalidated it,
510 use it the old way. */
511 if (memory_address_p (mode, oldx))
512 goto win2;
514 /* Perform machine-dependent transformations on X
515 in certain cases. This is not necessary since the code
516 below can handle all possible cases, but machine-dependent
517 transformations can make better code. */
518 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
520 /* PLUS and MULT can appear in special ways
521 as the result of attempts to make an address usable for indexing.
522 Usually they are dealt with by calling force_operand, below.
523 But a sum containing constant terms is special
524 if removing them makes the sum a valid address:
525 then we generate that address in a register
526 and index off of it. We do this because it often makes
527 shorter code, and because the addresses thus generated
528 in registers often become common subexpressions. */
529 if (GET_CODE (x) == PLUS)
531 rtx constant_term = const0_rtx;
532 rtx y = eliminate_constant_term (x, &constant_term);
533 if (constant_term == const0_rtx
534 || ! memory_address_p (mode, y))
535 x = force_operand (x, NULL_RTX);
536 else
538 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
539 if (! memory_address_p (mode, y))
540 x = force_operand (x, NULL_RTX);
541 else
542 x = y;
546 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
547 x = force_operand (x, NULL_RTX);
549 /* If we have a register that's an invalid address,
550 it must be a hard reg of the wrong class. Copy it to a pseudo. */
551 else if (GET_CODE (x) == REG)
552 x = copy_to_reg (x);
554 /* Last resort: copy the value to a register, since
555 the register is a valid address. */
556 else
557 x = force_reg (Pmode, x);
559 goto done;
561 win2:
562 x = oldx;
563 win:
564 if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
565 /* Don't copy an addr via a reg if it is one of our stack slots. */
566 && ! (GET_CODE (x) == PLUS
567 && (XEXP (x, 0) == virtual_stack_vars_rtx
568 || XEXP (x, 0) == virtual_incoming_args_rtx)))
570 if (general_operand (x, Pmode))
571 x = force_reg (Pmode, x);
572 else
573 x = force_operand (x, NULL_RTX);
577 done:
579 /* If we didn't change the address, we are done. Otherwise, mark
580 a reg as a pointer if we have REG or REG + CONST_INT. */
581 if (oldx == x)
582 return x;
583 else if (GET_CODE (x) == REG)
584 mark_reg_pointer (x, BITS_PER_UNIT);
585 else if (GET_CODE (x) == PLUS
586 && GET_CODE (XEXP (x, 0)) == REG
587 && GET_CODE (XEXP (x, 1)) == CONST_INT)
588 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
590 /* OLDX may have been the address on a temporary. Update the address
591 to indicate that X is now used. */
592 update_temp_slot_address (oldx, x);
594 return x;
597 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
600 memory_address_noforce (mode, x)
601 enum machine_mode mode;
602 rtx x;
604 int ambient_force_addr = flag_force_addr;
605 rtx val;
607 flag_force_addr = 0;
608 val = memory_address (mode, x);
609 flag_force_addr = ambient_force_addr;
610 return val;
613 /* Convert a mem ref into one with a valid memory address.
614 Pass through anything else unchanged. */
617 validize_mem (ref)
618 rtx ref;
620 if (GET_CODE (ref) != MEM)
621 return ref;
622 if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
623 && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
624 return ref;
626 /* Don't alter REF itself, since that is probably a stack slot. */
627 return replace_equiv_address (ref, XEXP (ref, 0));
630 /* Given REF, either a MEM or a REG, and T, either the type of REF or
631 the expression corresponding to REF, set RTX_UNCHANGING_P if
632 appropriate. */
634 void
635 maybe_set_unchanging (ref, t)
636 rtx ref;
637 tree t;
639 /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
640 initialization is only executed once, or whose initializer always
641 has the same value. Currently we simplify this to PARM_DECLs in the
642 first case, and decls with TREE_CONSTANT initializers in the second. */
643 if ((TREE_READONLY (t) && DECL_P (t)
644 && (TREE_CODE (t) == PARM_DECL
645 || DECL_INITIAL (t) == NULL_TREE
646 || TREE_CONSTANT (DECL_INITIAL (t))))
647 || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
648 RTX_UNCHANGING_P (ref) = 1;
651 /* Return a modified copy of X with its memory address copied
652 into a temporary register to protect it from side effects.
653 If X is not a MEM, it is returned unchanged (and not copied).
654 Perhaps even if it is a MEM, if there is no need to change it. */
657 stabilize (x)
658 rtx x;
661 if (GET_CODE (x) != MEM
662 || ! rtx_unstable_p (XEXP (x, 0)))
663 return x;
665 return
666 replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
669 /* Copy the value or contents of X to a new temp reg and return that reg. */
672 copy_to_reg (x)
673 rtx x;
675 rtx temp = gen_reg_rtx (GET_MODE (x));
677 /* If not an operand, must be an address with PLUS and MULT so
678 do the computation. */
679 if (! general_operand (x, VOIDmode))
680 x = force_operand (x, temp);
682 if (x != temp)
683 emit_move_insn (temp, x);
685 return temp;
688 /* Like copy_to_reg but always give the new register mode Pmode
689 in case X is a constant. */
692 copy_addr_to_reg (x)
693 rtx x;
695 return copy_to_mode_reg (Pmode, x);
698 /* Like copy_to_reg but always give the new register mode MODE
699 in case X is a constant. */
702 copy_to_mode_reg (mode, x)
703 enum machine_mode mode;
704 rtx x;
706 rtx temp = gen_reg_rtx (mode);
708 /* If not an operand, must be an address with PLUS and MULT so
709 do the computation. */
710 if (! general_operand (x, VOIDmode))
711 x = force_operand (x, temp);
713 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
714 abort ();
715 if (x != temp)
716 emit_move_insn (temp, x);
717 return temp;
720 /* Load X into a register if it is not already one.
721 Use mode MODE for the register.
722 X should be valid for mode MODE, but it may be a constant which
723 is valid for all integer modes; that's why caller must specify MODE.
725 The caller must not alter the value in the register we return,
726 since we mark it as a "constant" register. */
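/* E.g., force_reg (SImode, GEN_INT (42)) emits a move of the constant into
   a fresh pseudo and attaches a REG_EQUAL note recording 42, so later
   passes may substitute the constant for the register.  */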
729 force_reg (mode, x)
730 enum machine_mode mode;
731 rtx x;
733 rtx temp, insn, set;
735 if (GET_CODE (x) == REG)
736 return x;
738 if (general_operand (x, mode))
740 temp = gen_reg_rtx (mode);
741 insn = emit_move_insn (temp, x);
743 else
745 temp = force_operand (x, NULL_RTX);
746 if (GET_CODE (temp) == REG)
747 insn = get_last_insn ();
748 else
750 rtx temp2 = gen_reg_rtx (mode);
751 insn = emit_move_insn (temp2, temp);
752 temp = temp2;
756 /* Let optimizers know that TEMP's value never changes
757 and that X can be substituted for it. Don't get confused
758 if INSN set something else (such as a SUBREG of TEMP). */
759 if (CONSTANT_P (x)
760 && (set = single_set (insn)) != 0
761 && SET_DEST (set) == temp)
762 set_unique_reg_note (insn, REG_EQUAL, x);
764 return temp;
767 /* If X is a memory ref, copy its contents to a new temp reg and return
768 that reg. Otherwise, return X. */
771 force_not_mem (x)
772 rtx x;
774 rtx temp;
776 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
777 return x;
779 temp = gen_reg_rtx (GET_MODE (x));
780 emit_move_insn (temp, x);
781 return temp;
784 /* Copy X to TARGET (if it's nonzero and a reg)
785 or to a new temp reg and return that reg.
786 MODE is the mode to use for X in case it is a constant. */
789 copy_to_suggested_reg (x, target, mode)
790 rtx x, target;
791 enum machine_mode mode;
793 rtx temp;
795 if (target && GET_CODE (target) == REG)
796 temp = target;
797 else
798 temp = gen_reg_rtx (mode);
800 emit_move_insn (temp, x);
801 return temp;
804 /* Return the mode to use to store a scalar of TYPE and MODE.
805 PUNSIGNEDP points to the signedness of the type and may be adjusted
806 to show what signedness to use on extension operations.
808 FOR_CALL is non-zero if this call is promoting args for a call. */
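/* On a typical PROMOTE_MODE target that widens sub-word integers,
   promote_mode (short_integer_type_node, HImode, &unsignedp, 0) would come
   back as SImode; when POINTERS_EXTEND_UNSIGNED is defined, pointer types
   come back as Pmode with the signedness taken from
   POINTERS_EXTEND_UNSIGNED.  */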
810 enum machine_mode
811 promote_mode (type, mode, punsignedp, for_call)
812 tree type;
813 enum machine_mode mode;
814 int *punsignedp;
815 int for_call ATTRIBUTE_UNUSED;
817 enum tree_code code = TREE_CODE (type);
818 int unsignedp = *punsignedp;
820 #ifdef PROMOTE_FOR_CALL_ONLY
821 if (! for_call)
822 return mode;
823 #endif
825 switch (code)
827 #ifdef PROMOTE_MODE
828 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
829 case CHAR_TYPE: case REAL_TYPE: case OFFSET_TYPE:
830 PROMOTE_MODE (mode, unsignedp, type);
831 break;
832 #endif
834 #ifdef POINTERS_EXTEND_UNSIGNED
835 case REFERENCE_TYPE:
836 case POINTER_TYPE:
837 mode = Pmode;
838 unsignedp = POINTERS_EXTEND_UNSIGNED;
839 break;
840 #endif
842 default:
843 break;
846 *punsignedp = unsignedp;
847 return mode;
850 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
851 This pops when ADJUST is positive. ADJUST need not be constant. */
853 void
854 adjust_stack (adjust)
855 rtx adjust;
857 rtx temp;
858 adjust = protect_from_queue (adjust, 0);
860 if (adjust == const0_rtx)
861 return;
863 /* We expect all variable-sized adjustments to be a multiple of
864 PREFERRED_STACK_BOUNDARY. */
865 if (GET_CODE (adjust) == CONST_INT)
866 stack_pointer_delta -= INTVAL (adjust);
868 temp = expand_binop (Pmode,
869 #ifdef STACK_GROWS_DOWNWARD
870 add_optab,
871 #else
872 sub_optab,
873 #endif
874 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
875 OPTAB_LIB_WIDEN);
877 if (temp != stack_pointer_rtx)
878 emit_move_insn (stack_pointer_rtx, temp);
881 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
882 This pushes when ADJUST is positive. ADJUST need not be constant. */
884 void
885 anti_adjust_stack (adjust)
886 rtx adjust;
888 rtx temp;
889 adjust = protect_from_queue (adjust, 0);
891 if (adjust == const0_rtx)
892 return;
894 /* We expect all variable-sized adjustments to be a multiple of
895 PREFERRED_STACK_BOUNDARY. */
896 if (GET_CODE (adjust) == CONST_INT)
897 stack_pointer_delta += INTVAL (adjust);
899 temp = expand_binop (Pmode,
900 #ifdef STACK_GROWS_DOWNWARD
901 sub_optab,
902 #else
903 add_optab,
904 #endif
905 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
906 OPTAB_LIB_WIDEN);
908 if (temp != stack_pointer_rtx)
909 emit_move_insn (stack_pointer_rtx, temp);
912 /* Round the size of a block to be pushed up to the boundary required
913 by this machine. SIZE is the desired size, which need not be constant. */
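/* For example, with a PREFERRED_STACK_BOUNDARY of 64 bits (an 8-byte
   alignment), a constant SIZE of 13 is rounded up to 16; a variable SIZE
   is rounded with the add/divide/multiply sequence below.  */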
916 round_push (size)
917 rtx size;
919 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
920 if (align == 1)
921 return size;
922 if (GET_CODE (size) == CONST_INT)
924 int new = (INTVAL (size) + align - 1) / align * align;
925 if (INTVAL (size) != new)
926 size = GEN_INT (new);
928 else
930 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
931 but we know it can't. So add ourselves and then do
932 TRUNC_DIV_EXPR. */
933 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
934 NULL_RTX, 1, OPTAB_LIB_WIDEN);
935 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
936 NULL_RTX, 1);
937 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
939 return size;
942 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
943 to a previously-created save area. If no save area has been allocated,
944 this function will allocate one. If a save area is specified, it
945 must be of the proper mode.
947 The insns are emitted after insn AFTER, if nonzero, otherwise the insns
948 are emitted at the current position. */
950 void
951 emit_stack_save (save_level, psave, after)
952 enum save_level save_level;
953 rtx *psave;
954 rtx after;
956 rtx sa = *psave;
957 /* The default is that we use a move insn and save in a Pmode object. */
958 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
959 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
961 /* See if this machine has anything special to do for this kind of save. */
962 switch (save_level)
964 #ifdef HAVE_save_stack_block
965 case SAVE_BLOCK:
966 if (HAVE_save_stack_block)
967 fcn = gen_save_stack_block;
968 break;
969 #endif
970 #ifdef HAVE_save_stack_function
971 case SAVE_FUNCTION:
972 if (HAVE_save_stack_function)
973 fcn = gen_save_stack_function;
974 break;
975 #endif
976 #ifdef HAVE_save_stack_nonlocal
977 case SAVE_NONLOCAL:
978 if (HAVE_save_stack_nonlocal)
979 fcn = gen_save_stack_nonlocal;
980 break;
981 #endif
982 default:
983 break;
986 /* If there is no save area and we have to allocate one, do so. Otherwise
987 verify the save area is the proper mode. */
989 if (sa == 0)
991 if (mode != VOIDmode)
993 if (save_level == SAVE_NONLOCAL)
994 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
995 else
996 *psave = sa = gen_reg_rtx (mode);
999 else
1001 if (mode == VOIDmode || GET_MODE (sa) != mode)
1002 abort ();
1005 if (after)
1007 rtx seq;
1009 start_sequence ();
1010 /* We must validize inside the sequence, to ensure that any instructions
1011 created by the validize call also get moved to the right place. */
1012 if (sa != 0)
1013 sa = validize_mem (sa);
1014 emit_insn (fcn (sa, stack_pointer_rtx));
1015 seq = get_insns ();
1016 end_sequence ();
1017 emit_insn_after (seq, after);
1019 else
1021 if (sa != 0)
1022 sa = validize_mem (sa);
1023 emit_insn (fcn (sa, stack_pointer_rtx));
1027 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1028 area made by emit_stack_save. If it is zero, we have nothing to do.
1030 Put any emitted insns after insn AFTER, if nonzero, otherwise at
1031 current position. */
1033 void
1034 emit_stack_restore (save_level, sa, after)
1035 enum save_level save_level;
1036 rtx after;
1037 rtx sa;
1039 /* The default is that we use a move insn. */
1040 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
1042 /* See if this machine has anything special to do for this kind of save. */
1043 switch (save_level)
1045 #ifdef HAVE_restore_stack_block
1046 case SAVE_BLOCK:
1047 if (HAVE_restore_stack_block)
1048 fcn = gen_restore_stack_block;
1049 break;
1050 #endif
1051 #ifdef HAVE_restore_stack_function
1052 case SAVE_FUNCTION:
1053 if (HAVE_restore_stack_function)
1054 fcn = gen_restore_stack_function;
1055 break;
1056 #endif
1057 #ifdef HAVE_restore_stack_nonlocal
1058 case SAVE_NONLOCAL:
1059 if (HAVE_restore_stack_nonlocal)
1060 fcn = gen_restore_stack_nonlocal;
1061 break;
1062 #endif
1063 default:
1064 break;
1067 if (sa != 0)
1068 sa = validize_mem (sa);
1070 if (after)
1072 rtx seq;
1074 start_sequence ();
1075 emit_insn (fcn (stack_pointer_rtx, sa));
1076 seq = get_insns ();
1077 end_sequence ();
1078 emit_insn_after (seq, after);
1080 else
1081 emit_insn (fcn (stack_pointer_rtx, sa));
1084 #ifdef SETJMP_VIA_SAVE_AREA
1085 /* Optimize RTL generated by allocate_dynamic_stack_space for targets
1086 where SETJMP_VIA_SAVE_AREA is true. The problem is that on these
1087 platforms, the dynamic stack space used can corrupt the original
1088 frame, thus causing a crash if a longjmp unwinds to it. */
1090 void
1091 optimize_save_area_alloca (insns)
1092 rtx insns;
1094 rtx insn;
1096 for (insn = insns; insn; insn = NEXT_INSN(insn))
1098 rtx note;
1100 if (GET_CODE (insn) != INSN)
1101 continue;
1103 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1105 if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
1106 continue;
1108 if (!current_function_calls_setjmp)
1110 rtx pat = PATTERN (insn);
1112 /* If we do not see the note in a pattern matching
1113 these precise characteristics, we did something
1114 entirely wrong in allocate_dynamic_stack_space.
1116 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
1117 was defined on a machine where stacks grow towards higher
1118 addresses.
1120 Right now the only supported port whose stack grows upward
1121 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA. */
1122 if (GET_CODE (pat) != SET
1123 || SET_DEST (pat) != stack_pointer_rtx
1124 || GET_CODE (SET_SRC (pat)) != MINUS
1125 || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
1126 abort ();
1128 /* This will now be transformed into a (set REG REG)
1129 so we can just blow away all the other notes. */
1130 XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
1131 REG_NOTES (insn) = NULL_RTX;
1133 else
1135 /* setjmp was called, we must remove the REG_SAVE_AREA
1136 note so that later passes do not get confused by its
1137 presence. */
1138 if (note == REG_NOTES (insn))
1140 REG_NOTES (insn) = XEXP (note, 1);
1142 else
1144 rtx srch;
1146 for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
1147 if (XEXP (srch, 1) == note)
1148 break;
1150 if (srch == NULL_RTX)
1151 abort ();
1153 XEXP (srch, 1) = XEXP (note, 1);
1156 /* Once we've seen the note of interest, we need not look at
1157 the rest of them. */
1158 break;
1162 #endif /* SETJMP_VIA_SAVE_AREA */
1164 /* Return an rtx representing the address of an area of memory dynamically
1165 pushed on the stack. This region of memory is always aligned to
1166 a multiple of BIGGEST_ALIGNMENT.
1168 Any required stack pointer alignment is preserved.
1170 SIZE is an rtx representing the size of the area.
1171 TARGET is a place in which the address can be placed.
1173 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
1176 allocate_dynamic_stack_space (size, target, known_align)
1177 rtx size;
1178 rtx target;
1179 int known_align;
1181 #ifdef SETJMP_VIA_SAVE_AREA
1182 rtx setjmpless_size = NULL_RTX;
1183 #endif
1185 /* If we're asking for zero bytes, it doesn't matter what we point
1186 to since we can't dereference it. But return a reasonable
1187 address anyway. */
1188 if (size == const0_rtx)
1189 return virtual_stack_dynamic_rtx;
1191 /* Otherwise, show we're calling alloca or equivalent. */
1192 current_function_calls_alloca = 1;
1194 /* Ensure the size is in the proper mode. */
1195 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1196 size = convert_to_mode (Pmode, size, 1);
1198 /* We can't attempt to minimize alignment necessary, because we don't
1199 know the final value of preferred_stack_boundary yet while executing
1200 this code. */
1201 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1203 /* We will need to ensure that the address we return is aligned to
1204 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
1205 always know its final value at this point in the compilation (it
1206 might depend on the size of the outgoing parameter lists, for
1207 example), so we must align the value to be returned in that case.
1208 (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
1209 either STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS is defined).
1210 We must also do an alignment operation on the returned value if
1211 the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.
1213 If we have to align, we must leave space in SIZE for the hole
1214 that might result from the alignment operation. */
1216 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1217 #define MUST_ALIGN 1
1218 #else
1219 #define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
1220 #endif
1222 if (MUST_ALIGN)
1223 size
1224 = force_operand (plus_constant (size,
1225 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1226 NULL_RTX);
1228 #ifdef SETJMP_VIA_SAVE_AREA
1229 /* If setjmp restores regs from a save area in the stack frame,
1230 avoid clobbering the reg save area. Note that the offset of
1231 virtual_incoming_args_rtx includes the preallocated stack args space.
1232 It would be no problem to clobber that, but it's on the wrong side
1233 of the old save area. */
1235 rtx dynamic_offset
1236 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
1237 stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
1239 if (!current_function_calls_setjmp)
1241 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1243 /* See optimize_save_area_alloca to understand what is being
1244 set up here. */
1246 /* ??? Code below assumes that the save area needs maximal
1247 alignment. This constraint may be too strong. */
1248 if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
1249 abort ();
1251 if (GET_CODE (size) == CONST_INT)
1253 HOST_WIDE_INT new = INTVAL (size) / align * align;
1255 if (INTVAL (size) != new)
1256 setjmpless_size = GEN_INT (new);
1257 else
1258 setjmpless_size = size;
1260 else
1262 /* Since we know overflow is not possible, we avoid using
1263 CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead. */
1264 setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
1265 GEN_INT (align), NULL_RTX, 1);
1266 setjmpless_size = expand_mult (Pmode, setjmpless_size,
1267 GEN_INT (align), NULL_RTX, 1);
1269 /* Our optimization works based upon being able to perform a simple
1270 transformation of this RTL into a (set REG REG) so make sure things
1271 did in fact end up in a REG. */
1272 if (!register_operand (setjmpless_size, Pmode))
1273 setjmpless_size = force_reg (Pmode, setjmpless_size);
1276 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
1277 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1279 #endif /* SETJMP_VIA_SAVE_AREA */
1281 /* Round the size to a multiple of the required stack alignment.
1282 Since the stack is presumed to be rounded before this allocation,
1283 this will maintain the required alignment.
1285 If the stack grows downward, we could save an insn by subtracting
1286 SIZE from the stack pointer and then aligning the stack pointer.
1287 The problem with this is that the stack pointer may be unaligned
1288 between the execution of the subtraction and alignment insns and
1289 some machines do not allow this. Even on those that do, some
1290 signal handlers malfunction if a signal should occur between those
1291 insns. Since this is an extremely rare event, we have no reliable
1292 way of knowing which systems have this problem. So we avoid even
1293 momentarily mis-aligning the stack. */
1295 /* If we added a variable amount to SIZE,
1296 we can no longer assume it is aligned. */
1297 #if !defined (SETJMP_VIA_SAVE_AREA)
1298 if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
1299 #endif
1300 size = round_push (size);
1302 do_pending_stack_adjust ();
1304 /* We ought always to be called at the top level, and the stack ought to be
1305 properly aligned. */
1306 if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
1307 abort ();
1309 /* If needed, check that we have the required amount of stack. Take into
1310 account what has already been checked. */
1311 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1312 probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
1314 /* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
1315 if (target == 0 || GET_CODE (target) != REG
1316 || REGNO (target) < FIRST_PSEUDO_REGISTER
1317 || GET_MODE (target) != Pmode)
1318 target = gen_reg_rtx (Pmode);
1320 mark_reg_pointer (target, known_align);
1322 /* Perform the required allocation from the stack. Some systems do
1323 this differently than simply incrementing/decrementing from the
1324 stack pointer, such as acquiring the space by calling malloc(). */
1325 #ifdef HAVE_allocate_stack
1326 if (HAVE_allocate_stack)
1328 enum machine_mode mode = STACK_SIZE_MODE;
1329 insn_operand_predicate_fn pred;
1331 /* We don't have to check against the predicate for operand 0 since
1332 TARGET is known to be a pseudo of the proper mode, which must
1333 be valid for the operand. For operand 1, convert to the
1334 proper mode and validate. */
1335 if (mode == VOIDmode)
1336 mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;
1338 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
1339 if (pred && ! ((*pred) (size, mode)))
1340 size = copy_to_mode_reg (mode, size);
1342 emit_insn (gen_allocate_stack (target, size));
1344 else
1345 #endif
1347 #ifndef STACK_GROWS_DOWNWARD
1348 emit_move_insn (target, virtual_stack_dynamic_rtx);
1349 #endif
1351 /* Check stack bounds if necessary. */
1352 if (current_function_limit_stack)
1354 rtx available;
1355 rtx space_available = gen_label_rtx ();
1356 #ifdef STACK_GROWS_DOWNWARD
1357 available = expand_binop (Pmode, sub_optab,
1358 stack_pointer_rtx, stack_limit_rtx,
1359 NULL_RTX, 1, OPTAB_WIDEN);
1360 #else
1361 available = expand_binop (Pmode, sub_optab,
1362 stack_limit_rtx, stack_pointer_rtx,
1363 NULL_RTX, 1, OPTAB_WIDEN);
1364 #endif
1365 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1366 space_available);
1367 #ifdef HAVE_trap
1368 if (HAVE_trap)
1369 emit_insn (gen_trap ());
1370 else
1371 #endif
1372 error ("stack limits not supported on this target");
1373 emit_barrier ();
1374 emit_label (space_available);
1377 anti_adjust_stack (size);
1378 #ifdef SETJMP_VIA_SAVE_AREA
1379 if (setjmpless_size != NULL_RTX)
1381 rtx note_target = get_last_insn ();
1383 REG_NOTES (note_target)
1384 = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
1385 REG_NOTES (note_target));
1387 #endif /* SETJMP_VIA_SAVE_AREA */
1389 #ifdef STACK_GROWS_DOWNWARD
1390 emit_move_insn (target, virtual_stack_dynamic_rtx);
1391 #endif
1394 if (MUST_ALIGN)
1396 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1397 but we know it can't. So add ourselves and then do
1398 TRUNC_DIV_EXPR. */
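      /* The net effect is target = ((target + A - 1) / A) * A, where A is
         BIGGEST_ALIGNMENT / BITS_PER_UNIT, i.e. the address rounded up to
         the next multiple of BIGGEST_ALIGNMENT in bytes.  */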
1399 target = expand_binop (Pmode, add_optab, target,
1400 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1401 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1402 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1403 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1404 NULL_RTX, 1);
1405 target = expand_mult (Pmode, target,
1406 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1407 NULL_RTX, 1);
1410 /* Some systems require a particular insn to refer to the stack
1411 to make the pages exist. */
1412 #ifdef HAVE_probe
1413 if (HAVE_probe)
1414 emit_insn (gen_probe ());
1415 #endif
1417 /* Record the new stack level for nonlocal gotos. */
1418 if (nonlocal_goto_handler_slots != 0)
1419 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
1421 return target;
1424 /* A front end may want to override GCC's stack checking by providing a
1425 run-time routine to call to check the stack, so provide a mechanism for
1426 calling that routine. */
1428 static GTY(()) rtx stack_check_libfunc;
1430 void
1431 set_stack_check_libfunc (libfunc)
1432 rtx libfunc;
1434 stack_check_libfunc = libfunc;
1437 /* Emit one stack probe at ADDRESS, an address within the stack. */
1439 static void
1440 emit_stack_probe (address)
1441 rtx address;
1443 rtx memref = gen_rtx_MEM (word_mode, address);
1445 MEM_VOLATILE_P (memref) = 1;
1447 if (STACK_CHECK_PROBE_LOAD)
1448 emit_move_insn (gen_reg_rtx (word_mode), memref);
1449 else
1450 emit_move_insn (memref, const0_rtx);
1453 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1454 FIRST is a constant and SIZE is a Pmode RTX. These are offsets from the
1455 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1456 subtract from the stack. If SIZE is constant, this is done
1457 with a fixed number of probes. Otherwise, we must make a loop. */
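/* For instance, assuming a STACK_CHECK_PROBE_INTERVAL of 4096 bytes, probing
   FIRST = 0 and SIZE = 16384 on a downward-growing stack touches the words
   at sp - 4096, sp - 8192, sp - 12288 and finally sp - 16384.  */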
1459 #ifdef STACK_GROWS_DOWNWARD
1460 #define STACK_GROW_OP MINUS
1461 #else
1462 #define STACK_GROW_OP PLUS
1463 #endif
1465 void
1466 probe_stack_range (first, size)
1467 HOST_WIDE_INT first;
1468 rtx size;
1470 /* First ensure SIZE is Pmode. */
1471 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1472 size = convert_to_mode (Pmode, size, 1);
1474 /* Next see if the front end has set up a function for us to call to
1475 check the stack. */
1476 if (stack_check_libfunc != 0)
1478 rtx addr = memory_address (QImode,
1479 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1480 stack_pointer_rtx,
1481 plus_constant (size, first)));
1483 #ifdef POINTERS_EXTEND_UNSIGNED
1484 if (GET_MODE (addr) != ptr_mode)
1485 addr = convert_memory_address (ptr_mode, addr);
1486 #endif
1488 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1489 ptr_mode);
1492 /* Next see if we have an insn to check the stack. Use it if so. */
1493 #ifdef HAVE_check_stack
1494 else if (HAVE_check_stack)
1496 insn_operand_predicate_fn pred;
1497 rtx last_addr
1498 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1499 stack_pointer_rtx,
1500 plus_constant (size, first)),
1501 NULL_RTX);
1503 pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1504 if (pred && ! ((*pred) (last_addr, Pmode)))
1505 last_addr = copy_to_mode_reg (Pmode, last_addr);
1507 emit_insn (gen_check_stack (last_addr));
1509 #endif
1511 /* If we have to generate explicit probes, see if we have a constant
1512 small number of them to generate. If so, that's the easy case. */
1513 else if (GET_CODE (size) == CONST_INT
1514 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1516 HOST_WIDE_INT offset;
1518 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1519 for values of N from 1 until it exceeds LAST. If only one
1520 probe is needed, this will not generate any code. Then probe
1521 at LAST. */
1522 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1523 offset < INTVAL (size);
1524 offset = offset + STACK_CHECK_PROBE_INTERVAL)
1525 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1526 stack_pointer_rtx,
1527 GEN_INT (offset)));
1529 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1530 stack_pointer_rtx,
1531 plus_constant (size, first)));
1534 /* In the variable case, do the same as above, but in a loop. We emit loop
1535 notes so that loop optimization can be done. */
1536 else
1538 rtx test_addr
1539 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1540 stack_pointer_rtx,
1541 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1542 NULL_RTX);
1543 rtx last_addr
1544 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1545 stack_pointer_rtx,
1546 plus_constant (size, first)),
1547 NULL_RTX);
1548 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1549 rtx loop_lab = gen_label_rtx ();
1550 rtx test_lab = gen_label_rtx ();
1551 rtx end_lab = gen_label_rtx ();
1552 rtx temp;
1554 if (GET_CODE (test_addr) != REG
1555 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1556 test_addr = force_reg (Pmode, test_addr);
1558 emit_note (NULL, NOTE_INSN_LOOP_BEG);
1559 emit_jump (test_lab);
1561 emit_label (loop_lab);
1562 emit_stack_probe (test_addr);
1564 emit_note (NULL, NOTE_INSN_LOOP_CONT);
1566 #ifdef STACK_GROWS_DOWNWARD
1567 #define CMP_OPCODE GTU
1568 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1569 1, OPTAB_WIDEN);
1570 #else
1571 #define CMP_OPCODE LTU
1572 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1573 1, OPTAB_WIDEN);
1574 #endif
1576 if (temp != test_addr)
1577 abort ();
1579 emit_label (test_lab);
1580 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1581 NULL_RTX, Pmode, 1, loop_lab);
1582 emit_jump (end_lab);
1583 emit_note (NULL, NOTE_INSN_LOOP_END);
1584 emit_label (end_lab);
1586 emit_stack_probe (last_addr);
1590 /* Return an rtx representing the register or memory location
1591 in which a scalar value of data type VALTYPE
1592 was returned by a function call to function FUNC.
1593 FUNC is a FUNCTION_DECL node if the precise function is known,
1594 otherwise 0.
1595 OUTGOING is 1 if on a machine with register windows this function
1596 should return the register in which the function will put its result
1597 and 0 otherwise. */
1600 hard_function_value (valtype, func, outgoing)
1601 tree valtype;
1602 tree func ATTRIBUTE_UNUSED;
1603 int outgoing ATTRIBUTE_UNUSED;
1605 rtx val;
1607 #ifdef FUNCTION_OUTGOING_VALUE
1608 if (outgoing)
1609 val = FUNCTION_OUTGOING_VALUE (valtype, func);
1610 else
1611 #endif
1612 val = FUNCTION_VALUE (valtype, func);
1614 if (GET_CODE (val) == REG
1615 && GET_MODE (val) == BLKmode)
1617 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1618 enum machine_mode tmpmode;
1620 /* int_size_in_bytes can return -1. We don't need a check here
1621 since the value of bytes will be large enough that no mode
1622 will match and we will abort later in this function. */
1624 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1625 tmpmode != VOIDmode;
1626 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1628 /* Have we found a large enough mode? */
1629 if (GET_MODE_SIZE (tmpmode) >= bytes)
1630 break;
1633 /* No suitable mode found. */
1634 if (tmpmode == VOIDmode)
1635 abort ();
1637 PUT_MODE (val, tmpmode);
1639 return val;
1642 /* Return an rtx representing the register or memory location
1643 in which a scalar value of mode MODE was returned by a library call. */
1646 hard_libcall_value (mode)
1647 enum machine_mode mode;
1649 return LIBCALL_VALUE (mode);
1652 /* Look up the tree code for a given rtx code
1653 to provide the arithmetic operation for REAL_ARITHMETIC.
1654 The function returns an int because the caller may not know
1655 what `enum tree_code' means. */
1658 rtx_to_tree_code (code)
1659 enum rtx_code code;
1661 enum tree_code tcode;
1663 switch (code)
1665 case PLUS:
1666 tcode = PLUS_EXPR;
1667 break;
1668 case MINUS:
1669 tcode = MINUS_EXPR;
1670 break;
1671 case MULT:
1672 tcode = MULT_EXPR;
1673 break;
1674 case DIV:
1675 tcode = RDIV_EXPR;
1676 break;
1677 case SMIN:
1678 tcode = MIN_EXPR;
1679 break;
1680 case SMAX:
1681 tcode = MAX_EXPR;
1682 break;
1683 default:
1684 tcode = LAST_AND_UNUSED_TREE_CODE;
1685 break;
1687 return ((int) tcode);
1690 #include "gt-explow.h"