/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 91, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "insn-flags.h"
#include "insn-codes.h"

static rtx break_out_memory_refs PROTO((rtx));
/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */
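/* Illustrative note (added, not part of the original sources):
   plus_constant (GEN_INT (3), 4) folds to (const_int 7), while adding 4
   to a SYMBOL_REF yields the combined constant
   (const (plus (symbol_ref ...) (const_int 4))).  */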
rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        HOST_WIDE_INT lv, hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for a constant term in the sum and combine it
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Use a recursive call in case the remaining operand is something
         that we handle specially, such as a SYMBOL_REF.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return plus_constant (XEXP (x, 0), c + INTVAL (XEXP (x, 1)));
      else if (CONSTANT_P (XEXP (x, 0)))
        return gen_rtx (PLUS, mode,
                        plus_constant (XEXP (x, 0), c),
                        XEXP (x, 1));
      else if (CONSTANT_P (XEXP (x, 1)))
        return gen_rtx (PLUS, mode,
                        XEXP (x, 0),
                        plus_constant (XEXP (x, 1), c));
      break;
    }

  if (c != 0)
    x = gen_rtx (PLUS, mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx (CONST, mode, x);
  else
    return x;
}
/* This is the same as `plus_constant', except that it handles LO_SUM.

   This function should be used via the `plus_constant_for_output' macro.  */

rtx
plus_constant_for_output_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code = GET_CODE (x);
  register enum machine_mode mode = GET_MODE (x);
  int all_constant = 0;

  if (GET_CODE (x) == LO_SUM)
    return gen_rtx (LO_SUM, mode, XEXP (x, 0),
                    plus_constant_for_output (XEXP (x, 1), c));

  else
    return plus_constant (x, c);
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
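/* Illustrative note (added): if X is (plus (reg 100) (const_int 8)) and
   *CONSTPTR starts as const0_rtx, this returns (reg 100) and leaves
   (const_int 8) in *CONSTPTR.  */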
rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx (PLUS, GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return the insn that next references REG after INSN, or 0 if REG is
   clobbered before it is next referenced or we cannot find an insn that
   references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        continue;
      if (GET_CODE (insn) == CODE_LABEL
          || GET_CODE (insn) == BARRIER)
        return 0;
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == JUMP_INSN
          || GET_CODE (insn) == CALL_INSN)
        {
          if (reg_set_p (reg, insn))
            return 0;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return insn;
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (simplejump_p (insn))
                next = JUMP_LABEL (insn);
              else
                return 0;
            }
          if (GET_CODE (insn) == CALL_INSN
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && call_used_regs[REGNO (reg)])
            return 0;
        }
      else
        abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
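/* Illustrative note (added): given X = (plus (mem (reg 100)) (symbol_ref "tab")),
   this emits loads of the MEM and of the symbolic constant into fresh
   pseudos and returns (plus (reg 101) (reg 102)); a plain (reg) operand
   is returned unchanged.  */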
static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case LABEL_REF:
      return gen_rtx (LABEL_REF, to_mode, XEXP (x, 0));

    case SYMBOL_REF:
      temp = gen_rtx (SYMBOL_REF, to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx (CONST, to_mode,
                      convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition, if the second operand is a small constant, we can
         safely permute the conversion and addition operations.  We can
         always safely permute them if we are making the address narrower.
         In addition, always permute the operations if this is a constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (INTVAL (XEXP (x, 1)) + 20000 < 40000
                  || CONSTANT_P (XEXP (x, 0)))))
        return gen_rtx (GET_CODE (x), to_mode,
                        convert_memory_address (to_mode, XEXP (x, 0)),
                        convert_memory_address (to_mode, XEXP (x, 1)));
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
}
#endif

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */
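/* Illustrative note (added): applied to (plus (reg 100) (mem (reg 101))),
   this copies the register and the loaded memory value into fresh pseudos
   and returns (plus (reg 102) (reg 103)), so later stores cannot change
   what the result refers to; frame-pointer registers are deliberately
   left uncopied.  */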
rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
          )
        x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */
rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
           && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx (PLUS, GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;
  /* Don't alter REF itself, since that is probably a stack slot.  */
  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
}

/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  register rtx addr;
  if (GET_CODE (x) != MEM)
    return x;
  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      rtx temp = copy_all_regs (addr);
      rtx mem;
      if (GET_CODE (temp) != REG)
        temp = copy_to_reg (temp);
      mem = gen_rtx (MEM, GET_MODE (x), temp);

      /* Mark returned memref with in_struct if it's in an array or
         structure.  Copy const and volatile from original memref.  */

      MEM_IN_STRUCT_P (mem) = MEM_IN_STRUCT_P (x) || GET_CODE (addr) == PLUS;
      RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (x);
      return mem;
    }
  return x;
}
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
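/* Illustrative note (added): force_reg (SImode, GEN_INT (42)) emits
   (set (reg N) (const_int 42)) for a fresh pseudo N, attaches a REG_EQUAL
   note recording the constant, and returns (reg N); if X is already a REG
   it is returned unchanged.  */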
rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;
  temp = gen_reg_rtx (mode);
  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
        XEXP (note, 0) = x;
      else
        REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, x, REG_NOTES (insn));
    }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;
  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;
  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */
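/* Illustrative note (added): on a target whose PROMOTE_MODE widens small
   integer modes, promote_mode applied to a `short' (HImode) type would
   typically return SImode and adjust *PUNSIGNEDP to the extension the
   target wants; targets without PROMOTE_MODE leave MODE untouched here.  */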
enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif
    }

  *punsignedp = unsignedp;
  return mode;
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */
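/* Illustrative note (added): adjust_stack (GEN_INT (16)) releases 16 bytes;
   on a machine where the stack grows downward this expands to an addition
   of 16 to stack_pointer_rtx, and to a subtraction otherwise.  */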
void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
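/* Illustrative note (added): with STACK_BOUNDARY of 64 bits (an 8-byte
   alignment), round_push (GEN_INT (13)) returns (const_int 16); for a
   non-constant SIZE it emits the add/divide/multiply sequence below
   instead.  */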
rtx
round_push (size)
     rtx size;
{
#ifdef STACK_BOUNDARY
  int align = STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* STACK_BOUNDARY */
  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */
void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) () = gen_move_insn;
  enum machine_mode mode = Pmode;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        {
          fcn = gen_save_stack_block;
          mode = insn_operand_mode[CODE_FOR_save_stack_block][0];
        }
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        {
          fcn = gen_save_stack_function;
          mode = insn_operand_mode[CODE_FOR_save_stack_function][0];
        }
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        {
          fcn = gen_save_stack_nonlocal;
          mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
        }
      break;
#endif
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
        abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) () = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
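/* Illustrative note (added): this is the expander used for alloca-style
   requests (the body below marks the function as calling alloca); a call
   such as allocate_dynamic_stack_space (size_rtx, NULL_RTX, BITS_PER_UNIT)
   returns a pseudo holding the address of the newly pushed, aligned block.
   The call shown is hypothetical, for illustration only.  */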
rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS is defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif
  if (MUST_ALIGN)
    {
      if (GET_CODE (size) == CONST_INT)
        size = GEN_INT (INTVAL (size)
                        + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
        size = expand_binop (Pmode, add_optab, size,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif
  do_pending_stack_adjust ();

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

#ifndef STACK_GROWS_DOWNWARD
  emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer.  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode
        = insn_operand_mode[(int) CODE_FOR_allocate_stack][0];

      size = convert_modes (mode, ptr_mode, size, 1);

      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
                (size, mode)))
        size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (size));
    }
  else
#endif
    {
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
    }

#ifdef STACK_GROWS_DOWNWARD
  emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slot != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.  */

rtx
hard_function_value (valtype, func)
     tree valtype;
     tree func;
{
  rtx val = FUNCTION_VALUE (valtype, func);
  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      int bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != MAX_MACHINE_MODE;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      if (tmpmode == MAX_MACHINE_MODE)
        abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */
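/* Illustrative note (added): rtx_to_tree_code (PLUS) returns
   (int) PLUS_EXPR, and an rtx code with no tree counterpart here maps to
   LAST_AND_UNUSED_TREE_CODE.  */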
int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}