/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
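/* Worked example (added for illustration): truncating to QImode, WIDTH is 8
   and SIGN is 0x80.  For c = 0x1ff the steps are
     c &= 0xff   ->  0xff
     c ^= 0x80   ->  0x7f
     c -= 0x80   ->  -1
   so bit 7 becomes the sign bit of the result, as two's complement
   requires.  A small positive value such as c = 5 passes through
   unchanged: 5 ^ 0x80 = 0x85, and 0x85 - 0x80 = 5.  */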
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  */

rtx
plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT l1 = INTVAL (x);
          HOST_WIDE_INT h1 = (l1 >> (HOST_BITS_PER_WIDE_INT - 1)) ? -1 : 0;
          unsigned HOST_WIDE_INT l2 = c;
          HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
          unsigned HOST_WIDE_INT lv;
          HOST_WIDE_INT hv;

          if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
            gcc_unreachable ();

          return immed_double_const (lv, hv, VOIDmode);
        }

      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
          /* Sorry, we have no way to represent overflows this wide.
             To fix, add constant support wider than CONST_DOUBLE.  */
          gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0),
                            plus_constant (mode, XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (mode, *const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
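/* Example (added for illustration): with MODE == Pmode,
     plus_constant (Pmode, gen_rtx_SYMBOL_REF (Pmode, "x"), 4)
   returns
     (const:Pmode (plus:Pmode (symbol_ref:Pmode ("x")) (const_int 4)))
   because the SYMBOL_REF case sets ALL_CONSTANT, the residual C is folded
   into a trailing PLUS, and the result is then wrapped in a CONST.  */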
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
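/* Example (added for illustration): given X = (plus (reg 100) (const_int 8))
   and *CONSTPTR = (const_int 0), the call returns (reg 100) and leaves
   *CONSTPTR = (const_int 8); the constant is folded into *CONSTPTR by
   simplify_binary_operation rather than kept in the returned sum.  */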
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
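/* Example (added for illustration): for
   X = (plus:SI (mem:SI (reg 100)) (const_int 4)), the MEM operand is
   loaded into a fresh pseudo and the result is
   (plus:SI (reg 101) (const_int 4)); the CONST_INT is left in place
   because its mode is VOIDmode and so it fails the CONSTANT_P test
   above.  */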
/* Given X, a memory address in address space AS's pointer mode, convert
   it to an address in the address space's address mode, or vice versa
   (TO_MODE says which way).  We take advantage of the fact that pointers
   are not allowed to overflow by commuting arithmetic operations over
   conversions so that address arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
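/* Example (added for illustration): on a target where pointer_mode is
   SImode, address_mode is DImode and POINTERS_EXTEND_UNSIGNED is 1
   (32-bit pointers in a 64-bit address space), converting
   (plus:SI (reg:SI 100) (const_int 4)) to DImode goes through the PLUS
   case above: the CONST_INT term stays in place while the register
   operand is zero-extended to DImode, so the addition remains available
   to address arithmetic insns.  */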
/* Return something equivalent to X but valid as a memory address for
   something of mode MODE in the named address space AS.  When X is not
   itself valid, this works by copying X or subexpressions of it into
   registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
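/* Note (added for illustration): most callers reach this through the
   memory_address macro, which supplies ADDR_SPACE_GENERIC, as in
   probe_stack_range later in this file:
     addr = memory_address (Pmode,
                            plus_constant (Pmode, stack_pointer_rtx, off));
   Whatever path is taken above, the gcc_assert at DONE guarantees the
   returned address is valid for MODE.  */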
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}
/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  enum machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
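/* Example (added for illustration): forcing (symbol_ref:DI ("buf")) into a
   register emits
     (set (reg:DI 100) (symbol_ref:DI ("buf")))
   and marks the new pseudo as a pointer with the alignment of "buf"'s
   decl, if known.  No REG_EQUAL note is attached in this case because the
   move's SET_SRC is already equal to X; the note is only added when the
   emitted insn stores something other than X itself.  */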
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
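/* Example (added for illustration): on a target whose PROMOTE_MODE widens
   sub-word integers, for instance a definition along the lines of
     #define PROMOTE_MODE(MODE, UNSIGNEDP, TYPE)  \
       if (GET_MODE_CLASS (MODE) == MODE_INT      \
           && GET_MODE_SIZE (MODE) < 4)           \
         (MODE) = SImode;
   promote_mode for a QImode 'signed char' variable returns SImode, so the
   value is kept sign-extended to a full word in its register.  */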
/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
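/* Worked example (added for illustration): with a 128-bit preferred stack
   boundary, ALIGN is 16 bytes.  A constant SIZE of 37 rounds to
   (37 + 15) / 16 * 16 = 48 at compile time; a variable SIZE instead emits
   the same add/divide/multiply sequence as RTL.  */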
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for
     accessing aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */
rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be
     aligned properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
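/* Worked example (added for illustration): for a REQUIRED_ALIGN of 64 bits
   (8 bytes), the MUST_ALIGN sequence above computes
     target = (target + 7) / 8 * 8
   i.e. it rounds the returned address up to the next 8-byte boundary,
   using the same add/divide/multiply idiom as round_push.  */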
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));
  else
#endif
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
        emit_insn (gen_probe_stack (memref));
      else
#endif
        emit_move_insn (memref, const0_rtx);
    }
}
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at
                 compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
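/* Worked example (added for illustration): with a
   STACK_CHECK_PROBE_INTERVAL_EXP of 12, PROBE_INTERVAL is 4096.  For
   FIRST = 0 and a constant SIZE of 10000 (below the 7 * PROBE_INTERVAL
   cutoff), the small-constant branch probes at offsets 4096 and 8192,
   then at FIRST + SIZE = 10000, so every page of the new region is
   touched.  */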
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (Pmode, size,
                                          PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype,
                                      outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
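/* Example (added for illustration): if VALTYPE is a 6-byte struct returned
   in a register with BLKmode, the loop walks QImode, HImode, SImode,
   DImode (on a typical target with 1/2/4/8-byte integer modes) and stops
   at DImode, the narrowest MODE_INT wide enough, so VAL's mode becomes
   DImode.  */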
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"