re PR rtl-optimization/6305 (c++ gets ICE in reload_cse_simplify_operands)
[official-gcc.git] / gcc / config / s390 / s390.c
blob131f6ecbb60488ec3777d1b3580f6883a1c9eca3
1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38 #include "recog.h"
39 #include "expr.h"
40 #include "reload.h"
41 #include "toplev.h"
42 #include "basic-block.h"
43 #include "integrate.h"
44 #include "ggc.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "debug.h"
48 #include "langhooks.h"
50 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
51 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
52 static int s390_adjust_priority PARAMS ((rtx, int));
54 #undef TARGET_ASM_ALIGNED_HI_OP
55 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
56 #undef TARGET_ASM_ALIGNED_DI_OP
57 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
58 #undef TARGET_ASM_INTEGER
59 #define TARGET_ASM_INTEGER s390_assemble_integer
61 #undef TARGET_ASM_FUNCTION_PROLOGUE
62 #define TARGET_ASM_FUNCTION_PROLOGUE s390_function_prologue
64 #undef TARGET_ASM_FUNCTION_EPILOGUE
65 #define TARGET_ASM_FUNCTION_EPILOGUE s390_function_epilogue
67 #undef TARGET_ASM_OPEN_PAREN
68 #define TARGET_ASM_OPEN_PAREN ""
70 #undef TARGET_ASM_CLOSE_PAREN
71 #define TARGET_ASM_CLOSE_PAREN ""
73 #undef TARGET_SCHED_ADJUST_COST
74 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
76 #undef TARGET_SCHED_ADJUST_PRIORITY
77 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
79 struct gcc_target targetm = TARGET_INITIALIZER;
81 extern int reload_completed;
83 /* The alias set for prologue/epilogue register save/restore. */
84 static int s390_sr_alias_set = 0;
86 /* Function count for creating unique internal labels in a compile unit. */
87 int s390_function_count = 0;
89 /* Save information from a "cmpxx" operation until the branch or scc is
90 emitted. */
91 rtx s390_compare_op0, s390_compare_op1;
93 /* Structure used to hold the components of a S/390 memory
94 address. A legitimate address on S/390 is of the general
95 form
96 base + index + displacement
97 where any of the components is optional.
99 base and index are registers of the class ADDR_REGS,
100 displacement is an unsigned 12-bit immediate constant. */
102 struct s390_address
104 rtx base;
105 rtx indx;
106 rtx disp;
107 int pointer;
110 /* Structure containing information for prologue and epilogue. */
112 struct s390_frame
114 int frame_pointer_p;
115 int return_reg_saved_p;
116 int save_fprs_p;
117 int first_save_gpr;
118 int first_restore_gpr;
119 int last_save_gpr;
120 int arg_frame_offset;
122 HOST_WIDE_INT frame_size;
125 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
126 static int s390_branch_condition_mask PARAMS ((rtx));
127 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
128 static int check_mode PARAMS ((rtx, enum machine_mode *));
129 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
130 static int s390_decompose_address PARAMS ((rtx, struct s390_address *, int));
131 static int reg_used_in_mem_p PARAMS ((int, rtx));
132 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
133 static void s390_split_branches PARAMS ((void));
134 static void s390_chunkify_pool PARAMS ((void));
135 static int save_fprs_p PARAMS ((void));
136 static int find_unused_clobbered_reg PARAMS ((void));
137 static void s390_frame_info PARAMS ((struct s390_frame *));
138 static rtx save_fpr PARAMS ((rtx, int, int));
139 static rtx restore_fpr PARAMS ((rtx, int, int));
140 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
143 /* Return true if SET either doesn't set the CC register, or else
144 the source and destination have matching CC modes and that
145 CC mode is at least as constrained as REQ_MODE. */
147 static int
148 s390_match_ccmode_set (set, req_mode)
149 rtx set;
150 enum machine_mode req_mode;
152 enum machine_mode set_mode;
154 if (GET_CODE (set) != SET)
155 abort ();
157 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
158 return 1;
160 set_mode = GET_MODE (SET_DEST (set));
161 switch (set_mode)
163 case CCSmode:
164 if (req_mode != CCSmode)
165 return 0;
166 break;
167 case CCUmode:
168 if (req_mode != CCUmode)
169 return 0;
170 break;
171 case CCLmode:
172 if (req_mode != CCLmode)
173 return 0;
174 break;
175 case CCZmode:
176 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode)
177 return 0;
178 break;
180 default:
181 abort ();
184 return (GET_MODE (SET_SRC (set)) == set_mode);
187 /* Return true if every SET in INSN that sets the CC register
188 has source and destination with matching CC modes and that
189 CC mode is at least as constrained as REQ_MODE. */
192 s390_match_ccmode (insn, req_mode)
193 rtx insn;
194 enum machine_mode req_mode;
196 int i;
198 if (GET_CODE (PATTERN (insn)) == SET)
199 return s390_match_ccmode_set (PATTERN (insn), req_mode);
201 if (GET_CODE (PATTERN (insn)) == PARALLEL)
202 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
204 rtx set = XVECEXP (PATTERN (insn), 0, i);
205 if (GET_CODE (set) == SET)
206 if (!s390_match_ccmode_set (set, req_mode))
207 return 0;
210 return 1;
213 /* Given a comparison code OP (EQ, NE, etc.) and the operands
214 OP0 and OP1 of a COMPARE, return the mode to be used for the
215 comparison. */
217 enum machine_mode
218 s390_select_ccmode (code, op0, op1)
219 enum rtx_code code;
220 rtx op0;
221 rtx op1;
223 switch (code)
225 case EQ:
226 case NE:
227 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
228 || GET_CODE (op1) == NEG)
229 return CCLmode;
231 return CCZmode;
233 case LE:
234 case LT:
235 case GE:
236 case GT:
237 case UNORDERED:
238 case ORDERED:
239 case UNEQ:
240 case UNLE:
241 case UNLT:
242 case UNGE:
243 case UNGT:
244 case LTGT:
245 return CCSmode;
247 case LEU:
248 case LTU:
249 case GEU:
250 case GTU:
251 return CCUmode;
253 default:
254 abort ();
258 /* Return branch condition mask to implement a branch
259 specified by CODE. */
261 static int
262 s390_branch_condition_mask (code)
263 rtx code;
265 const int CC0 = 1 << 3;
266 const int CC1 = 1 << 2;
267 const int CC2 = 1 << 1;
268 const int CC3 = 1 << 0;
270 if (GET_CODE (XEXP (code, 0)) != REG
271 || REGNO (XEXP (code, 0)) != CC_REGNUM
272 || XEXP (code, 1) != const0_rtx)
273 abort ();
275 switch (GET_MODE (XEXP (code, 0)))
277 case CCZmode:
278 switch (GET_CODE (code))
280 case EQ: return CC0;
281 case NE: return CC1 | CC2 | CC3;
282 default:
283 abort ();
285 break;
287 case CCLmode:
288 switch (GET_CODE (code))
290 case EQ: return CC0 | CC2;
291 case NE: return CC1 | CC3;
292 case UNORDERED: return CC2 | CC3; /* carry */
293 case ORDERED: return CC0 | CC1; /* no carry */
294 default:
295 abort ();
297 break;
299 case CCUmode:
300 switch (GET_CODE (code))
302 case EQ: return CC0;
303 case NE: return CC1 | CC2 | CC3;
304 case LTU: return CC1;
305 case GTU: return CC2;
306 case LEU: return CC0 | CC1;
307 case GEU: return CC0 | CC2;
308 default:
309 abort ();
311 break;
313 case CCSmode:
314 switch (GET_CODE (code))
316 case EQ: return CC0;
317 case NE: return CC1 | CC2 | CC3;
318 case LT: return CC1;
319 case GT: return CC2;
320 case LE: return CC0 | CC1;
321 case GE: return CC0 | CC2;
322 case UNORDERED: return CC3;
323 case ORDERED: return CC0 | CC1 | CC2;
324 case UNEQ: return CC0 | CC3;
325 case UNLT: return CC1 | CC3;
326 case UNGT: return CC2 | CC3;
327 case UNLE: return CC0 | CC1 | CC3;
328 case UNGE: return CC0 | CC2 | CC3;
329 case LTGT: return CC1 | CC2;
330 default:
331 abort ();
334 default:
335 abort ();
339 /* If INV is false, return assembler mnemonic string to implement
340 a branch specified by CODE. If INV is true, return mnemonic
341 for the corresponding inverted branch. */
343 static const char *
344 s390_branch_condition_mnemonic (code, inv)
345 rtx code;
346 int inv;
348 static const char *const mnemonic[16] =
350 NULL, "o", "h", "nle",
351 "l", "nhe", "lh", "ne",
352 "e", "nlh", "he", "nl",
353 "le", "nh", "no", NULL
356 int mask = s390_branch_condition_mask (code);
358 if (inv)
359 mask ^= 15;
361 if (mask < 1 || mask > 14)
362 abort ();
364 return mnemonic[mask];
367 /* If OP is an integer constant of mode MODE with exactly one
368 HImode subpart unequal to DEF, return the number of that
369 subpart. As a special case, all HImode subparts of OP are
370 equal to DEF, return zero. Otherwise, return -1. */
373 s390_single_hi (op, mode, def)
374 rtx op;
375 enum machine_mode mode;
376 int def;
378 if (GET_CODE (op) == CONST_INT)
380 unsigned HOST_WIDE_INT value;
381 int n_parts = GET_MODE_SIZE (mode) / 2;
382 int i, part = -1;
384 for (i = 0; i < n_parts; i++)
386 if (i == 0)
387 value = (unsigned HOST_WIDE_INT) INTVAL (op);
388 else
389 value >>= 16;
391 if ((value & 0xffff) != (unsigned)(def & 0xffff))
393 if (part != -1)
394 return -1;
395 else
396 part = i;
400 return part == -1 ? 0 : (n_parts - 1 - part);
403 else if (GET_CODE (op) == CONST_DOUBLE
404 && GET_MODE (op) == VOIDmode)
406 unsigned HOST_WIDE_INT value;
407 int n_parts = GET_MODE_SIZE (mode) / 2;
408 int i, part = -1;
410 for (i = 0; i < n_parts; i++)
412 if (i == 0)
413 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
414 else if (i == HOST_BITS_PER_WIDE_INT / 16)
415 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
416 else
417 value >>= 16;
419 if ((value & 0xffff) != (unsigned)(def & 0xffff))
421 if (part != -1)
422 return -1;
423 else
424 part = i;
428 return part == -1 ? 0 : (n_parts - 1 - part);
431 return -1;
434 /* Extract the HImode part number PART from integer
435 constant OP of mode MODE. */
438 s390_extract_hi (op, mode, part)
439 rtx op;
440 enum machine_mode mode;
441 int part;
443 int n_parts = GET_MODE_SIZE (mode) / 2;
444 if (part < 0 || part >= n_parts)
445 abort();
446 else
447 part = n_parts - 1 - part;
449 if (GET_CODE (op) == CONST_INT)
451 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
452 return ((value >> (16 * part)) & 0xffff);
454 else if (GET_CODE (op) == CONST_DOUBLE
455 && GET_MODE (op) == VOIDmode)
457 unsigned HOST_WIDE_INT value;
458 if (part < HOST_BITS_PER_WIDE_INT / 16)
459 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
460 else
461 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
462 part -= HOST_BITS_PER_WIDE_INT / 16;
464 return ((value >> (16 * part)) & 0xffff);
467 abort ();
470 /* If OP is an integer constant of mode MODE with exactly one
471 QImode subpart unequal to DEF, return the number of that
472 subpart. As a special case, all QImode subparts of OP are
473 equal to DEF, return zero. Otherwise, return -1. */
476 s390_single_qi (op, mode, def)
477 rtx op;
478 enum machine_mode mode;
479 int def;
481 if (GET_CODE (op) == CONST_INT)
483 unsigned HOST_WIDE_INT value;
484 int n_parts = GET_MODE_SIZE (mode);
485 int i, part = -1;
487 for (i = 0; i < n_parts; i++)
489 if (i == 0)
490 value = (unsigned HOST_WIDE_INT) INTVAL (op);
491 else
492 value >>= 8;
494 if ((value & 0xff) != (unsigned)(def & 0xff))
496 if (part != -1)
497 return -1;
498 else
499 part = i;
503 return part == -1 ? 0 : (n_parts - 1 - part);
506 else if (GET_CODE (op) == CONST_DOUBLE
507 && GET_MODE (op) == VOIDmode)
509 unsigned HOST_WIDE_INT value;
510 int n_parts = GET_MODE_SIZE (mode);
511 int i, part = -1;
513 for (i = 0; i < n_parts; i++)
515 if (i == 0)
516 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
517 else if (i == HOST_BITS_PER_WIDE_INT / 8)
518 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
519 else
520 value >>= 8;
522 if ((value & 0xff) != (unsigned)(def & 0xff))
524 if (part != -1)
525 return -1;
526 else
527 part = i;
531 return part == -1 ? 0 : (n_parts - 1 - part);
534 return -1;
537 /* Extract the QImode part number PART from integer
538 constant OP of mode MODE. */
541 s390_extract_qi (op, mode, part)
542 rtx op;
543 enum machine_mode mode;
544 int part;
546 int n_parts = GET_MODE_SIZE (mode);
547 if (part < 0 || part >= n_parts)
548 abort();
549 else
550 part = n_parts - 1 - part;
552 if (GET_CODE (op) == CONST_INT)
554 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
555 return ((value >> (8 * part)) & 0xff);
557 else if (GET_CODE (op) == CONST_DOUBLE
558 && GET_MODE (op) == VOIDmode)
560 unsigned HOST_WIDE_INT value;
561 if (part < HOST_BITS_PER_WIDE_INT / 8)
562 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
563 else
564 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
565 part -= HOST_BITS_PER_WIDE_INT / 8;
567 return ((value >> (8 * part)) & 0xff);
570 abort ();
574 /* Change optimizations to be performed, depending on the
575 optimization level.
577 LEVEL is the optimization level specified; 2 if `-O2' is
578 specified, 1 if `-O' is specified, and 0 if neither is specified.
580 SIZE is non-zero if `-Os' is specified and zero otherwise. */
582 void
583 optimization_options (level, size)
584 int level ATTRIBUTE_UNUSED;
585 int size ATTRIBUTE_UNUSED;
587 #ifdef HAVE_decrement_and_branch_on_count
588 /* When optimizing, enable use of BRCT instruction. */
589 if (level >= 1)
590 flag_branch_on_count_reg = 1;
591 #endif
594 void
595 override_options ()
597 /* Acquire a unique set number for our register saves and restores. */
598 s390_sr_alias_set = new_alias_set ();
602 /* Map for smallest class containing reg regno. */
604 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
605 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
606 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
607 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
608 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
609 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
610 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
611 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
612 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
613 ADDR_REGS, NO_REGS, ADDR_REGS
617 /* Return true if OP a (const_int 0) operand.
618 OP is the current operation.
619 MODE is the current operation mode. */
622 const0_operand (op, mode)
623 register rtx op;
624 enum machine_mode mode;
626 return op == CONST0_RTX (mode);
629 /* Return true if the mode of operand OP matches MODE.
630 If MODE is set to VOIDmode, set it to the mode of OP. */
632 static int
633 check_mode (op, mode)
634 register rtx op;
635 enum machine_mode *mode;
637 if (*mode == VOIDmode)
638 *mode = GET_MODE (op);
639 else
641 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
642 return 0;
644 return 1;
647 /* Return true if OP a valid operand for the LARL instruction.
648 OP is the current operation.
649 MODE is the current operation mode. */
652 larl_operand (op, mode)
653 register rtx op;
654 enum machine_mode mode;
656 if (! check_mode (op, &mode))
657 return 0;
659 /* Allow labels and local symbols. */
660 if (GET_CODE (op) == LABEL_REF)
661 return 1;
662 if (GET_CODE (op) == SYMBOL_REF
663 && (!flag_pic || SYMBOL_REF_FLAG (op)
664 || CONSTANT_POOL_ADDRESS_P (op)))
665 return 1;
667 /* Everything else must have a CONST, so strip it. */
668 if (GET_CODE (op) != CONST)
669 return 0;
670 op = XEXP (op, 0);
672 /* Allow adding *even* constants. */
673 if (GET_CODE (op) == PLUS)
675 if (GET_CODE (XEXP (op, 1)) != CONST_INT
676 || (INTVAL (XEXP (op, 1)) & 1) != 0)
677 return 0;
678 op = XEXP (op, 0);
681 /* Labels and local symbols allowed here as well. */
682 if (GET_CODE (op) == LABEL_REF)
683 return 1;
684 if (GET_CODE (op) == SYMBOL_REF
685 && (!flag_pic || SYMBOL_REF_FLAG (op)
686 || CONSTANT_POOL_ADDRESS_P (op)))
687 return 1;
689 /* Now we must have a @GOTENT offset or @PLT stub. */
690 if (GET_CODE (op) == UNSPEC
691 && XINT (op, 1) == 111)
692 return 1;
693 if (GET_CODE (op) == UNSPEC
694 && XINT (op, 1) == 113)
695 return 1;
697 return 0;
700 /* Return true if OP is a valid FP-Register.
701 OP is the current operation.
702 MODE is the current operation mode. */
705 fp_operand (op, mode)
706 register rtx op;
707 enum machine_mode mode;
709 register enum rtx_code code = GET_CODE (op);
710 if (! check_mode (op, &mode))
711 return 0;
712 if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
713 return 1;
714 else
715 return 0;
718 /* Helper routine to implement s_operand and s_imm_operand.
719 OP is the current operation.
720 MODE is the current operation mode.
721 ALLOW_IMMEDIATE specifies whether immediate operands should
722 be accepted or not. */
724 static int
725 general_s_operand (op, mode, allow_immediate)
726 register rtx op;
727 enum machine_mode mode;
728 int allow_immediate;
730 struct s390_address addr;
732 /* Call general_operand first, so that we don't have to
733 check for many special cases. */
734 if (!general_operand (op, mode))
735 return 0;
737 /* Just like memory_operand, allow (subreg (mem ...))
738 after reload. */
739 if (reload_completed
740 && GET_CODE (op) == SUBREG
741 && GET_CODE (SUBREG_REG (op)) == MEM)
742 op = SUBREG_REG (op);
744 switch (GET_CODE (op))
746 /* Constants that we are sure will be forced to the
747 literal pool in reload are OK as s-operand. Note
748 that we cannot call s390_preferred_reload_class here
749 because it might not be known yet at this point
750 whether the current function is a leaf or not. */
751 case CONST_INT:
752 case CONST_DOUBLE:
753 if (!allow_immediate || reload_completed)
754 break;
755 if (!legitimate_reload_constant_p (op))
756 return 1;
757 if (!TARGET_64BIT)
758 return 1;
759 break;
761 /* Memory operands are OK unless they already use an
762 index register. */
763 case MEM:
764 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
765 return 1;
766 if (s390_decompose_address (XEXP (op, 0), &addr, FALSE)
767 && !addr.indx)
768 return 1;
769 break;
771 default:
772 break;
775 return 0;
778 /* Return true if OP is a valid S-type operand.
779 OP is the current operation.
780 MODE is the current operation mode. */
783 s_operand (op, mode)
784 register rtx op;
785 enum machine_mode mode;
787 return general_s_operand (op, mode, 0);
790 /* Return true if OP is a valid S-type operand or an immediate
791 operand that can be addressed as S-type operand by forcing
792 it into the literal pool.
793 OP is the current operation.
794 MODE is the current operation mode. */
797 s_imm_operand (op, mode)
798 register rtx op;
799 enum machine_mode mode;
801 return general_s_operand (op, mode, 1);
804 /* Return true if OP is a valid operand for the BRAS instruction.
805 OP is the current operation.
806 MODE is the current operation mode. */
809 bras_sym_operand (op, mode)
810 register rtx op;
811 enum machine_mode mode ATTRIBUTE_UNUSED;
813 register enum rtx_code code = GET_CODE (op);
815 /* Allow SYMBOL_REFs. */
816 if (code == SYMBOL_REF)
817 return 1;
819 /* Allow @PLT stubs. */
820 if (code == CONST
821 && GET_CODE (XEXP (op, 0)) == UNSPEC
822 && XINT (XEXP (op, 0), 1) == 113)
823 return 1;
824 return 0;
828 /* Return true if OP is a load multiple operation. It is known to be a
829 PARALLEL and the first section will be tested.
830 OP is the current operation.
831 MODE is the current operation mode. */
834 load_multiple_operation (op, mode)
835 rtx op;
836 enum machine_mode mode ATTRIBUTE_UNUSED;
838 int count = XVECLEN (op, 0);
839 unsigned int dest_regno;
840 rtx src_addr;
841 int i, off;
844 /* Perform a quick check so we don't blow up below. */
845 if (count <= 1
846 || GET_CODE (XVECEXP (op, 0, 0)) != SET
847 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
848 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
849 return 0;
851 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
852 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
854 /* Check, is base, or base + displacement. */
856 if (GET_CODE (src_addr) == REG)
857 off = 0;
858 else if (GET_CODE (src_addr) == PLUS
859 && GET_CODE (XEXP (src_addr, 0)) == REG
860 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
862 off = INTVAL (XEXP (src_addr, 1));
863 src_addr = XEXP (src_addr, 0);
865 else
866 return 0;
868 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
869 return 0;
871 for (i = 1; i < count; i++)
873 rtx elt = XVECEXP (op, 0, i);
875 if (GET_CODE (elt) != SET
876 || GET_CODE (SET_DEST (elt)) != REG
877 || GET_MODE (SET_DEST (elt)) != Pmode
878 || REGNO (SET_DEST (elt)) != dest_regno + i
879 || GET_CODE (SET_SRC (elt)) != MEM
880 || GET_MODE (SET_SRC (elt)) != Pmode
881 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
882 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
883 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
884 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
885 != off + i * UNITS_PER_WORD)
886 return 0;
889 return 1;
892 /* Return true if OP is a store multiple operation. It is known to be a
893 PARALLEL and the first section will be tested.
894 OP is the current operation.
895 MODE is the current operation mode. */
898 store_multiple_operation (op, mode)
899 rtx op;
900 enum machine_mode mode ATTRIBUTE_UNUSED;
902 int count = XVECLEN (op, 0);
903 unsigned int src_regno;
904 rtx dest_addr;
905 int i, off;
907 /* Perform a quick check so we don't blow up below. */
908 if (count <= 1
909 || GET_CODE (XVECEXP (op, 0, 0)) != SET
910 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
911 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
912 return 0;
914 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
915 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
917 /* Check, is base, or base + displacement. */
919 if (GET_CODE (dest_addr) == REG)
920 off = 0;
921 else if (GET_CODE (dest_addr) == PLUS
922 && GET_CODE (XEXP (dest_addr, 0)) == REG
923 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
925 off = INTVAL (XEXP (dest_addr, 1));
926 dest_addr = XEXP (dest_addr, 0);
928 else
929 return 0;
931 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
932 return 0;
934 for (i = 1; i < count; i++)
936 rtx elt = XVECEXP (op, 0, i);
938 if (GET_CODE (elt) != SET
939 || GET_CODE (SET_SRC (elt)) != REG
940 || GET_MODE (SET_SRC (elt)) != Pmode
941 || REGNO (SET_SRC (elt)) != src_regno + i
942 || GET_CODE (SET_DEST (elt)) != MEM
943 || GET_MODE (SET_DEST (elt)) != Pmode
944 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
945 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
946 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
947 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
948 != off + i * UNITS_PER_WORD)
949 return 0;
951 return 1;
955 /* Return true if OP contains a symbol reference */
958 symbolic_reference_mentioned_p (op)
959 rtx op;
961 register const char *fmt;
962 register int i;
964 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
965 return 1;
967 fmt = GET_RTX_FORMAT (GET_CODE (op));
968 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
970 if (fmt[i] == 'E')
972 register int j;
974 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
975 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
976 return 1;
979 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
980 return 1;
983 return 0;
987 /* Return true if OP is a legitimate general operand when
988 generating PIC code. It is given that flag_pic is on
989 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
992 legitimate_pic_operand_p (op)
993 register rtx op;
995 /* Accept all non-symbolic constants. */
996 if (!SYMBOLIC_CONST (op))
997 return 1;
999 /* Accept immediate LARL operands. */
1000 if (TARGET_64BIT)
1001 return larl_operand (op, VOIDmode);
1003 /* Reject everything else; must be handled
1004 via emit_pic_move. */
1005 return 0;
1008 /* Returns true if the constant value OP is a legitimate general operand.
1009 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1012 legitimate_constant_p (op)
1013 register rtx op;
1015 /* Accept all non-symbolic constants. */
1016 if (!SYMBOLIC_CONST (op))
1017 return 1;
1019 /* In the PIC case, symbolic constants must *not* be
1020 forced into the literal pool. We accept them here,
1021 so that they will be handled by emit_pic_move. */
1022 if (flag_pic)
1023 return 1;
1025 /* Even in the non-PIC case, we can accept immediate
1026 LARL operands here. */
1027 if (TARGET_64BIT)
1028 return larl_operand (op, VOIDmode);
1030 /* All remaining non-PIC symbolic constants are
1031 forced into the literal pool. */
1032 return 0;
1035 /* Returns true if the constant value OP is a legitimate general
1036 operand during and after reload. The difference to
1037 legitimate_constant_p is that this function will not accept
1038 a constant that would need to be forced to the literal pool
1039 before it can be used as operand. */
1042 legitimate_reload_constant_p (op)
1043 register rtx op;
1045 /* Accept l(g)hi operands. */
1046 if (GET_CODE (op) == CONST_INT
1047 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1048 return 1;
1050 /* Accept lliXX operands. */
1051 if (TARGET_64BIT
1052 && s390_single_hi (op, DImode, 0) >= 0)
1053 return 1;
1055 /* Accept larl operands. */
1056 if (TARGET_64BIT
1057 && larl_operand (op, VOIDmode))
1058 return 1;
1060 /* If reload is completed, and we do not already have a
1061 literal pool, and OP must be forced to the literal
1062 pool, then something must have gone wrong earlier.
1063 We *cannot* force the constant any more, because the
1064 prolog generation already decided we don't need to
1065 set up the base register. */
1066 if (reload_completed && !regs_ever_live[BASE_REGISTER])
1067 abort ();
1069 /* Everything else cannot be handled without reload. */
1070 return 0;
1073 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1074 return the class of reg to actually use. */
1076 enum reg_class
1077 s390_preferred_reload_class (op, class)
1078 rtx op;
1079 enum reg_class class;
1081 /* This can happen if a floating point constant is being
1082 reloaded into an integer register. Leave well alone. */
1083 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1084 && class != FP_REGS)
1085 return class;
1087 switch (GET_CODE (op))
1089 /* Constants we cannot reload must be forced into the
1090 literal pool. For constants we *could* handle directly,
1091 it might still be preferable to put them in the pool and
1092 use a memory-to-memory instruction.
1094 However, try to avoid needlessly allocating a literal
1095 pool in a routine that wouldn't otherwise need any.
1096 Heuristically, we assume that 64-bit leaf functions
1097 typically don't need a literal pool, all others do. */
1098 case CONST_DOUBLE:
1099 case CONST_INT:
1100 if (!legitimate_reload_constant_p (op))
1101 return NO_REGS;
1103 if (TARGET_64BIT && current_function_is_leaf)
1104 return class;
1106 return NO_REGS;
1108 /* If a symbolic constant or a PLUS is reloaded,
1109 it is most likely being used as an address, so
1110 prefer ADDR_REGS. If 'class' is not a superset
1111 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1112 case PLUS:
1113 case LABEL_REF:
1114 case SYMBOL_REF:
1115 case CONST:
1116 if (reg_class_subset_p (ADDR_REGS, class))
1117 return ADDR_REGS;
1118 else
1119 return NO_REGS;
1121 default:
1122 break;
1125 return class;
1128 /* Return the register class of a scratch register needed to
1129 load IN into a register of class CLASS in MODE.
1131 We need a temporary when loading a PLUS expression which
1132 is not a legitimate operand of the LOAD ADDRESS instruction. */
1134 enum reg_class
1135 s390_secondary_input_reload_class (class, mode, in)
1136 enum reg_class class ATTRIBUTE_UNUSED;
1137 enum machine_mode mode;
1138 rtx in;
1140 if (s390_plus_operand (in, mode))
1141 return ADDR_REGS;
1143 return NO_REGS;
1146 /* Return true if OP is a PLUS that is not a legitimate
1147 operand for the LA instruction.
1148 OP is the current operation.
1149 MODE is the current operation mode. */
1152 s390_plus_operand (op, mode)
1153 register rtx op;
1154 enum machine_mode mode;
1156 if (!check_mode (op, &mode) || mode != Pmode)
1157 return FALSE;
1159 if (GET_CODE (op) != PLUS)
1160 return FALSE;
1162 if (legitimate_la_operand_p (op))
1163 return FALSE;
1165 return TRUE;
1168 /* Generate code to load SRC, which is PLUS that is not a
1169 legitimate operand for the LA instruction, into TARGET.
1170 SCRATCH may be used as scratch register. */
1172 void
1173 s390_expand_plus_operand (target, src, scratch_in)
1174 register rtx target;
1175 register rtx src;
1176 register rtx scratch_in;
1178 rtx sum1, sum2, scratch;
1180 /* ??? reload apparently does not ensure that the scratch register
1181 and the target do not overlap. We absolutely require this to be
1182 the case, however. Therefore the reload_in[sd]i patterns ask for
1183 a double-sized scratch register, and if one part happens to be
1184 equal to the target, we use the other one. */
1185 scratch = gen_rtx_REG (Pmode, REGNO (scratch_in));
1186 if (rtx_equal_p (scratch, target))
1187 scratch = gen_rtx_REG (Pmode, REGNO (scratch_in) + 1);
1189 /* src must be a PLUS; get its two operands. */
1190 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1191 abort ();
1193 /* Check if any of the two operands is already scheduled
1194 for replacement by reload. This can happen e.g. when
1195 float registers occur in an address. */
1196 sum1 = find_replacement (&XEXP (src, 0));
1197 sum2 = find_replacement (&XEXP (src, 1));
1199 /* If one of the two operands is equal to the target,
1200 make it the first one. If one is a constant, make
1201 it the second one. */
1202 if (rtx_equal_p (target, sum2)
1203 || GET_CODE (sum1) == CONST_INT)
1205 rtx tem = sum2;
1206 sum2 = sum1;
1207 sum1 = tem;
1210 /* If the first operand is not an address register,
1211 we reload it into the target. */
1212 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1214 emit_move_insn (target, sum1);
1215 sum1 = target;
1218 /* Likewise for the second operand. However, take
1219 care not to clobber the target if we already used
1220 it for the first operand. Use the scratch instead.
1221 Also, allow an immediate offset if it is in range. */
1222 if ((true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1223 && !(GET_CODE (sum2) == CONST_INT
1224 && INTVAL (sum2) >= 0 && INTVAL (sum2) < 4096))
1226 if (!rtx_equal_p (target, sum1))
1228 emit_move_insn (target, sum2);
1229 sum2 = target;
1231 else
1233 emit_move_insn (scratch, sum2);
1234 sum2 = scratch;
1238 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1239 is only ever performed on addresses, so we can mark the
1240 sum as legitimate for LA in any case. */
1241 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1242 src = legitimize_la_operand (src);
1243 emit_insn (gen_rtx_SET (VOIDmode, target, src));
1247 /* Decompose a RTL expression ADDR for a memory address into
1248 its components, returned in OUT. The boolean STRICT
1249 specifies whether strict register checking applies.
1250 Returns 0 if ADDR is not a valid memory address, nonzero
1251 otherwise. If OUT is NULL, don't return the components,
1252 but check for validity only.
1254 Note: Only addresses in canonical form are recognized.
1255 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1256 canonical form so that they will be recognized. */
1258 static int
1259 s390_decompose_address (addr, out, strict)
1260 register rtx addr;
1261 struct s390_address *out;
1262 int strict;
1264 rtx base = NULL_RTX;
1265 rtx indx = NULL_RTX;
1266 rtx disp = NULL_RTX;
1267 int pointer = FALSE;
1269 /* Decompose address into base + index + displacement. */
1271 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1272 base = addr;
1274 else if (GET_CODE (addr) == PLUS)
1276 rtx op0 = XEXP (addr, 0);
1277 rtx op1 = XEXP (addr, 1);
1278 enum rtx_code code0 = GET_CODE (op0);
1279 enum rtx_code code1 = GET_CODE (op1);
1281 if (code0 == REG || code0 == UNSPEC)
1283 if (code1 == REG || code1 == UNSPEC)
1285 indx = op0; /* index + base */
1286 base = op1;
1289 else
1291 base = op0; /* base + displacement */
1292 disp = op1;
1296 else if (code0 == PLUS)
1298 indx = XEXP (op0, 0); /* index + base + disp */
1299 base = XEXP (op0, 1);
1300 disp = op1;
1303 else
1305 return FALSE;
1309 else
1310 disp = addr; /* displacement */
1313 /* Validate base register. */
1314 if (base)
1316 if (GET_CODE (base) == UNSPEC)
1318 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1319 return FALSE;
1320 base = XVECEXP (base, 0, 0);
1321 pointer = TRUE;
1324 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
1325 return FALSE;
1327 if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
1328 || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
1329 return FALSE;
1331 if (REGNO (base) == BASE_REGISTER
1332 || REGNO (base) == STACK_POINTER_REGNUM
1333 || REGNO (base) == FRAME_POINTER_REGNUM
1334 || ((reload_completed || reload_in_progress)
1335 && frame_pointer_needed
1336 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1337 || (flag_pic
1338 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1339 pointer = TRUE;
1342 /* Validate index register. */
1343 if (indx)
1345 if (GET_CODE (indx) == UNSPEC)
1347 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1348 return FALSE;
1349 indx = XVECEXP (indx, 0, 0);
1350 pointer = TRUE;
1353 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1354 return FALSE;
1356 if ((strict && ! REG_OK_FOR_BASE_STRICT_P (indx))
1357 || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (indx)))
1358 return FALSE;
1360 if (REGNO (indx) == BASE_REGISTER
1361 || REGNO (indx) == STACK_POINTER_REGNUM
1362 || REGNO (indx) == FRAME_POINTER_REGNUM
1363 || ((reload_completed || reload_in_progress)
1364 && frame_pointer_needed
1365 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1366 || (flag_pic
1367 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1368 pointer = TRUE;
1371 /* Validate displacement. */
1372 if (disp)
1374 /* Allow integer constant in range. */
1375 if (GET_CODE (disp) == CONST_INT)
1377 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1378 return FALSE;
1381 /* In the small-PIC case, the linker converts @GOT12
1382 offsets to possible displacements. */
1383 else if (GET_CODE (disp) == CONST
1384 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1385 && XINT (XEXP (disp, 0), 1) == 110)
1387 if (flag_pic != 1)
1388 return FALSE;
1390 pointer = TRUE;
1393 /* We can convert literal pool addresses to
1394 displacements by basing them off the base register. */
1395 else
1397 /* In some cases, we can accept an additional
1398 small constant offset. Split these off here. */
1400 unsigned int offset = 0;
1402 if (GET_CODE (disp) == CONST
1403 && GET_CODE (XEXP (disp, 0)) == PLUS
1404 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1406 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1407 disp = XEXP (XEXP (disp, 0), 0);
1410 /* Now we must have a literal pool address. */
1411 if (GET_CODE (disp) != SYMBOL_REF
1412 || !CONSTANT_POOL_ADDRESS_P (disp))
1413 return FALSE;
1415 /* In 64-bit PIC mode we cannot accept symbolic
1416 constants in the constant pool. */
1417 if (TARGET_64BIT && flag_pic
1418 && SYMBOLIC_CONST (get_pool_constant (disp)))
1419 return FALSE;
1421 /* If we have an offset, make sure it does not
1422 exceed the size of the constant pool entry. */
1423 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1424 return FALSE;
1426 /* Either base or index must be free to
1427 hold the base register. */
1428 if (base && indx)
1429 return FALSE;
1431 /* Convert the address. */
1432 if (base)
1433 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1434 else
1435 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1437 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1438 disp = gen_rtx_CONST (Pmode, disp);
1440 if (offset)
1441 disp = plus_constant (disp, offset);
1443 pointer = TRUE;
1447 if (!base && !indx)
1448 pointer = TRUE;
1450 if (out)
1452 out->base = base;
1453 out->indx = indx;
1454 out->disp = disp;
1455 out->pointer = pointer;
1458 return TRUE;
1461 /* Return nonzero if ADDR is a valid memory address.
1462 STRICT specifies whether strict register checking applies. */
1465 legitimate_address_p (mode, addr, strict)
1466 enum machine_mode mode ATTRIBUTE_UNUSED;
1467 register rtx addr;
1468 int strict;
1470 return s390_decompose_address (addr, NULL, strict);
1473 /* Return 1 if OP is a valid operand for the LA instruction.
1474 In 31-bit, we need to prove that the result is used as an
1475 address, as LA performs only a 31-bit addition. */
1478 legitimate_la_operand_p (op)
1479 register rtx op;
1481 struct s390_address addr;
1482 if (!s390_decompose_address (op, &addr, FALSE))
1483 return FALSE;
1485 if (TARGET_64BIT || addr.pointer)
1486 return TRUE;
1488 return FALSE;
1491 /* Return a modified variant of OP that is guaranteed to
1492 be accepted by legitimate_la_operand_p. */
1495 legitimize_la_operand (op)
1496 register rtx op;
1498 struct s390_address addr;
1499 if (!s390_decompose_address (op, &addr, FALSE))
1500 abort ();
1502 if (TARGET_64BIT || addr.pointer)
1503 return op;
1505 if (!addr.base)
1506 abort ();
1508 op = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr.base), 101);
1509 if (addr.indx)
1510 op = gen_rtx_PLUS (Pmode, op, addr.indx);
1511 if (addr.disp)
1512 op = gen_rtx_PLUS (Pmode, op, addr.disp);
1514 return op;
1517 /* Return a legitimate reference for ORIG (an address) using the
1518 register REG. If REG is 0, a new pseudo is generated.
1520 There are two types of references that must be handled:
1522 1. Global data references must load the address from the GOT, via
1523 the PIC reg. An insn is emitted to do this load, and the reg is
1524 returned.
1526 2. Static data references, constant pool addresses, and code labels
1527 compute the address as an offset from the GOT, whose base is in
1528 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1529 differentiate them from global data objects. The returned
1530 address is the PIC reg + an unspec constant.
1532 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1533 reg also appears in the address. */
1536 legitimize_pic_address (orig, reg)
1537 rtx orig;
1538 rtx reg;
1540 rtx addr = orig;
1541 rtx new = orig;
1542 rtx base;
1544 if (GET_CODE (addr) == LABEL_REF
1545 || (GET_CODE (addr) == SYMBOL_REF
1546 && (SYMBOL_REF_FLAG (addr)
1547 || CONSTANT_POOL_ADDRESS_P (addr))))
1549 /* This is a local symbol. */
1550 if (TARGET_64BIT)
1552 /* Access local symbols PC-relative via LARL.
1553 This is the same as in the non-PIC case, so it is
1554 handled automatically ... */
1556 else
1558 /* Access local symbols relative to the literal pool. */
1560 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1562 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1563 addr = gen_rtx_CONST (SImode, addr);
1564 addr = force_const_mem (SImode, addr);
1565 emit_move_insn (temp, addr);
1567 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1568 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1569 new = gen_rtx_PLUS (Pmode, base, temp);
1571 if (reg != 0)
1573 emit_move_insn (reg, new);
1574 new = reg;
1578 else if (GET_CODE (addr) == SYMBOL_REF)
1580 if (reg == 0)
1581 reg = gen_reg_rtx (Pmode);
1583 if (flag_pic == 1)
1585 /* Assume GOT offset < 4k. This is handled the same way
1586 in both 31- and 64-bit code (@GOT12). */
1588 current_function_uses_pic_offset_table = 1;
1590 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1591 new = gen_rtx_CONST (Pmode, new);
1592 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1593 new = gen_rtx_MEM (Pmode, new);
1594 RTX_UNCHANGING_P (new) = 1;
1595 emit_move_insn (reg, new);
1596 new = reg;
1598 else if (TARGET_64BIT)
1600 /* If the GOT offset might be >= 4k, we determine the position
1601 of the GOT entry via a PC-relative LARL (@GOTENT). */
1603 rtx temp = gen_reg_rtx (Pmode);
1605 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1606 new = gen_rtx_CONST (Pmode, new);
1607 emit_move_insn (temp, new);
1609 new = gen_rtx_MEM (Pmode, temp);
1610 RTX_UNCHANGING_P (new) = 1;
1611 emit_move_insn (reg, new);
1612 new = reg;
1614 else
1616 /* If the GOT offset might be >= 4k, we have to load it
1617 from the literal pool (@GOT). */
1619 rtx temp = gen_reg_rtx (Pmode);
1621 current_function_uses_pic_offset_table = 1;
1623 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
1624 addr = gen_rtx_CONST (SImode, addr);
1625 addr = force_const_mem (SImode, addr);
1626 emit_move_insn (temp, addr);
1628 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
1629 new = gen_rtx_MEM (Pmode, new);
1630 RTX_UNCHANGING_P (new) = 1;
1631 emit_move_insn (reg, new);
1632 new = reg;
1635 else
1637 if (GET_CODE (addr) == CONST)
1639 addr = XEXP (addr, 0);
1640 if (GET_CODE (addr) == UNSPEC)
1642 if (XVECLEN (addr, 0) != 1)
1643 abort ();
1644 switch (XINT (addr, 1))
1646 /* If someone moved an @GOT or lt-relative UNSPEC
1647 out of the literal pool, force them back in. */
1648 case 100:
1649 case 112:
1650 case 114:
1651 new = force_const_mem (SImode, orig);
1652 break;
1654 /* @GOTENT is OK as is. */
1655 case 111:
1656 break;
1658 /* @PLT is OK as is on 64-bit, must be converted to
1659 lt-relative PLT on 31-bit. */
1660 case 113:
1661 if (!TARGET_64BIT)
1663 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1665 addr = XVECEXP (addr, 0, 0);
1666 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
1667 addr = gen_rtx_CONST (SImode, addr);
1668 addr = force_const_mem (SImode, addr);
1669 emit_move_insn (temp, addr);
1671 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1672 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1673 new = gen_rtx_PLUS (Pmode, base, temp);
1675 if (reg != 0)
1677 emit_move_insn (reg, new);
1678 new = reg;
1681 break;
1683 /* Everything else cannot happen. */
1684 default:
1685 abort ();
1688 else if (GET_CODE (addr) != PLUS)
1689 abort ();
1691 if (GET_CODE (addr) == PLUS)
1693 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1694 /* Check first to see if this is a constant offset
1695 from a local symbol reference. */
1696 if ((GET_CODE (op0) == LABEL_REF
1697 || (GET_CODE (op0) == SYMBOL_REF
1698 && (SYMBOL_REF_FLAG (op0)
1699 || CONSTANT_POOL_ADDRESS_P (op0))))
1700 && GET_CODE (op1) == CONST_INT)
1702 if (TARGET_64BIT)
1704 if (INTVAL (op1) & 1)
1706 /* LARL can't handle odd offsets, so emit a
1707 pair of LARL and LA. */
1708 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1710 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
1712 int even = INTVAL (op1) - 1;
1713 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
1714 op1 = GEN_INT (1);
1717 emit_move_insn (temp, op0);
1718 new = gen_rtx_PLUS (Pmode, temp, op1);
1720 if (reg != 0)
1722 emit_move_insn (reg, new);
1723 new = reg;
1726 else
1728 /* If the offset is even, we can just use LARL.
1729 This will happen automatically. */
1732 else
1734 /* Access local symbols relative to the literal pool. */
1736 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1738 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
1739 addr = gen_rtx_PLUS (SImode, addr, op1);
1740 addr = gen_rtx_CONST (SImode, addr);
1741 addr = force_const_mem (SImode, addr);
1742 emit_move_insn (temp, addr);
1744 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1745 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1746 new = gen_rtx_PLUS (Pmode, base, temp);
1748 if (reg != 0)
1750 emit_move_insn (reg, new);
1751 new = reg;
1756 /* Now, check whether it is an LT-relative symbol plus offset
1757 that was pulled out of the literal pool. Force it back in. */
1759 else if (GET_CODE (op0) == UNSPEC
1760 && GET_CODE (op1) == CONST_INT)
1762 if (XVECLEN (op0, 0) != 1)
1763 abort ();
1764 if (XINT (op0, 1) != 100)
1765 abort ();
1767 new = force_const_mem (SImode, orig);
1770 /* Otherwise, compute the sum. */
1771 else
1773 base = legitimize_pic_address (XEXP (addr, 0), reg);
1774 new = legitimize_pic_address (XEXP (addr, 1),
1775 base == reg ? NULL_RTX : reg);
1776 if (GET_CODE (new) == CONST_INT)
1777 new = plus_constant (base, INTVAL (new));
1778 else
1780 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
1782 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
1783 new = XEXP (new, 1);
1785 new = gen_rtx_PLUS (Pmode, base, new);
1788 if (GET_CODE (new) == CONST)
1789 new = XEXP (new, 0);
1790 new = force_operand (new, 0);
1794 return new;
1797 /* Emit insns to move operands[1] into operands[0]. */
1799 void
1800 emit_pic_move (operands, mode)
1801 rtx *operands;
1802 enum machine_mode mode ATTRIBUTE_UNUSED;
1804 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
1806 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1807 operands[1] = force_reg (Pmode, operands[1]);
1808 else
1809 operands[1] = legitimize_pic_address (operands[1], temp);
1812 /* Try machine-dependent ways of modifying an illegitimate address X
1813 to be legitimate. If we find one, return the new, valid address.
1815 OLDX is the address as it was before break_out_memory_refs was called.
1816 In some cases it is useful to look at this to decide what needs to be done.
1818 MODE is the mode of the operand pointed to by X.
1820 When -fpic is used, special handling is needed for symbolic references.
1821 See comments by legitimize_pic_address for details. */
1824 legitimize_address (x, oldx, mode)
1825 register rtx x;
1826 register rtx oldx ATTRIBUTE_UNUSED;
1827 enum machine_mode mode ATTRIBUTE_UNUSED;
1829 rtx constant_term = const0_rtx;
1831 if (flag_pic)
1833 if (SYMBOLIC_CONST (x)
1834 || (GET_CODE (x) == PLUS
1835 && (SYMBOLIC_CONST (XEXP (x, 0))
1836 || SYMBOLIC_CONST (XEXP (x, 1)))))
1837 x = legitimize_pic_address (x, 0);
1839 if (legitimate_address_p (mode, x, FALSE))
1840 return x;
1843 x = eliminate_constant_term (x, &constant_term);
1845 if (GET_CODE (x) == PLUS)
1847 if (GET_CODE (XEXP (x, 0)) == REG)
1849 register rtx temp = gen_reg_rtx (Pmode);
1850 register rtx val = force_operand (XEXP (x, 1), temp);
1851 if (val != temp)
1852 emit_move_insn (temp, val);
1854 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
1857 else if (GET_CODE (XEXP (x, 1)) == REG)
1859 register rtx temp = gen_reg_rtx (Pmode);
1860 register rtx val = force_operand (XEXP (x, 0), temp);
1861 if (val != temp)
1862 emit_move_insn (temp, val);
1864 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
1868 if (constant_term != const0_rtx)
1869 x = gen_rtx_PLUS (Pmode, x, constant_term);
1871 return x;
1874 /* In the name of slightly smaller debug output, and to cater to
1875 general assembler losage, recognize various UNSPEC sequences
1876 and turn them back into a direct symbol reference. */
1879 s390_simplify_dwarf_addr (orig_x)
1880 rtx orig_x;
1882 rtx x = orig_x, y;
1884 if (GET_CODE (x) != MEM)
1885 return orig_x;
1887 x = XEXP (x, 0);
1888 if (GET_CODE (x) == PLUS
1889 && GET_CODE (XEXP (x, 1)) == CONST
1890 && GET_CODE (XEXP (x, 0)) == REG
1891 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1893 y = XEXP (XEXP (x, 1), 0);
1894 if (GET_CODE (y) == UNSPEC
1895 && XINT (y, 1) == 110)
1896 return XVECEXP (y, 0, 0);
1897 return orig_x;
1900 if (GET_CODE (x) == CONST)
1902 y = XEXP (x, 0);
1903 if (GET_CODE (y) == UNSPEC
1904 && XINT (y, 1) == 111)
1905 return XVECEXP (y, 0, 0);
1906 return orig_x;
1909 return orig_x;
1912 /* Output symbolic constant X in assembler syntax to
1913 stdio stream FILE. */
1915 void
1916 s390_output_symbolic_const (file, x)
1917 FILE *file;
1918 rtx x;
1920 switch (GET_CODE (x))
1922 case CONST:
1923 case ZERO_EXTEND:
1924 case SIGN_EXTEND:
1925 s390_output_symbolic_const (file, XEXP (x, 0));
1926 break;
1928 case PLUS:
1929 s390_output_symbolic_const (file, XEXP (x, 0));
1930 fprintf (file, "+");
1931 s390_output_symbolic_const (file, XEXP (x, 1));
1932 break;
1934 case MINUS:
1935 s390_output_symbolic_const (file, XEXP (x, 0));
1936 fprintf (file, "-");
1937 s390_output_symbolic_const (file, XEXP (x, 1));
1938 break;
1940 case CONST_INT:
1941 output_addr_const (file, x);
1942 break;
1944 case LABEL_REF:
1945 case CODE_LABEL:
1946 output_addr_const (file, x);
1947 break;
1949 case SYMBOL_REF:
1950 output_addr_const (file, x);
1951 if (CONSTANT_POOL_ADDRESS_P (x) && s390_pool_count != 0)
1952 fprintf (file, "_%X", s390_pool_count);
1953 break;
1955 case UNSPEC:
1956 if (XVECLEN (x, 0) != 1)
1957 output_operand_lossage ("invalid UNSPEC as operand (1)");
1958 switch (XINT (x, 1))
1960 case 100:
1961 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1962 fprintf (file, "-.LT%X_%X",
1963 s390_function_count, s390_pool_count);
1964 break;
1965 case 110:
1966 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1967 fprintf (file, "@GOT12");
1968 break;
1969 case 111:
1970 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1971 fprintf (file, "@GOTENT");
1972 break;
1973 case 112:
1974 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1975 fprintf (file, "@GOT");
1976 break;
1977 case 113:
1978 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1979 fprintf (file, "@PLT");
1980 break;
1981 case 114:
1982 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
1983 fprintf (file, "@PLT-.LT%X_%X",
1984 s390_function_count, s390_pool_count);
1985 break;
1986 default:
1987 output_operand_lossage ("invalid UNSPEC as operand (2)");
1988 break;
1990 break;
1992 default:
1993 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
1994 break;
1998 /* Output address operand ADDR in assembler syntax to
1999 stdio stream FILE. */
2001 void
2002 print_operand_address (file, addr)
2003 FILE *file;
2004 rtx addr;
2006 struct s390_address ad;
2008 if (!s390_decompose_address (addr, &ad, TRUE))
2009 output_operand_lossage ("Cannot decompose address.");
2011 if (ad.disp)
2012 s390_output_symbolic_const (file, ad.disp);
2013 else
2014 fprintf (file, "0");
2016 if (ad.base && ad.indx)
2017 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2018 reg_names[REGNO (ad.base)]);
2019 else if (ad.base)
2020 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2023 /* Output operand X in assembler syntax to stdio stream FILE.
2024 CODE specified the format flag. The following format flags
2025 are recognized:
2027 'C': print opcode suffix for branch condition.
2028 'D': print opcode suffix for inverse branch condition.
2029 'Y': print current constant pool address (pc-relative).
2030 'y': print current constant pool address (absolute).
2031 'O': print only the displacement of a memory reference.
2032 'R': print only the base register of a memory reference.
2033 'N': print the second word of a DImode operand.
2034 'M': print the second word of a TImode operand.
2036 'b': print integer X as if it's an unsigned byte.
2037 'x': print integer X as if it's an unsigned word.
2038 'h': print integer X as if it's a signed word. */
2040 void
2041 print_operand (file, x, code)
2042 FILE *file;
2043 rtx x;
2044 int code;
2046 switch (code)
2048 case 'C':
2049 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2050 return;
2052 case 'D':
2053 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
2054 return;
2056 case 'Y':
2057 fprintf (file, ".LT%X_%X-.", s390_function_count, s390_pool_count);
2058 return;
2060 case 'y':
2061 fprintf (file, ".LT%X_%X", s390_function_count, s390_pool_count);
2062 return;
2064 case 'O':
2066 struct s390_address ad;
2068 if (GET_CODE (x) != MEM
2069 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
2070 || ad.indx)
2071 abort ();
2073 if (ad.disp)
2074 s390_output_symbolic_const (file, ad.disp);
2075 else
2076 fprintf (file, "0");
2078 return;
2080 case 'R':
2082 struct s390_address ad;
2084 if (GET_CODE (x) != MEM
2085 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
2086 || ad.indx)
2087 abort ();
2089 if (ad.base)
2090 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2091 else
2092 fprintf (file, "0");
2094 return;
2096 case 'N':
2097 if (GET_CODE (x) == REG)
2098 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2099 else if (GET_CODE (x) == MEM)
2100 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
2101 else
2102 abort ();
2103 break;
2105 case 'M':
2106 if (GET_CODE (x) == REG)
2107 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2108 else if (GET_CODE (x) == MEM)
2109 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
2110 else
2111 abort ();
2112 break;
2115 switch (GET_CODE (x))
2117 case REG:
2118 fprintf (file, "%s", reg_names[REGNO (x)]);
2119 break;
2121 case MEM:
2122 output_address (XEXP (x, 0));
2123 break;
2125 case CONST:
2126 case CODE_LABEL:
2127 case LABEL_REF:
2128 case SYMBOL_REF:
2129 s390_output_symbolic_const (file, x);
2130 break;
2132 case CONST_INT:
2133 if (code == 'b')
2134 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2135 else if (code == 'x')
2136 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
2137 else if (code == 'h')
2138 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2139 else
2140 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2141 break;
2143 case CONST_DOUBLE:
2144 if (GET_MODE (x) != VOIDmode)
2145 abort ();
2146 if (code == 'b')
2147 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2148 else if (code == 'x')
2149 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2150 else if (code == 'h')
2151 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2152 else
2153 abort ();
2154 break;
2156 default:
2157 fatal_insn ("UNKNOWN in print_operand !?", x);
2158 break;
2162 /* Target hook for assembling integer objects. We need to define it
2163 here to work a round a bug in some versions of GAS, which couldn't
2164 handle values smaller than INT_MIN when printed in decimal. */
2166 static bool
2167 s390_assemble_integer (x, size, aligned_p)
2168 rtx x;
2169 unsigned int size;
2170 int aligned_p;
2172 if (size == 8 && aligned_p
2173 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2175 fputs ("\t.quad\t", asm_out_file);
2176 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2177 putc ('\n', asm_out_file);
2178 return true;
2180 return default_assemble_integer (x, size, aligned_p);
2184 #define DEBUG_SCHED 0
2186 /* Returns true if register REGNO is used for forming
2187 a memory address in expression X. */
2189 static int
2190 reg_used_in_mem_p (regno, x)
2191 int regno;
2192 rtx x;
2194 enum rtx_code code = GET_CODE (x);
2195 int i, j;
2196 const char *fmt;
2198 if (code == MEM)
2200 if (refers_to_regno_p (regno, regno+1,
2201 XEXP (x, 0), 0))
2202 return 1;
2204 else if (code == SET
2205 && GET_CODE (SET_DEST (x)) == PC)
2207 if (refers_to_regno_p (regno, regno+1,
2208 SET_SRC (x), 0))
2209 return 1;
2212 fmt = GET_RTX_FORMAT (code);
2213 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2215 if (fmt[i] == 'e'
2216 && reg_used_in_mem_p (regno, XEXP (x, i)))
2217 return 1;
2219 else if (fmt[i] == 'E')
2220 for (j = 0; j < XVECLEN (x, i); j++)
2221 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2222 return 1;
2224 return 0;
2227 /* Returns true if expression DEP_RTX sets an address register
2228 used by instruction INSN to address memory. */
2230 static int
2231 addr_generation_dependency_p (dep_rtx, insn)
2232 rtx dep_rtx;
2233 rtx insn;
2235 rtx target, pat;
2237 if (GET_CODE (dep_rtx) == SET)
2239 target = SET_DEST (dep_rtx);
2241 if (GET_CODE (target) == REG)
2243 int regno = REGNO (target);
2245 if (get_attr_type (insn) == TYPE_LA)
2247 pat = PATTERN (insn);
2248 if (GET_CODE (pat) == PARALLEL)
2250 if (XVECLEN (pat, 0) != 2)
2251 abort();
2252 pat = XVECEXP (pat, 0, 0);
2254 if (GET_CODE (pat) == SET)
2255 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
2256 else
2257 abort();
2259 else if (get_attr_atype (insn) == ATYPE_MEM)
2260 return reg_used_in_mem_p (regno, PATTERN (insn));
2263 return 0;
2267 /* Return the modified cost of the dependency of instruction INSN
2268 on instruction DEP_INSN through the link LINK. COST is the
2269 default cost of that dependency.
2271 Data dependencies are all handled without delay. However, if a
2272 register is modified and subsequently used as base or index
2273 register of a memory reference, at least 4 cycles need to pass
2274 between setting and using the register to avoid pipeline stalls.
2275 An exception is the LA instruction. An address generated by LA can
2276 be used by introducing only a one cycle stall on the pipeline. */
2278 static int
2279 s390_adjust_cost (insn, link, dep_insn, cost)
2280 rtx insn;
2281 rtx link;
2282 rtx dep_insn;
2283 int cost;
2285 rtx dep_rtx;
2286 int i;
2288 /* If the dependence is an anti-dependence, there is no cost. For an
2289 output dependence, there is sometimes a cost, but it doesn't seem
2290 worth handling those few cases. */
2292 if (REG_NOTE_KIND (link) != 0)
2293 return 0;
2295 /* If we can't recognize the insns, we can't really do anything. */
2296 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2297 return cost;
2299 dep_rtx = PATTERN (dep_insn);
2301 if (GET_CODE (dep_rtx) == SET)
2303 if (addr_generation_dependency_p (dep_rtx, insn))
2305 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2306 if (DEBUG_SCHED)
2308 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2309 cost);
2310 debug_rtx (dep_insn);
2311 debug_rtx (insn);
2315 else if (GET_CODE (dep_rtx) == PARALLEL)
2317 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2319 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2320 insn))
2322 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2323 if (DEBUG_SCHED)
2325 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2326 ,cost);
2327 debug_rtx (dep_insn);
2328 debug_rtx (insn);
2334 return cost;
2338 /* A C statement (sans semicolon) to update the integer scheduling priority
2339 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2340 increase the priority to execute INSN later. Do not define this macro if
2341 you do not need to adjust the scheduling priorities of insns.
2343 A LA instruction maybe scheduled later, since the pipeline bypasses the
2344 calculated value. */
2346 static int
2347 s390_adjust_priority (insn, priority)
2348 rtx insn ATTRIBUTE_UNUSED;
2349 int priority;
2351 if (! INSN_P (insn))
2352 return priority;
2354 if (GET_CODE (PATTERN (insn)) == USE
2355 || GET_CODE (PATTERN (insn)) == CLOBBER)
2356 return priority;
2358 switch (get_attr_type (insn))
2360 default:
2361 break;
2363 case TYPE_LA:
2364 if (priority >= 0 && priority < 0x01000000)
2365 priority <<= 3;
2366 break;
2367 case TYPE_LM:
2368 /* LM in epilogue should never be scheduled. This
2369 is due to literal access done in function body.
2370 The usage of register 13 is not mentioned explicitly,
2371 leading to scheduling 'LM' accross this instructions.
2373 priority = 0x7fffffff;
2374 break;
2377 return priority;
/* Pool concept for Linux 390:
   - Function prologue saves used registers
   - literal pool is dumped in prologue and jumped across with bras
   - If the function has more than 4k literals, at about every
     S390_CHUNK_MAX offset in the function a literal pool will be
     dumped
   - in this case, a branch from one chunk to another chunk needs
     a reload of the base register at the code label branched to.  */
2390 /* Index of constant pool chunk that is currently being processed.
2391 Set to -1 before function output has started. */
2392 int s390_pool_count = -1;
2394 /* First insn using the constant pool chunk that is currently being
2395 processed. */
2396 rtx s390_pool_start_insn = NULL_RTX;
2398 /* Called from the ASM_OUTPUT_POOL_PROLOGUE macro to
2399 prepare for printing a literal pool chunk to stdio stream FILE.
2401 FNAME and FNDECL specify the name and type of the current function.
2402 SIZE is the size in bytes of the current literal pool. */
2404 void
2405 s390_asm_output_pool_prologue (file, fname, fndecl, size)
2406 FILE *file;
2407 const char *fname ATTRIBUTE_UNUSED;
2408 tree fndecl;
2409 int size ATTRIBUTE_UNUSED;
2412 if (s390_pool_count>0) {
2414 * We are in an internal pool, branch over
2416 if (TARGET_64BIT)
2418 fprintf (file, "\tlarl\t%s,.LT%X_%X\n",
2419 reg_names[BASE_REGISTER],
2420 s390_function_count, s390_pool_count);
2421 readonly_data_section ();
2422 ASM_OUTPUT_ALIGN (file, floor_log2 (3));
2423 fprintf (file, ".LT%X_%X:\t# Pool %d\n",
2424 s390_function_count, s390_pool_count, s390_pool_count);
2426 else
2427 fprintf (file,"\t.align 4\n\tbras\t%s,0f\n.LT%X_%X:\t# Pool %d \n",
2428 reg_names[BASE_REGISTER],
2429 s390_function_count, s390_pool_count, s390_pool_count);
2431 if (!TARGET_64BIT)
2432 function_section (fndecl);
2435 /* Split all branches that exceed the maximum distance. */
2437 static void
2438 s390_split_branches (void)
2440 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
2441 rtx insn, pat, label, target, jump, tmp;
2443 /* In 64-bit mode we can jump +- 4GB. */
2445 if (TARGET_64BIT)
2446 return;
2448 /* Find all branches that exceed 64KB, and split them. */
2450 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2452 if (GET_CODE (insn) != JUMP_INSN)
2453 continue;
2455 pat = PATTERN (insn);
2456 if (GET_CODE (pat) != SET)
2457 continue;
2459 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
2461 label = SET_SRC (pat);
2463 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
2465 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
2466 label = XEXP (SET_SRC (pat), 1);
2467 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
2468 label = XEXP (SET_SRC (pat), 2);
2469 else
2470 continue;
2472 else
2473 continue;
2475 if (get_attr_length (insn) == 4)
2476 continue;
2478 if (flag_pic)
2480 target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, label), 100);
2481 target = gen_rtx_CONST (SImode, target);
2482 target = force_const_mem (SImode, target);
2483 jump = gen_rtx_REG (Pmode, BASE_REGISTER);
2484 jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
2486 else
2488 target = force_const_mem (Pmode, label);
2489 jump = temp_reg;
2492 if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
2494 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
2495 jump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (SET_SRC (pat), 0),
2496 jump, pc_rtx);
2497 else
2498 jump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (SET_SRC (pat), 0),
2499 pc_rtx, jump);
2502 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
2503 INSN_ADDRESSES_NEW (tmp, -1);
2505 tmp = emit_jump_insn_before (gen_rtx_SET (VOIDmode, pc_rtx, jump), insn);
2506 INSN_ADDRESSES_NEW (tmp, -1);
2508 remove_insn (insn);
2509 insn = tmp;
/* Chunkify the literal pool if required.

   The literal pool is addressed via the base register with a limited
   displacement range; once it grows beyond S390_POOL_MAX, it must be
   split into chunks emitted inline via 'ltorg' insns, and the base
   register must be reloaded at every label that can be reached from a
   different chunk.  */

static void
s390_chunkify_pool (void)
  int *ltorg_uids, max_ltorg, chunk, last_addr, next_addr;
  rtx insn;

  /* Do we need to chunkify the literal pool? */

  if (get_pool_size () <= S390_POOL_MAX)
    return;

  /* Find all insns where a literal pool chunk must be inserted.  */

  /* NOTE(review): buffer is sized from code size, not insn count --
     assumes at most one ltorg site per 1K of code plus slack; confirm.  */
  ltorg_uids = alloca (insn_current_address / 1024 + 1024);
  max_ltorg = 0;

  last_addr = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      /* Stay inside the current chunk as long as we are in range.  */
      if (INSN_ADDRESSES (INSN_UID (insn)) - last_addr < S390_CHUNK_MAX)
	continue;
      if (INSN_ADDRESSES (INSN_UID (insn)) - last_addr > S390_CHUNK_OV)
	abort ();

      /* Prefer a chunk break just before a code label -- but not one
	 that heads a jump table (ADDR_VEC / ADDR_DIFF_VEC).  */
      if (GET_CODE (insn) == CODE_LABEL
	  && !(GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	       && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		   || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC)))
	  ltorg_uids[max_ltorg++] = INSN_UID (prev_real_insn (insn));
	  last_addr = INSN_ADDRESSES (ltorg_uids[max_ltorg-1]);
	  continue;

      /* A call insn is also a valid chunk break point.  */
      if (GET_CODE (insn) == CALL_INSN)
	  ltorg_uids[max_ltorg++] = INSN_UID (insn);
	  last_addr = INSN_ADDRESSES (ltorg_uids[max_ltorg-1]);
	  continue;

  /* -1 terminates the list of chunk break points.  */
  ltorg_uids[max_ltorg] = -1;

  /* Find and mark all labels that are branched into
     from an insn belonging to a different chunk.  */

  chunk = last_addr = 0;
  next_addr = ltorg_uids[chunk] == -1 ? insn_current_address + 1
				      : INSN_ADDRESSES (ltorg_uids[chunk]);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      if (GET_CODE (insn) == JUMP_INSN)
	  rtx pat = PATTERN (insn);
	  if (GET_CODE (pat) == SET)
	      rtx label = 0;

	      /* Unconditional jump.  */
	      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
		  label = XEXP (SET_SRC (pat), 0);
	      /* Conditional jump: the label sits in one of the arms.  */
	      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
		  if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
		    label = XEXP (XEXP (SET_SRC (pat), 1), 0);
		  else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
		    label = XEXP (XEXP (SET_SRC (pat), 2), 0);

	      /* Mark the label if the branch crosses a chunk boundary.  */
	      if (label)
		  if (INSN_ADDRESSES (INSN_UID (label)) <= last_addr
		      || INSN_ADDRESSES (INSN_UID (label)) > next_addr)
		    SYMBOL_REF_USED (label) = 1;
	  else if (GET_CODE (pat) == ADDR_VEC
		   || GET_CODE (pat) == ADDR_DIFF_VEC)
	      /* Jump table: check every target entry.  */
	      int i, diff_p = GET_CODE (pat) == ADDR_DIFF_VEC;

	      for (i = 0; i < XVECLEN (pat, diff_p); i++)
		  rtx label = XEXP (XVECEXP (pat, diff_p, i), 0);

		  if (INSN_ADDRESSES (INSN_UID (label)) <= last_addr
		      || INSN_ADDRESSES (INSN_UID (label)) > next_addr)
		    SYMBOL_REF_USED (label) = 1;

      /* Advance to the next chunk when we pass a break point.  */
      if (INSN_UID (insn) == ltorg_uids[chunk])
	  last_addr = INSN_ADDRESSES (ltorg_uids[chunk++]);
	  next_addr = ltorg_uids[chunk] == -1 ? insn_current_address + 1
					      : INSN_ADDRESSES (ltorg_uids[chunk]);

  /* Insert literal pools and base register reload insns.  */

  chunk = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      if (INSN_UID (insn) == ltorg_uids[chunk])
	  rtx new_insn = gen_ltorg (GEN_INT (chunk++));
	  INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
      if (GET_CODE (insn) == CODE_LABEL && SYMBOL_REF_USED (insn))
	  rtx new_insn = gen_reload_base (insn);
	  INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);

  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());
2642 /* Return true if INSN is a 'ltorg' insn. */
2644 int
2645 s390_stop_dump_lit_p (insn)
2646 rtx insn;
2648 rtx body=PATTERN (insn);
2649 if (GET_CODE (body) == PARALLEL
2650 && GET_CODE (XVECEXP (body, 0, 0)) == SET
2651 && GET_CODE (XVECEXP (body, 0, 1)) == USE
2652 && GET_CODE (XEXP ((XVECEXP (body, 0, 1)),0)) == CONST_INT
2653 && GET_CODE (SET_DEST (XVECEXP (body, 0, 0))) == REG
2654 && REGNO (SET_DEST (XVECEXP (body, 0, 0))) == BASE_REGISTER
2655 && SET_SRC (XVECEXP (body, 0, 0)) == pc_rtx) {
2656 return 1;
2658 else
2659 return 0;
/* Output literal pool chunk to be used for insns
   between insn ACT_INSN and the insn with UID STOP.  */

void
s390_dump_literal_pool (act_insn, stop)
     rtx act_insn;
     rtx stop;
  /* Record where this chunk starts and bump the chunk counter;
     both globals are consumed by the constant pool output code.
     NOTE(review): STOP is not referenced in this body.  */
  s390_pool_start_insn = act_insn;
  s390_pool_count++;
  output_constant_pool (current_function_name, current_function_decl);
  /* Switch back to the function's text section after the pool.  */
  function_section (current_function_decl);
/* Number of elements of current constant pool.  */
int s390_nr_constants;
2679 /* Return true if floating point registers need to be saved. */
2681 static int
2682 save_fprs_p ()
2684 int i;
2685 if (!TARGET_64BIT)
2686 return 0;
2687 for (i=24; i<=31; i++)
2689 if (regs_ever_live[i] == 1)
2690 return 1;
2692 return 0;
/* Output main constant pool to stdio stream FILE.  */

void
s390_output_constant_pool (file)
     FILE *file;
  /* Output constant pool.  */
  if (s390_nr_constants)
      s390_pool_count = 0;
      if (TARGET_64BIT)
	  /* 64 bit: address the pool pc-relative via larl; the pool
	     itself is placed in the read-only data section.  */
	  fprintf (file, "\tlarl\t%s,.LT%X_%X\n", reg_names[BASE_REGISTER],
		   s390_function_count, s390_pool_count);
	  readonly_data_section ();
	  /* NOTE(review): floor_log2 (3) == 1, i.e. 2-byte alignment;
	     looks like a larger power of two was intended -- confirm.  */
	  ASM_OUTPUT_ALIGN (file, floor_log2 (3));
      else
	  /* 31 bit: bras loads the pool address into the base register
	     while branching over the inline pool to label .LTN...  */
	  fprintf (file, "\tbras\t%s,.LTN%X_%X\n", reg_names[BASE_REGISTER],
		   s390_function_count, s390_pool_count);
      fprintf (file, ".LT%X_%X:\n", s390_function_count, s390_pool_count);
      output_constant_pool (current_function_name, current_function_decl);
      fprintf (file, ".LTN%X_%X:\n", s390_function_count,
	       s390_pool_count);
      /* 64 bit emitted the pool elsewhere; return to the text section.  */
      if (TARGET_64BIT)
	function_section (current_function_decl);
2726 /* Find first call clobbered register unsused in a function.
2727 This could be used as base register in a leaf function
2728 or for holding the return address before epilogue. */
2730 static int
2731 find_unused_clobbered_reg ()
2733 int i;
2734 for (i = 0; i < 6; i++)
2735 if (!regs_ever_live[i])
2736 return i;
2737 return 0;
/* Fill FRAME with info about frame of current function.
   Also marks registers live (regs_ever_live) as a side effect when
   the frame layout forces their use.  */

static void
s390_frame_info (frame)
     struct s390_frame *frame;
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();

  if (fsize > 0x7fff0000)
    fatal_error ("Total size of local variables exceeds architecture limit.");

  /* fprs 8 - 15 are caller saved for 64 Bit ABI.  */
  frame->save_fprs_p = save_fprs_p ();

  /* Reserve 64 bytes (8 fprs * 8 bytes) for the fpr save area.  */
  frame->frame_size = fsize + frame->save_fprs_p * 64;

  /* Does function need to setup frame and save area.  */

  if (! current_function_is_leaf
      || frame->frame_size > 0
      || current_function_calls_alloca
      || current_function_stdarg
      || current_function_varargs)
    frame->frame_size += STARTING_FRAME_OFFSET;

  /* If we need to allocate a frame, the stack pointer is changed.  */

  if (frame->frame_size > 0)
    regs_ever_live[STACK_POINTER_REGNUM] = 1;

  /* If there is (possibly) any pool entry, we need to
     load base register.
     The base register is also needed when the frame size does not fit
     a 'K' displacement, or for 31-bit PIC code.  */

  if (get_pool_size ()
      || !CONST_OK_FOR_LETTER_P (frame->frame_size, 'K')
      || (!TARGET_64BIT && current_function_uses_pic_offset_table))
    regs_ever_live[BASE_REGISTER] = 1;

  /* If we need the GOT pointer, remember to save/restore it.  */

  if (current_function_uses_pic_offset_table)
    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Frame pointer needed.  */

  frame->frame_pointer_p = frame_pointer_needed;

  /* Find first and last gpr to be saved.
     Only call-saved gprs 6..15 are considered.  */

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      break;

  if (i == 16)
      /* Nothing to save / restore.  */
      frame->first_save_gpr = -1;
      frame->first_restore_gpr = -1;
      frame->last_save_gpr = -1;
      frame->return_reg_saved_p = 0;
  else
      /* Save / Restore from gpr i to j.  */
      frame->first_save_gpr = i;
      frame->first_restore_gpr = i;
      frame->last_save_gpr = j;
      frame->return_reg_saved_p = (j >= RETURN_REGNUM && i <= RETURN_REGNUM);

  if (current_function_stdarg || current_function_varargs)
      /* Varargs function need to save from gpr 2 to gpr 15.  */
      frame->first_save_gpr = 2;
/* Return offset between argument pointer and frame pointer
   initially after prologue.  */

int
s390_arg_frame_offset ()
  struct s390_frame frame;

  /* Compute frame_info.  */

  s390_frame_info (&frame);

  /* Arguments sit above the register save area of the caller.  */
  return frame.frame_size + STACK_POINTER_OFFSET;
2837 /* Emit insn to save fpr REGNUM at offset OFFSET relative
2838 to register BASE. Return generated insn. */
2840 static rtx
2841 save_fpr (base, offset, regnum)
2842 rtx base;
2843 int offset;
2844 int regnum;
2846 rtx addr;
2847 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
2848 set_mem_alias_set (addr, s390_sr_alias_set);
2850 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
2853 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
2854 to register BASE. Return generated insn. */
2856 static rtx
2857 restore_fpr (base, offset, regnum)
2858 rtx base;
2859 int offset;
2860 int regnum;
2862 rtx addr;
2863 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
2864 set_mem_alias_set (addr, s390_sr_alias_set);
2866 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
/* Output the function prologue assembly code to the
   stdio stream FILE.  The local frame size is passed
   in LSIZE.  Both parameters are unused here; this hook is used to
   run the machine-specific literal pool and branch fixup passes
   just before final assembly output.  */

void
s390_function_prologue (file, lsize)
     FILE *file ATTRIBUTE_UNUSED;
     HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
  /* Split an oversized literal pool into chunks first, then split
     branches whose targets may now be out of range.  */
  s390_chunkify_pool ();
  s390_split_branches ();
/* Output the function epilogue assembly code to the
   stdio stream FILE.  The local frame size is passed
   in LSIZE.  Both parameters are unused; this hook only resets the
   per-function literal pool bookkeeping globals.  */

void
s390_function_epilogue (file, lsize)
     FILE *file ATTRIBUTE_UNUSED;
     HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
  current_function_uses_pic_offset_table = 0;
  s390_pool_start_insn = NULL_RTX;
  s390_pool_count = -1;
  /* Counter used to generate unique pool labels per function.  */
  s390_function_count++;
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue ()
  struct s390_frame frame;
  rtx insn, addr;
  rtx temp_reg;
  int i;

  /* Compute frame_info.  */

  s390_frame_info (&frame);

  /* Choose best register to use for temp use within prologue.
     The return register is free if it is saved anyway and nobody
     asked for its incoming value; otherwise fall back to gpr 1.  */

  if (frame.return_reg_saved_p
      && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM))
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */

  if (frame.first_save_gpr != -1)
      /* Save slot of gpr N lives at SP + N * UNITS_PER_WORD.  */
      addr = plus_constant (stack_pointer_rtx,
			    frame.first_save_gpr * UNITS_PER_WORD);
      addr = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (addr, s390_sr_alias_set);

      if (frame.first_save_gpr != frame.last_save_gpr )
	  insn = emit_insn (gen_store_multiple (addr,
			      gen_rtx_REG (Pmode, frame.first_save_gpr),
			      GEN_INT (frame.last_save_gpr
				       - frame.first_save_gpr + 1)));

	  /* We need to set the FRAME_RELATED flag on all SETs
	     inside the store-multiple pattern.

	     However, we must not emit DWARF records for registers 2..5
	     if they are stored for use by variable arguments ...

	     ??? Unfortunately, it is not enough to simply not set the
	     FRAME_RELATED flags for those SETs, because the first SET
	     of the PARALLEL is always treated as if it had the flag
	     set, even if it does not.  Therefore we emit a new pattern
	     without those registers as REG_FRAME_RELATED_EXPR note.  */

	  if (frame.first_save_gpr >= 6)
	      /* No argument registers involved: flag every SET.  */
	      rtx pat = PATTERN (insn);

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
		  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

	      RTX_FRAME_RELATED_P (insn) = 1;
	  else if (frame.last_save_gpr >= 6)
	      /* Build a substitute pattern starting at gpr 6 and attach
		 it as the DWARF note instead.  */
	      rtx note, naddr;
	      naddr = plus_constant (stack_pointer_rtx, 6 * UNITS_PER_WORD);
	      note = gen_store_multiple (gen_rtx_MEM (Pmode, naddr),
					 gen_rtx_REG (Pmode, 6),
					 GEN_INT (frame.last_save_gpr - 6 + 1));
	      REG_NOTES (insn) =
		gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
				   note, REG_NOTES (insn));

	      for (i = 0; i < XVECLEN (note, 0); i++)
		if (GET_CODE (XVECEXP (note, 0, i)) == SET)
		  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

	      RTX_FRAME_RELATED_P (insn) = 1;
      else
	  /* Only a single register to save.  */
	  insn = emit_move_insn (addr,
				 gen_rtx_REG (Pmode, frame.first_save_gpr));
	  RTX_FRAME_RELATED_P (insn) = 1;

  /* Dump constant pool and set constant pool register (13).  */

  insn = emit_insn (gen_lit ());

  /* Save fprs for variable args.  */

  if (current_function_stdarg || current_function_varargs)
      /* Save fpr 0 and 2.  */

      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);

      if (TARGET_64BIT)
	  /* Save fpr 4 and 6.  */

	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);

  /* Save fprs 4 and 6 if used (31 bit ABI).  */

  if (!TARGET_64BIT)
      /* Save fpr 4 and 6.  */
      if (regs_ever_live[18])
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  RTX_FRAME_RELATED_P (insn) = 1;
      if (regs_ever_live[19])
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	  RTX_FRAME_RELATED_P (insn) = 1;

  /* Decrement stack pointer.  */

  if (frame.frame_size > 0)
      rtx frame_off = GEN_INT (-frame.frame_size);

      /* Save incoming stack pointer into temp reg.  */

      if (TARGET_BACKCHAIN || frame.save_fprs_p)
	  insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      frame_off = GEN_INT (-frame.frame_size);
      if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Attach the canonical SP adjustment as DWARF note, since the
	 real insn may load the offset from the literal pool.  */
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   gen_rtx_PLUS (Pmode, stack_pointer_rtx,
						 GEN_INT (-frame.frame_size))),
			   REG_NOTES (insn));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
	  addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (frame.save_fprs_p)
      /* temp_reg still holds the incoming stack pointer; point it at
	 the fpr save area just below it.  */
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));

      for (i = 24; i < 32; i++)
	if (regs_ever_live[i])
	    rtx addr = plus_constant (stack_pointer_rtx,
				      frame.frame_size - 64 + (i-24)*8);

	    insn = save_fpr (temp_reg, (i-24)*8, i);
	    RTX_FRAME_RELATED_P (insn) = 1;
	    /* DWARF note uses the SP-relative form of the same slot.  */
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
				 gen_rtx_SET (VOIDmode,
					      gen_rtx_MEM (DFmode, addr),
					      gen_rtx_REG (DFmode, i)),
				 REG_NOTES (insn));

  /* Set frame pointer, if needed.  */

  if (frame.frame_pointer_p)
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

  /* Set up got pointer, if needed.  */

  if (current_function_uses_pic_offset_table)
      rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAG (got_symbol) = 1;

      if (TARGET_64BIT)
	  insn = emit_insn (gen_movdi (pic_offset_table_rtx,
				       got_symbol));

	  /* It can happen that the GOT pointer isn't really needed ...  */
	  REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					       REG_NOTES (insn));
      else
	  /* 31 bit: load the GOT offset from the literal pool
	     (wrapped in UNSPEC 100) and add the base register.  */
	  got_symbol = gen_rtx_UNSPEC (VOIDmode,
				       gen_rtvec (1, got_symbol), 100);
	  got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
	  got_symbol = force_const_mem (Pmode, got_symbol);
	  insn = emit_move_insn (pic_offset_table_rtx,
				 got_symbol);
	  REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					       REG_NOTES (insn));

	  insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
					   gen_rtx_REG (Pmode, BASE_REGISTER)));
	  REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					       REG_NOTES (insn));
/* Expand the epilogue into a bunch of separate insns.  */

void
s390_emit_epilogue ()
  struct s390_frame frame;
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset;
  rtvec p;

  /* Compute frame_info.  */

  s390_frame_info (&frame);

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = frame.frame_pointer_p ?
    hard_frame_pointer_rtx : stack_pointer_rtx;

  /* Compute which parts of the save area we need to access;
     AREA_BOTTOM / AREA_TOP are offsets relative to the frame end.  */

  if (frame.first_restore_gpr != -1)
      area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
      area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
  else
      /* Empty range; fpr handling below may still widen it.  */
      area_bottom = INT_MAX;
      area_top = INT_MIN;

  if (TARGET_64BIT)
      if (frame.save_fprs_p)
	  /* fpr save area lies 64 bytes below the frame end.  */
	  if (area_bottom > -64)
	    area_bottom = -64;
	  if (area_top < 0)
	    area_top = 0;
  else
      if (regs_ever_live[18])
	  if (area_bottom > STACK_POINTER_OFFSET - 16)
	    area_bottom = STACK_POINTER_OFFSET - 16;
	  if (area_top < STACK_POINTER_OFFSET - 8)
	    area_top = STACK_POINTER_OFFSET - 8;
      if (regs_ever_live[19])
	  if (area_bottom > STACK_POINTER_OFFSET - 8)
	    area_bottom = STACK_POINTER_OFFSET - 8;
	  if (area_top < STACK_POINTER_OFFSET)
	    area_top = STACK_POINTER_OFFSET;

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
      /* Nothing to restore.  */
  else if (frame.frame_size + area_bottom >= 0
	   && frame.frame_size + area_top <= 4096)
      /* Area is in range of a 12-bit displacement.  */
      offset = frame.frame_size;
  else
      rtx insn, frame_off;

      /* Advance the frame pointer so the save area becomes
	 addressable with small displacements.  */
      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (frame.frame_size - offset);

      if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* DWARF note carries the canonical constant adjustment.  */
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, frame_pointer,
				   gen_rtx_PLUS (Pmode, frame_pointer,
				     GEN_INT (frame.frame_size - offset))),
			   REG_NOTES (insn));

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
      int i;

      if (frame.save_fprs_p)
	for (i = 24; i < 32; i++)
	  if (regs_ever_live[i] && !global_regs[i])
	    restore_fpr (frame_pointer,
			 offset - 64 + (i-24) * 8, i);
  else
      if (regs_ever_live[18] && !global_regs[18])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
      if (regs_ever_live[19] && !global_regs[19])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (frame.first_restore_gpr != -1)
      rtx addr;
      int i;

      /* Check for global register and save them
	 to stack location from where they get restored.
	 This makes the later load multiple a no-op for them.  */

      for (i = frame.first_restore_gpr;
	   i <= frame.last_save_gpr;
	   i++)
	  if (global_regs[i])
	      addr = plus_constant (frame_pointer,
				    offset + i * UNITS_PER_WORD);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));

      /* Fetch return address from stack before load multiple,
	 this will do good for scheduling.  */

      if (frame.last_save_gpr >= RETURN_REGNUM
	  && frame.first_restore_gpr < RETURN_REGNUM)
	  /* Use a free call-clobbered register (gpr 4 as fallback)
	     to hold the return address.  */
	  int return_regnum = find_unused_clobbered_reg();
	  if (!return_regnum)
	    return_regnum = 4;
	  return_reg = gen_rtx_REG (Pmode, return_regnum);

	  addr = plus_constant (frame_pointer,
				offset + RETURN_REGNUM * UNITS_PER_WORD);
	  addr = gen_rtx_MEM (Pmode, addr);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  emit_move_insn (return_reg, addr);

      /* ??? As references to the base register are not made
	 explicit in insn RTX code, we have to add a barrier here
	 to prevent incorrect scheduling.  */

      emit_insn (gen_blockage());

      addr = plus_constant (frame_pointer,
			    offset + frame.first_restore_gpr * UNITS_PER_WORD);
      addr = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (addr, s390_sr_alias_set);

      if (frame.first_restore_gpr != frame.last_save_gpr)
	  emit_insn (gen_load_multiple (
		       gen_rtx_REG (Pmode, frame.first_restore_gpr),
		       addr,
		       GEN_INT (frame.last_save_gpr - frame.first_restore_gpr + 1)));
      else
	  emit_move_insn (gen_rtx_REG (Pmode, frame.first_restore_gpr),
			  addr);

  /* Return to caller; the USE keeps the return address register
     live up to the return insn.  */

  p = rtvec_alloc (2);

  RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
  RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3320 /* Return the size in bytes of a function argument of
3321 type TYPE and/or mode MODE. At least one of TYPE or
3322 MODE must be specified. */
3324 static int
3325 s390_function_arg_size (mode, type)
3326 enum machine_mode mode;
3327 tree type;
3329 if (type)
3330 return int_size_in_bytes (type);
3332 /* No type info available for some library calls ... */
3333 if (mode != BLKmode)
3334 return GET_MODE_SIZE (mode);
3336 /* If we have neither type nor mode, abort */
3337 abort ();
3340 /* Return 1 if a function argument of type TYPE and mode MODE
3341 is to be passed by reference. The ABI specifies that only
3342 structures of size 1, 2, 4, or 8 bytes are passed by value,
3343 all other structures (and complex numbers) are passed by
3344 reference. */
3347 s390_function_arg_pass_by_reference (mode, type)
3348 enum machine_mode mode;
3349 tree type;
3351 int size = s390_function_arg_size (mode, type);
3353 if (type)
3355 if (AGGREGATE_TYPE_P (type) &&
3356 size != 1 && size != 2 && size != 4 && size != 8)
3357 return 1;
3359 if (TREE_CODE (type) == COMPLEX_TYPE)
3360 return 1;
3362 return 0;
3366 /* Update the data in CUM to advance over an argument of mode MODE and
3367 data type TYPE. (TYPE is null for libcalls where that information
3368 may not be available.). The boolean NAMED specifies whether the
3369 argument is a named argument (as opposed to an unnamed argument
3370 matching an ellipsis). */
3372 void
3373 s390_function_arg_advance (cum, mode, type, named)
3374 CUMULATIVE_ARGS *cum;
3375 enum machine_mode mode;
3376 tree type;
3377 int named ATTRIBUTE_UNUSED;
3379 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
3381 cum->fprs++;
3383 else if (s390_function_arg_pass_by_reference (mode, type))
3385 cum->gprs += 1;
3387 else
3389 int size = s390_function_arg_size (mode, type);
3390 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
3394 /* Define where to put the arguments to a function.
3395 Value is zero to push the argument on the stack,
3396 or a hard register in which to store the argument.
3398 MODE is the argument's machine mode.
3399 TYPE is the data type of the argument (as a tree).
3400 This is null for libcalls where that information may
3401 not be available.
3402 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3403 the preceding args and about the function being called.
3404 NAMED is nonzero if this argument is a named parameter
3405 (otherwise it is an extra parameter matching an ellipsis).
3407 On S/390, we use general purpose registers 2 through 6 to
3408 pass integer, pointer, and certain structure arguments, and
3409 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
3410 to pass floating point arguments. All remaining arguments
3411 are pushed to the stack. */
3414 s390_function_arg (cum, mode, type, named)
3415 CUMULATIVE_ARGS *cum;
3416 enum machine_mode mode;
3417 tree type;
3418 int named ATTRIBUTE_UNUSED;
3420 if (s390_function_arg_pass_by_reference (mode, type))
3421 return 0;
3423 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
3425 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
3426 return 0;
3427 else
3428 return gen_rtx (REG, mode, cum->fprs + 16);
3430 else
3432 int size = s390_function_arg_size (mode, type);
3433 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
3435 if (cum->gprs + n_gprs > 5)
3436 return 0;
3437 else
3438 return gen_rtx (REG, mode, cum->gprs + 2);
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;
        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

tree
s390_build_va_list ()
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Build the four fields in declaration order.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields into the record and lay it out.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is true if implementing __builtin_stdarg_va_start,
   false if implementing __builtin_varargs_va_start.  NEXTARG
   points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     current_function_args_info:
       holds number of gprs and fprs used for named arguments.
     current_function_arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

void
s390_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Fields of the va_list record, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is an array of one record; address its element's fields.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  /* For varargs (not stdarg), the last named argument is itself
     retrieved through va_arg, so back up one word.  */
  if (! stdarg_p)
    off = off > 0 ? off - UNITS_PER_WORD : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
	     (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));

  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-STACK_POINTER_OFFSET, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement va_arg by updating the va_list structure
   VALIST as required to retrieve an argument of type
   TYPE, and returning that argument.

   Generates code equivalent to:

   if (integral value) {
     if (size <= 4 && args.gpr < 5 ||
	 size > 4 && args.gpr < 4 )
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fgpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

s390_va_arg (valist, type)
     tree valist;
     tree type;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Fields of the va_list record, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);

  /* Classify the argument and set up the register-area parameters:
     REG/N_REG = counter field and registers consumed, SAV_OFS/SAV_SCALE
     = where the slot lives in the save area, MAX_REG = highest counter
     value for which the argument is still in registers.  */

  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
      if (TARGET_DEBUG_ARG)
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 2 * UNITS_PER_WORD;
      sav_scale = UNITS_PER_WORD;
      size = UNITS_PER_WORD;
      max_reg = 4;
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
      if (TARGET_DEBUG_ARG)
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT has up to 4 parameter in fprs */
      max_reg = TARGET_64BIT ? 3 : 1;
  else
      if (TARGET_DEBUG_ARG)
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      /* Small scalars sit right-justified in their word slot.  */
      sav_ofs = 2 * UNITS_PER_WORD;
      if (TARGET_64BIT)
	sav_ofs += TYPE_MODE (type) == SImode ? 4 :
		   TYPE_MODE (type) == HImode ? 6 :
		   TYPE_MODE (type) == QImode ? 7 : 0;
      else
	sav_ofs += TYPE_MODE (type) == HImode ? 2 :
		   TYPE_MODE (type) == QImode ? 3 : 0;

      sav_scale = UNITS_PER_WORD;
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* If the register counter exceeds MAX_REG, fall through to the
     overflow (stack) path.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
			   GEN_INT (max_reg),
			   GT, const1_rtx, Pmode, 0, lab_false);

  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  /* addr = sav + sav_ofs + reg * sav_scale  */
  u = build (MULT_EXPR, long_integer_type_node,
	     reg, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... Otherwise out of the overflow area.  */

  t = save_expr (ovf);

  /* In 64 BIT for each argument on stack, a full 64 bit slot is
     allocated; adjust so small scalars are right-justified.  */
  if (size < UNITS_PER_WORD)
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past the argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* If less than max_regs a registers are retrieved out
     of register save area, increment.  */

  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	     build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;
  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* By-reference arguments: dereference once more to get at the
     actual object.  */
  if (indirect_p)
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);

  return addr_rtx;
3764 /* Output assembly code for the trampoline template to
3765 stdio stream FILE.
3767 On S/390, we use gpr 1 internally in the trampoline code;
3768 gpr 0 is used to hold the static chain. */
3770 void
3771 s390_trampoline_template (file)
3772 FILE *file;
3774 if (TARGET_64BIT)
3776 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
3777 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
3778 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
3779 fprintf (file, "br\t%s\n", reg_names[1]);
3780 fprintf (file, "0:\t.quad\t0\n");
3781 fprintf (file, ".quad\t0\n");
3783 else
3785 fprintf (file, "basr\t%s,0\n", reg_names[1]);
3786 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
3787 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
3788 fprintf (file, "br\t%s\n", reg_names[1]);
3789 fprintf (file, ".long\t0\n");
3790 fprintf (file, ".long\t0\n");
3794 /* Emit RTL insns to initialize the variable parts of a trampoline.
3795 FNADDR is an RTX for the address of the function's pure code.
3796 CXT is an RTX for the static chain value for the function. */
3798 void
3799 s390_initialize_trampoline (addr, fnaddr, cxt)
3800 rtx addr;
3801 rtx fnaddr;
3802 rtx cxt;
3804 emit_move_insn (gen_rtx
3805 (MEM, Pmode,
3806 memory_address (Pmode,
3807 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
3808 emit_move_insn (gen_rtx
3809 (MEM, Pmode,
3810 memory_address (Pmode,
3811 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
3814 /* Return rtx for 64-bit constant formed from the 32-bit subwords
3815 LOW and HIGH, independent of the host word size. */
3818 s390_gen_rtx_const_DI (high, low)
3819 int high;
3820 int low;
3822 #if HOST_BITS_PER_WIDE_INT >= 64
3823 HOST_WIDE_INT val;
3824 val = (HOST_WIDE_INT)high;
3825 val <<= 32;
3826 val |= (HOST_WIDE_INT)low;
3828 return GEN_INT (val);
3829 #else
3830 #if HOST_BITS_PER_WIDE_INT >= 32
3831 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
3832 #else
3833 abort ();
3834 #endif
3835 #endif
3838 /* Output assembler code to FILE to increment profiler label # LABELNO
3839 for profiling a function entry. */
3841 void
3842 s390_function_profiler (file, labelno)
3843 FILE *file;
3844 int labelno;
3846 rtx op[7];
3848 char label[128];
3849 sprintf (label, "%sP%d", LPREFIX, labelno);
3851 fprintf (file, "# function profiler \n");
3853 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
3854 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
3855 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
3857 op[2] = gen_rtx_REG (Pmode, 1);
3858 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
3859 SYMBOL_REF_FLAG (op[3]) = 1;
3861 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
3862 if (flag_pic)
3864 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
3865 op[4] = gen_rtx_CONST (Pmode, op[4]);
3868 if (TARGET_64BIT)
3870 output_asm_insn ("stg\t%0,%1", op);
3871 output_asm_insn ("larl\t%2,%3", op);
3872 output_asm_insn ("brasl\t%0,%4", op);
3873 output_asm_insn ("lg\t%0,%1", op);
3875 else if (!flag_pic)
3877 op[6] = gen_label_rtx ();
3879 output_asm_insn ("st\t%0,%1", op);
3880 output_asm_insn ("bras\t%2,%l6", op);
3881 output_asm_insn (".long\t%4", op);
3882 output_asm_insn (".long\t%3", op);
3883 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
3884 output_asm_insn ("l\t%0,0(%2)", op);
3885 output_asm_insn ("l\t%2,4(%2)", op);
3886 output_asm_insn ("basr\t%0,%0", op);
3887 output_asm_insn ("l\t%0,%1", op);
3889 else
3891 op[5] = gen_label_rtx ();
3892 op[6] = gen_label_rtx ();
3894 output_asm_insn ("st\t%0,%1", op);
3895 output_asm_insn ("bras\t%2,%l6", op);
3896 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
3897 output_asm_insn (".long\t%4-%l5", op);
3898 output_asm_insn (".long\t%3-%l5", op);
3899 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
3900 output_asm_insn ("lr\t%0,%2", op);
3901 output_asm_insn ("a\t%0,0(%2)", op);
3902 output_asm_insn ("a\t%2,4(%2)", op);
3903 output_asm_insn ("basr\t%0,%0", op);
3904 output_asm_insn ("l\t%0,%1", op);