/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "tree-gimple.h"

/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction (TFmode multiply).  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of a DXBR instruction (TFmode BFP divide).  */
  const int dxr;      /* cost of a DXR instruction (TFmode HFP divide).  */
  const int ddbr;     /* cost of a DDBR instruction (DFmode BFP divide).  */
  const int ddr;      /* cost of a DDR instruction (DFmode HFP divide).  */
  const int debr;     /* cost of a DEBR instruction (SFmode BFP divide).  */
  const int der;      /* cost of a DER instruction (SFmode HFP divide).  */
  const int dlgr;     /* cost of a DLGR instruction (64-bit unsigned divide).  */
  const int dlr;      /* cost of a DLR instruction (32-bit unsigned divide).  */
  const int dr;       /* cost of a DR instruction (signed divide).  */
  const int dsgfr;    /* cost of a DSGFR instruction (64/32-bit signed divide).  */
  const int dsgr;     /* cost of a DSGR instruction (64-bit signed divide).  */
};

const struct processor_costs *s390_cost;

static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),    /* M     */
  COSTS_N_INSNS (10),   /* MGHI  */
  COSTS_N_INSNS (5),    /* MH    */
  COSTS_N_INSNS (4),    /* MHI   */
  COSTS_N_INSNS (5),    /* ML    */
  COSTS_N_INSNS (5),    /* MR    */
  COSTS_N_INSNS (4),    /* MS    */
  COSTS_N_INSNS (15),   /* MSG   */
  COSTS_N_INSNS (7),    /* MSGF  */
  COSTS_N_INSNS (7),    /* MSGFR */
  COSTS_N_INSNS (10),   /* MSGR  */
  COSTS_N_INSNS (4),    /* MSR   */
  COSTS_N_INSNS (7),    /* multiplication in DFmode */
  COSTS_N_INSNS (13),   /* MXBR  */
  COSTS_N_INSNS (136),  /* SQXBR */
  COSTS_N_INSNS (44),   /* SQDBR */
  COSTS_N_INSNS (35),   /* SQEBR */
  COSTS_N_INSNS (18),   /* MADBR */
  COSTS_N_INSNS (13),   /* MAEBR */
  COSTS_N_INSNS (134),  /* DXBR  */
  COSTS_N_INSNS (135),  /* DXR   */
  COSTS_N_INSNS (30),   /* DDBR  */
  COSTS_N_INSNS (30),   /* DDR   */
  COSTS_N_INSNS (27),   /* DEBR  */
  COSTS_N_INSNS (26),   /* DER   */
  COSTS_N_INSNS (220),  /* DLGR  */
  COSTS_N_INSNS (34),   /* DLR   */
  COSTS_N_INSNS (34),   /* DR    */
  COSTS_N_INSNS (32),   /* DSGFR */
  COSTS_N_INSNS (32),   /* DSGR  */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),    /* M     */
  COSTS_N_INSNS (2),    /* MGHI  */
  COSTS_N_INSNS (2),    /* MH    */
  COSTS_N_INSNS (2),    /* MHI   */
  COSTS_N_INSNS (4),    /* ML    */
  COSTS_N_INSNS (4),    /* MR    */
  COSTS_N_INSNS (5),    /* MS    */
  COSTS_N_INSNS (6),    /* MSG   */
  COSTS_N_INSNS (4),    /* MSGF  */
  COSTS_N_INSNS (4),    /* MSGFR */
  COSTS_N_INSNS (4),    /* MSGR  */
  COSTS_N_INSNS (4),    /* MSR   */
  COSTS_N_INSNS (1),    /* multiplication in DFmode */
  COSTS_N_INSNS (28),   /* MXBR  */
  COSTS_N_INSNS (130),  /* SQXBR */
  COSTS_N_INSNS (66),   /* SQDBR */
  COSTS_N_INSNS (38),   /* SQEBR */
  COSTS_N_INSNS (1),    /* MADBR */
  COSTS_N_INSNS (1),    /* MAEBR */
  COSTS_N_INSNS (60),   /* DXBR  */
  COSTS_N_INSNS (72),   /* DXR   */
  COSTS_N_INSNS (40),   /* DDBR  */
  COSTS_N_INSNS (44),   /* DDR   */
  COSTS_N_INSNS (26),   /* DEBR  */
  COSTS_N_INSNS (28),   /* DER   */
  COSTS_N_INSNS (176),  /* DLGR  */
  COSTS_N_INSNS (31),   /* DLR   */
  COSTS_N_INSNS (31),   /* DR    */
  COSTS_N_INSNS (31),   /* DSGFR */
  COSTS_N_INSNS (31),   /* DSGR  */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),    /* M     */
  COSTS_N_INSNS (2),    /* MGHI  */
  COSTS_N_INSNS (2),    /* MH    */
  COSTS_N_INSNS (2),    /* MHI   */
  COSTS_N_INSNS (4),    /* ML    */
  COSTS_N_INSNS (4),    /* MR    */
  COSTS_N_INSNS (5),    /* MS    */
  COSTS_N_INSNS (6),    /* MSG   */
  COSTS_N_INSNS (4),    /* MSGF  */
  COSTS_N_INSNS (4),    /* MSGFR */
  COSTS_N_INSNS (4),    /* MSGR  */
  COSTS_N_INSNS (4),    /* MSR   */
  COSTS_N_INSNS (1),    /* multiplication in DFmode */
  COSTS_N_INSNS (28),   /* MXBR  */
  COSTS_N_INSNS (130),  /* SQXBR */
  COSTS_N_INSNS (66),   /* SQDBR */
  COSTS_N_INSNS (38),   /* SQEBR */
  COSTS_N_INSNS (1),    /* MADBR */
  COSTS_N_INSNS (1),    /* MAEBR */
  COSTS_N_INSNS (60),   /* DXBR  */
  COSTS_N_INSNS (72),   /* DXR   */
  COSTS_N_INSNS (40),   /* DDBR  */
  COSTS_N_INSNS (37),   /* DDR   */
  COSTS_N_INSNS (26),   /* DEBR  */
  COSTS_N_INSNS (28),   /* DER   */
  COSTS_N_INSNS (30),   /* DLGR  */
  COSTS_N_INSNS (23),   /* DLR   */
  COSTS_N_INSNS (23),   /* DR    */
  COSTS_N_INSNS (24),   /* DSGFR */
  COSTS_N_INSNS (24),   /* DSGR  */
};

extern int reload_completed;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Save the result of a compare_and_swap until the branch or scc is
   emitted.  */
rtx s390_compare_emitted = NULL_RTX;

/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};

/* Which cpu are we tuning for.  */
enum processor_type s390_tune = PROCESSOR_max;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

HOST_WIDE_INT s390_warn_framesize = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Numbers of the first and last GPRs for which slots in the
     register save area are reserved.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

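  /* For example, f4 is hard reg 18 and thus bit 2: after
     cfun_set_fpr_bit (2) below, the bitmap has the value 0x4.
     Bits 0-7 cover the FPRs in the order f0, f2, f4, f6, f1, f3,
     f5, f7; bits 8-15 cover f8-f15 in ascending order.  */
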
  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True during final stage of literal pool processing.  */
  bool decomposed_literal_pool_addresses_ok_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};

/* A few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))

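/* For instance, prologue code can record that f4 (bit 2) needs saving
   with cfun_set_fpr_bit (2) and test it later with cfun_fpr_bit_p (2);
   with first_save_gpr_slot == 6 and last_save_gpr_slot == 15,
   cfun_gprs_save_area_size evaluates to 10 * UNITS_PER_WORD.  */
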
/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
  CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
  CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
  CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
  CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
  CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE) \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

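/* E.g. on a 64-bit target a TImode value occupies a GPR pair, so
   REGNO_PAIR_OK accepts the pair starting in r2 but rejects one
   starting in the odd register r3.  */
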
/* Return true if the back end supports mode MODE.  */

static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}

/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}

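/* E.g. a CCZmode (equality-only) user can consume a CCUmode (unsigned
   comparison) value: s390_cc_modes_compatible (CCZmode, CCUmode)
   returns CCUmode, since CCUmode also encodes equality.  */
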
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative.  So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero.  Knowing the sign of the
         constant the overflow behavior gets predictable.  e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP (op0, 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}

/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_CMPINT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CMPINT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}

/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx ret = NULL_RTX;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are compatible.  */
  if (s390_compare_emitted
      && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
          == GET_MODE (s390_compare_emitted)))
    ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
  else
    {
      rtx cc = gen_rtx_REG (mode, CC_REGNUM);

      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
      ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
    }
  s390_compare_emitted = NULL_RTX;
  return ret;
}

/* Emit a SImode compare and swap instruction setting MEM to NEW if the
   current contents of MEM match CMP; the previous contents of MEM are
   returned in OLD.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
{
  rtx ret;

  emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
  ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);

  s390_compare_emitted = NULL_RTX;

  return ret;
}

/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}

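/* Illustrative use of the two helpers above in an expander:

     rtx cond = s390_emit_compare (GT, a, b);
     s390_emit_jump (label, cond);

   emits a compare of A and B followed by a branch to LABEL that is
   taken when A > B.  */
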
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC1;
        case NE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC2;
        case NE: return CC0 | CC1 | CC3;
        default: return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC3;
        case NE: return CC0 | CC1 | CC2;
        default: return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0 | CC2;
        case NE: return CC1 | CC3;
        default: return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default:  return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default:  return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        case LTU: return CC1;
        case GTU: return CC3;
        case LEU: return CC1 | CC2;
        case GEU: return CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default:  return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1 | CC3;
        case GT: return CC2;
        case LE: return CC0 | CC1 | CC3;
        case GE: return CC0 | CC2;
        default: return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1;
        case GT: return CC2 | CC3;
        case LE: return CC0 | CC1;
        case GE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}

/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);
  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}

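/* For example, EQ tested in CCZmode yields mask CC0 == 8 and thus the
   mnemonic "e" (branch on equal); with INV set the mask becomes 7 and
   the mnemonic "ne".  */
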
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}

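/* For example, with a 64-bit HOST_WIDE_INT,
   s390_extract_part (GEN_INT (0x12340000), HImode, 0) scans the
   halfwords from the right and returns 0x1234, the first part that
   differs from the default value 0.  */
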
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }

  return part == -1 ? -1 : n_parts - 1 - part;
}

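/* For example, s390_single_part (GEN_INT (0xffff), SImode, HImode, 0)
   returns 1: exactly one halfword differs from 0, and since parts are
   numbered from the most significant end, the low halfword of an
   SImode value is part 1.  */
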
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}

/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block operations.
     Overlapping operations would otherwise be recognized by the S/390 hardware
     and would fall back to a slower implementation.  Allowing overlapping
     operations would lead to slow code but not to wrong code.  Therefore we are
     somewhat optimistic if we cannot prove that the memory blocks are
     overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}

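/* E.g. if MEM2 addresses MEM1 plus 4, the computed delta is the
   constant 4, so s390_overlap_p returns true for SIZE 8 but false
   for SIZE 4.  */
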
/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}

/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}

/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}

/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}

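/* For instance, an SImode AND of a memory word with 0xffffff00 only
   changes the least significant byte, so it is narrowed to a QImode
   AND of the byte at offset 3 with mask 0x00 (an NI instruction).  */
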
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}

/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;

  /* Use MVCLE instructions to decrease code size if requested.  */
  if (size != 0)
    target_flags |= MASK_MVCLE;
}

/* Return true if ARG is the name of a processor.  Set *TYPE and *FLAGS
   to the associated processor_type and processor_flags if so.  */

static bool
s390_handle_arch_option (const char *arg,
                         enum processor_type *type,
                         enum processor_flags *flags)
{
  static struct pta
    {
      const char *const name;           /* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
                                    | PF_LONG_DISPLACEMENT},
      {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                        | PF_LONG_DISPLACEMENT | PF_EXTIMM},
    };
  size_t i;

  for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
    if (strcmp (arg, processor_alias_table[i].name) == 0)
      {
        *type = processor_alias_table[i].processor;
        *flags = processor_alias_table[i].flags;
        return true;
      }
  return false;
}

/* Implement TARGET_HANDLE_OPTION.  */

static bool
s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_march_:
      return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);

    case OPT_mstack_guard_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
        return false;
      if (exact_log2 (s390_stack_guard) == -1)
        error ("stack guard value must be an exact power of 2");
      return true;

    case OPT_mstack_size_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
        return false;
      if (exact_log2 (s390_stack_size) == -1)
        error ("stack size must be an exact power of 2");
      return true;

    case OPT_mtune_:
      return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);

    case OPT_mwarn_framesize_:
      return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;

    default:
      return true;
    }
}

void
override_options (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  /* Set processor cost function.  */
  if (s390_tune == PROCESSOR_2094_Z9_109)
    s390_cost = &z9_109_cost;
  else if (s390_tune == PROCESSOR_2084_Z990)
    s390_cost = &z990_cost;
  else
    s390_cost = &z900_cost;

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (!s390_stack_guard)
        error ("-mstack-size implies use of -mstack-guard");
      else if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif
}

/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};

/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}

/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}

/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;       /* index + base */
              base = op1;
            }

          else
            {
              base = op0;       /* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0); /* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        {
          return false;
        }
    }

  else
    disp = addr;                /* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = true;

      if ((reload_completed || reload_in_progress)
          && indx == cfun->machine->base_reg)
        pointer = indx_ptr = literal_pool = true;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (!disp)
    {
      /* If virtual registers are involved, the displacement will change later
         anyway as the virtual registers get eliminated.  This could make a
         valid displacement invalid, but it is more likely to make an invalid
         displacement valid, because we sometimes access the register save area
         via negative offsets to one of those registers.
         Thus we don't check the displacement for validity here.  If after
         elimination the displacement turns out to be invalid after all,
         this is fixed up by reload in any case.  */
      if (base != arg_pointer_rtx
          && indx != arg_pointer_rtx
          && base != return_address_pointer_rtx
          && indx != return_address_pointer_rtx
          && base != frame_pointer_rtx
          && indx != frame_pointer_rtx
          && base != virtual_stack_vars_rtx
          && indx != virtual_stack_vars_rtx)
        if (!DISP_IN_RANGE (offset))
          return false;
    }
  else
    {
      /* All the special cases are pointers.  */
      pointer = true;

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      if (GET_CODE (disp) == UNSPEC
          && (XINT (disp, 1) == UNSPEC_GOT
              || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
          && flag_pic == 1)
        {
          ;
        }

      /* Accept chunkified literal pool symbol references.  */
      else if (cfun && cfun->machine
               && cfun->machine->decomposed_literal_pool_addresses_ok_p
               && GET_CODE (disp) == MINUS
               && GET_CODE (XEXP (disp, 0)) == LABEL_REF
               && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
        {
          ;
        }

      /* Accept literal pool references.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
        {
          orig_disp = gen_rtx_CONST (Pmode, disp);
          if (offset)
            {
              /* If we have an offset, make sure it does not
                 exceed the size of the constant pool entry.  */
              rtx sym = XVECEXP (disp, 0, 0);
              if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
                return false;

              orig_disp = plus_constant (orig_disp, offset);
            }
        }

      else
        return false;
    }

  if (!base && !indx)
    pointer = true;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = orig_disp;
      out->pointer = pointer;
      out->literal_pool = literal_pool;
    }

  return true;
}

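/* For example, (plus (plus (reg %r1) (reg %r2)) (const_int 16))
   decomposes into indx = %r1, base = %r2 and a displacement of 16,
   matching the canonical base + index + displacement form.  */
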
/* Decompose a RTL expression OP for a shift count into its components,
   and return the base register in BASE and the offset in OFFSET.

   Return true if OP is a valid shift count, false if not.  */

bool
s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT off = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      off = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (op && GET_CODE (op) != REG)
    return false;

  if (offset)
    *offset = off;
  if (base)
    *base = op;

  return true;
}

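/* For instance, the shift count (plus (reg %r3) (const_int 7))
   decomposes into base = %r3 and offset = 7, while a plain CONST_INT
   yields a NULL base.  */
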
/* Return true if OP is a valid address without index.  */

bool
s390_legitimate_address_without_index_p (rtx op)
{
  struct s390_address addr;

  if (!s390_decompose_address (XEXP (op, 0), &addr))
    return false;
  if (addr.indx)
    return false;

  return true;
}

/* Evaluates constraint strings described by the regular expression
   ([A|B](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for
   the constraint given in STR, or 0 else.  */

int
s390_mem_constraint (const char *str, rtx op)
{
  struct s390_address addr;
  char c = str[0];

  /* Check for offsettable variants of memory constraints.  */
  if (c == 'A')
    {
      /* Only accept non-volatile MEMs.  */
      if (!MEM_P (op) || MEM_VOLATILE_P (op))
        return 0;

      if ((reload_completed || reload_in_progress)
          ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
        return 0;

      c = str[1];
    }

  /* Check for non-literal-pool variants of memory constraints.  */
  else if (c == 'B')
    {
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.literal_pool)
        return 0;

      c = str[1];
    }

  switch (c)
    {
    case 'Q':
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'R':
      if (GET_CODE (op) != MEM)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (XEXP (op, 0), &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (XEXP (op, 0), &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (op, &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'Y':
      /* Simply check for the basic form of a shift count.  Reload will
         take care of making sure we have a proper base register.  */
      if (!s390_decompose_shift_count (op, NULL, NULL))
        return 0;
      break;

    default:
      return 0;
    }

  return 1;
}

/* Evaluates constraint strings starting with letter O.  Input
   parameter C is the second letter following the "O" in the constraint
   string.  Returns 1 if VALUE meets the respective constraint and 0
   otherwise.  */

int
s390_O_constraint_str (const char c, HOST_WIDE_INT value)
{
  if (!TARGET_EXTIMM)
    return 0;

  switch (c)
    {
    case 's':
      return trunc_int_for_mode (value, SImode) == value;

    case 'p':
      return value == 0
             || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;

    case 'n':
      return value == -1
             || s390_single_part (GEN_INT (value), DImode, SImode, -1) == 1;

    default:
      gcc_unreachable ();
    }
}

/* Evaluates constraint strings starting with letter N.  Parameter STR
   contains the letters following letter "N" in the constraint string.
   Returns true if VALUE matches the constraint.  */

int
s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
{
  enum machine_mode mode, part_mode;
  int def;
  int part, part_goal;

  if (str[0] == 'x')
    part_goal = -1;
  else
    part_goal = str[0] - '0';

  switch (str[1])
    {
    case 'Q':
      part_mode = QImode;
      break;
    case 'H':
      part_mode = HImode;
      break;
    case 'S':
      part_mode = SImode;
      break;
    default:
      return 0;
    }

  switch (str[2])
    {
    case 'H':
      mode = HImode;
      break;
    case 'S':
      mode = SImode;
      break;
    case 'D':
      mode = DImode;
      break;
    default:
      return 0;
    }

  switch (str[3])
    {
    case '0':
      def = 0;
      break;
    case 'F':
      def = -1;
      break;
    default:
      return 0;
    }

  if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
    return 0;

  part = s390_single_part (GEN_INT (value), mode, part_mode, def);
  if (part < 0)
    return 0;
  if (part_goal != -1 && part_goal != part)
    return 0;

  return 1;
}

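/* For example, the string "1SD0" (constraint "N1SD0") accepts a DImode
   constant whose low SImode word is its only part different from 0,
   e.g. 0xffffffff: counting from the most significant end, part 1 is
   the single nonzero SImode part.  */
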
/* Returns true if the input parameter VALUE is a float zero.  */

int
s390_float_const_zero_p (rtx value)
{
  return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
          && value == CONST0_RTX (GET_MODE (value)));
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE contains GET_CODE (x), OUTER_CODE contains the code
   of the superexpression of x.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case MEM:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
    case AND:
    case IOR:
    case XOR:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return false;

    case PLUS:
    case MINUS:
      /* Check for multiply and add.  */
      if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
          && GET_CODE (XEXP (x, 0)) == MULT
          && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT && TARGET_FUSED_MADD)
        {
          /* This is the multiply and add case.  */
          if (GET_MODE (x) == DFmode)
            *total = s390_cost->madbr;
          else
            *total = s390_cost->maebr;
          *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
                    + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
                    + rtx_cost (XEXP (x, 1), code);
          return true;  /* Do not do an additional recursive descent.  */
        }
      *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      switch (GET_MODE (x))
        {
        case SImode:
          {
            rtx left = XEXP (x, 0);
            rtx right = XEXP (x, 1);
            if (GET_CODE (right) == CONST_INT
                && CONST_OK_FOR_K (INTVAL (right)))
              *total = s390_cost->mhi;
            else if (GET_CODE (left) == SIGN_EXTEND)
              *total = s390_cost->mh;
            else
              *total = s390_cost->ms;  /* msr, ms, msy */
            break;
          }
        case DImode:
          {
            rtx left = XEXP (x, 0);
            rtx right = XEXP (x, 1);
            if (TARGET_64BIT)
              {
                if (GET_CODE (right) == CONST_INT
                    && CONST_OK_FOR_K (INTVAL (right)))
                  *total = s390_cost->mghi;
                else if (GET_CODE (left) == SIGN_EXTEND)
                  *total = s390_cost->msgf;
                else
                  *total = s390_cost->msg;  /* msgr, msg */
              }
            else /* TARGET_31BIT */
              {
                if (GET_CODE (left) == SIGN_EXTEND
                    && GET_CODE (right) == SIGN_EXTEND)
                  /* mulsidi case: mr, m */
                  *total = s390_cost->m;
                else if (GET_CODE (left) == ZERO_EXTEND
                         && GET_CODE (right) == ZERO_EXTEND
                         && TARGET_CPU_ZARCH)
                  /* umulsidi case: ml, mlr */
                  *total = s390_cost->ml;
                else
                  /* Complex calculation is required.  */
                  *total = COSTS_N_INSNS (40);
              }
            break;
          }
        case SFmode:
        case DFmode:
          *total = s390_cost->mult_df;
          break;
        case TFmode:
          *total = s390_cost->mxbr;
          break;
        default:
          return false;
        }
      return false;

    case UDIV:
    case UMOD:
      if (GET_MODE (x) == TImode)              /* 128 bit division */
        *total = s390_cost->dlgr;
      else if (GET_MODE (x) == DImode)
        {
          rtx right = XEXP (x, 1);
          if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
            *total = s390_cost->dlr;
          else                                 /* 64 by 64 bit division */
            *total = s390_cost->dlgr;
        }
      else if (GET_MODE (x) == SImode)         /* 32 bit division */
        *total = s390_cost->dlr;
      return false;

    case DIV:
    case MOD:
      if (GET_MODE (x) == DImode)
        {
          rtx right = XEXP (x, 1);
          if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
            if (TARGET_64BIT)
              *total = s390_cost->dsgfr;
            else
              *total = s390_cost->dr;
          else                                 /* 64 by 64 bit division */
            *total = s390_cost->dsgr;
        }
      else if (GET_MODE (x) == SImode)         /* 32 bit division */
        *total = s390_cost->dlr;
      else if (GET_MODE (x) == SFmode)
        {
          if (TARGET_IEEE_FLOAT)
            *total = s390_cost->debr;
          else /* TARGET_IBM_FLOAT */
            *total = s390_cost->der;
        }
      else if (GET_MODE (x) == DFmode)
        {
          if (TARGET_IEEE_FLOAT)
            *total = s390_cost->ddbr;
          else /* TARGET_IBM_FLOAT */
2259 *total = s390_cost->ddr;
2261 else if (GET_MODE (x) == TFmode)
2263 if (TARGET_IEEE_FLOAT)
2264 *total = s390_cost->dxbr;
2265 else /* TARGET_IBM_FLOAT */
2266 *total = s390_cost->dxr;
2268 return false;
2270 case SQRT:
2271 if (GET_MODE (x) == SFmode)
2272 *total = s390_cost->sqebr;
2273 else if (GET_MODE (x) == DFmode)
2274 *total = s390_cost->sqdbr;
2275 else /* TFmode */
2276 *total = s390_cost->sqxbr;
2277 return false;
2279 case SIGN_EXTEND:
2280 case ZERO_EXTEND:
2281 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2282 || outer_code == PLUS || outer_code == MINUS
2283 || outer_code == COMPARE)
2284 *total = 0;
2285 return false;
2287 case COMPARE:
2288 *total = COSTS_N_INSNS (1);
2289 if (GET_CODE (XEXP (x, 0)) == AND
2290 && GET_CODE (XEXP (x, 1)) == CONST_INT
2291 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2293 rtx op0 = XEXP (XEXP (x, 0), 0);
2294 rtx op1 = XEXP (XEXP (x, 0), 1);
2295 rtx op2 = XEXP (x, 1);
2297 if (memory_operand (op0, GET_MODE (op0))
2298 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2299 return true;
2300 if (register_operand (op0, GET_MODE (op0))
2301 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2302 return true;
2304 return false;
2306 default:
2307 return false;
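/* For illustration, a few MULT shapes and the cost fields they map to
   (mnemonics as in the per-CPU cost table comments):

     (mult:SI (reg:SI a) (const_int 3))           -> mhi
     (mult:SI (sign_extend:SI (mem:HI ...)) ...)  -> mh
     (mult:DI (reg:DI a) (reg:DI b))              -> msg  (TARGET_64BIT)  */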
2311 /* Return the cost of an address rtx ADDR. */
2313 static int
2314 s390_address_cost (rtx addr)
2316 struct s390_address ad;
2317 if (!s390_decompose_address (addr, &ad))
2318 return 1000;
2320 return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
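/* E.g. a base+displacement address (plus (reg) (const_int 40)) costs
   COSTS_N_INSNS (1); adding an index register costs one extra unit, so
   index-free addresses are slightly preferred (illustrative).  */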
2323 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2324 otherwise return 0. */
2327 tls_symbolic_operand (rtx op)
2329 if (GET_CODE (op) != SYMBOL_REF)
2330 return 0;
2331 return SYMBOL_REF_TLS_MODEL (op);
2334 /* Split DImode access register reference REG (on 64-bit) into its constituent
2335 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2336 gen_highpart cannot be used as they assume all registers are word-sized,
2337 while our access registers have only half that size. */
2339 void
2340 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2342 gcc_assert (TARGET_64BIT);
2343 gcc_assert (ACCESS_REG_P (reg));
2344 gcc_assert (GET_MODE (reg) == DImode);
2345 gcc_assert (!(REGNO (reg) & 1));
2347 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2348 *hi = gen_rtx_REG (SImode, REGNO (reg));
2351 /* Return true if OP contains a symbol reference.  */
2353 bool
2354 symbolic_reference_mentioned_p (rtx op)
2356 const char *fmt;
2357 int i;
2359 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2360 return 1;
2362 fmt = GET_RTX_FORMAT (GET_CODE (op));
2363 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2365 if (fmt[i] == 'E')
2367 int j;
2369 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2370 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2371 return 1;
2374 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2375 return 1;
2378 return 0;
2381 /* Return true if OP contains a reference to a thread-local symbol. */
2383 bool
2384 tls_symbolic_reference_mentioned_p (rtx op)
2386 const char *fmt;
2387 int i;
2389 if (GET_CODE (op) == SYMBOL_REF)
2390 return tls_symbolic_operand (op);
2392 fmt = GET_RTX_FORMAT (GET_CODE (op));
2393 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2395 if (fmt[i] == 'E')
2397 int j;
2399 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2400 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2401 return true;
2404 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2405 return true;
2408 return false;
2412 /* Return true if OP is a legitimate general operand when
2413 generating PIC code. It is given that flag_pic is on
2414 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2417 legitimate_pic_operand_p (rtx op)
2419 /* Accept all non-symbolic constants. */
2420 if (!SYMBOLIC_CONST (op))
2421 return 1;
2423 /* Reject everything else; must be handled
2424 via emit_symbolic_move. */
2425 return 0;
2428 /* Returns true if the constant value OP is a legitimate general operand.
2429 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2432 legitimate_constant_p (rtx op)
2434 /* Accept all non-symbolic constants. */
2435 if (!SYMBOLIC_CONST (op))
2436 return 1;
2438 /* Accept immediate LARL operands. */
2439 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2440 return 1;
2442 /* Thread-local symbols are never legal constants. This is
2443 so that emit_call knows that computing such addresses
2444 might require a function call. */
2445 if (TLS_SYMBOLIC_CONST (op))
2446 return 0;
2448 /* In the PIC case, symbolic constants must *not* be
2449 forced into the literal pool. We accept them here,
2450 so that they will be handled by emit_symbolic_move. */
2451 if (flag_pic)
2452 return 1;
2454 /* All remaining non-PIC symbolic constants are
2455 forced into the literal pool. */
2456 return 0;
2459 /* Determine if it's legal to put X into the constant pool. This
2460 is not possible if X contains the address of a symbol that is
2461 not constant (TLS) or not known at final link time (PIC). */
2463 static bool
2464 s390_cannot_force_const_mem (rtx x)
2466 switch (GET_CODE (x))
2468 case CONST_INT:
2469 case CONST_DOUBLE:
2470 /* Accept all non-symbolic constants. */
2471 return false;
2473 case LABEL_REF:
2474 /* Labels are OK iff we are non-PIC. */
2475 return flag_pic != 0;
2477 case SYMBOL_REF:
2478 /* 'Naked' TLS symbol references are never OK,
2479 non-TLS symbols are OK iff we are non-PIC. */
2480 if (tls_symbolic_operand (x))
2481 return true;
2482 else
2483 return flag_pic != 0;
2485 case CONST:
2486 return s390_cannot_force_const_mem (XEXP (x, 0));
2487 case PLUS:
2488 case MINUS:
2489 return s390_cannot_force_const_mem (XEXP (x, 0))
2490 || s390_cannot_force_const_mem (XEXP (x, 1));
2492 case UNSPEC:
2493 switch (XINT (x, 1))
2495 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2496 case UNSPEC_LTREL_OFFSET:
2497 case UNSPEC_GOT:
2498 case UNSPEC_GOTOFF:
2499 case UNSPEC_PLTOFF:
2500 case UNSPEC_TLSGD:
2501 case UNSPEC_TLSLDM:
2502 case UNSPEC_NTPOFF:
2503 case UNSPEC_DTPOFF:
2504 case UNSPEC_GOTNTPOFF:
2505 case UNSPEC_INDNTPOFF:
2506 return false;
2508 /* If the literal pool shares the code section, execute
2509 template placeholders can be put into the pool as well. */
2510 case UNSPEC_INSN:
2511 return TARGET_CPU_ZARCH;
2513 default:
2514 return true;
2516 break;
2518 default:
2519 gcc_unreachable ();
2523 /* Returns true if the constant value OP is a legitimate general
2524 operand during and after reload. The difference to
2525 legitimate_constant_p is that this function will not accept
2526 a constant that would need to be forced to the literal pool
2527 before it can be used as operand. */
2529 bool
2530 legitimate_reload_constant_p (rtx op)
2532 /* Accept la(y) operands. */
2533 if (GET_CODE (op) == CONST_INT
2534 && DISP_IN_RANGE (INTVAL (op)))
2535 return true;
2537 /* Accept l(g)hi/l(g)fi operands. */
2538 if (GET_CODE (op) == CONST_INT
2539 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2540 return true;
2542 /* Accept lliXX operands. */
2543 if (TARGET_ZARCH
2544 && GET_CODE (op) == CONST_INT
2545 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2546 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2547 return true;
2549 if (TARGET_EXTIMM
2550 && GET_CODE (op) == CONST_INT
2551 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2552 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2553 return true;
2555 /* Accept larl operands. */
2556 if (TARGET_CPU_ZARCH
2557 && larl_operand (op, VOIDmode))
2558 return true;
2560 /* Accept lzXX operands. */
2561 if (GET_CODE (op) == CONST_DOUBLE
2562 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2563 return true;
2565 /* Accept double-word operands that can be split. */
2566 if (GET_CODE (op) == CONST_INT
2567 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2569 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2570 rtx hi = operand_subword (op, 0, 0, dword_mode);
2571 rtx lo = operand_subword (op, 1, 0, dword_mode);
2572 return legitimate_reload_constant_p (hi)
2573 && legitimate_reload_constant_p (lo);
2576 /* Everything else cannot be handled without reload. */
2577 return false;
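/* Examples, assuming TARGET_64BIT with TARGET_EXTIMM (illustrative):

     (const_int 4092)                accepted: la/lay displacement
     (const_int -30000)              accepted: lghi
     (const_int 0x00ff000000000000)  accepted: single HImode part (lliXX)
     (const_int 0x0123456789abcdef)  rejected: needs the literal pool  */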
2580 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2581 return the class of reg to actually use. */
2583 enum reg_class
2584 s390_preferred_reload_class (rtx op, enum reg_class class)
2586 switch (GET_CODE (op))
2588 /* Constants we cannot reload must be forced into the
2589 literal pool. */
2591 case CONST_DOUBLE:
2592 case CONST_INT:
2593 if (legitimate_reload_constant_p (op))
2594 return class;
2595 else
2596 return NO_REGS;
2598 /* If a symbolic constant or a PLUS is reloaded,
2599 it is most likely being used as an address, so
2600 prefer ADDR_REGS. If 'class' is not a superset
2601 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2602 case PLUS:
2603 case LABEL_REF:
2604 case SYMBOL_REF:
2605 case CONST:
2606 if (reg_class_subset_p (ADDR_REGS, class))
2607 return ADDR_REGS;
2608 else
2609 return NO_REGS;
2611 default:
2612 break;
2615 return class;
2618 /* Return the register class of a scratch register needed to
2619 load IN into a register of class CLASS in MODE.
2621 We need a temporary when loading a PLUS expression which
2622 is not a legitimate operand of the LOAD ADDRESS instruction. */
2624 enum reg_class
2625 s390_secondary_input_reload_class (enum reg_class class,
2626 enum machine_mode mode, rtx in)
2628 if (s390_plus_operand (in, mode))
2629 return ADDR_REGS;
2631 if (reg_classes_intersect_p (FP_REGS, class)
2632 && mode == TFmode
2633 && GET_CODE (in) == MEM
2634 && GET_CODE (XEXP (in, 0)) == PLUS
2635 && GET_CODE (XEXP (XEXP (in, 0), 1)) == CONST_INT
2636 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (in, 0), 1))
2637 + GET_MODE_SIZE (mode) - 1))
2638 return ADDR_REGS;
2640 if (reg_classes_intersect_p (CC_REGS, class))
2641 return GENERAL_REGS;
2643 return NO_REGS;
2646 /* Return the register class of a scratch register needed to
2647 store a register of class CLASS in MODE into OUT:
2649 We need a temporary when storing a double-word to a
2650 non-offsettable memory address. */
2652 enum reg_class
2653 s390_secondary_output_reload_class (enum reg_class class,
2654 enum machine_mode mode, rtx out)
2656 if ((TARGET_64BIT ? (mode == TImode || mode == TFmode)
2657 : (mode == DImode || mode == DFmode))
2658 && reg_classes_intersect_p (GENERAL_REGS, class)
2659 && GET_CODE (out) == MEM
2660 && GET_CODE (XEXP (out, 0)) == PLUS
2661 && GET_CODE (XEXP (XEXP (out, 0), 0)) == PLUS
2662 && GET_CODE (XEXP (XEXP (out, 0), 1)) == CONST_INT
2663 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (out, 0), 1))
2664 + GET_MODE_SIZE (mode) - 1))
2665 return ADDR_REGS;
2667 if (reg_classes_intersect_p (FP_REGS, class)
2668 && mode == TFmode
2669 && GET_CODE (out) == MEM
2670 && GET_CODE (XEXP (out, 0)) == PLUS
2671 && GET_CODE (XEXP (XEXP (out, 0), 1)) == CONST_INT
2672 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (out, 0), 1))
2673 + GET_MODE_SIZE (mode) - 1))
2674 return ADDR_REGS;
2676 if (reg_classes_intersect_p (CC_REGS, class))
2677 return GENERAL_REGS;
2679 return NO_REGS;
2682 /* Generate code to load SRC, which is PLUS that is not a
2683 legitimate operand for the LA instruction, into TARGET.
2684 SCRATCH may be used as scratch register. */
2686 void
2687 s390_expand_plus_operand (rtx target, rtx src,
2688 rtx scratch)
2690 rtx sum1, sum2;
2691 struct s390_address ad;
2693 /* src must be a PLUS; get its two operands. */
2694 gcc_assert (GET_CODE (src) == PLUS);
2695 gcc_assert (GET_MODE (src) == Pmode);
2697 /* Check if any of the two operands is already scheduled
2698 for replacement by reload. This can happen e.g. when
2699 float registers occur in an address. */
2700 sum1 = find_replacement (&XEXP (src, 0));
2701 sum2 = find_replacement (&XEXP (src, 1));
2702 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2704 /* If the address is already strictly valid, there's nothing to do. */
2705 if (!s390_decompose_address (src, &ad)
2706 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2707 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
2709 /* Otherwise, one of the operands cannot be an address register;
2710 we reload its value into the scratch register. */
2711 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2713 emit_move_insn (scratch, sum1);
2714 sum1 = scratch;
2716 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2718 emit_move_insn (scratch, sum2);
2719 sum2 = scratch;
2722 /* According to the way these invalid addresses are generated
2723 in reload.c, it should never happen (at least on s390) that
2724 *neither* of the PLUS components, after find_replacements
2725 was applied, is an address register. */
2726 if (sum1 == scratch && sum2 == scratch)
2728 debug_rtx (src);
2729 gcc_unreachable ();
2732 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2735 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2736 is only ever performed on addresses, so we can mark the
2737 sum as legitimate for LA in any case. */
2738 s390_load_address (target, src);
2742 /* Return true if ADDR is a valid memory address.
2743 STRICT specifies whether strict register checking applies. */
2745 bool
2746 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2747 rtx addr, int strict)
2749 struct s390_address ad;
2750 if (!s390_decompose_address (addr, &ad))
2751 return false;
2753 if (strict)
2755 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2756 return false;
2758 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
2759 return false;
2761 else
2763 if (ad.base
2764 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
2765 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
2766 return false;
2768 if (ad.indx
2769 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
2770 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
2771 return false;
2773 return true;
2776 /* Return true if OP is a valid operand for the LA instruction.
2777 In 31-bit, we need to prove that the result is used as an
2778 address, as LA performs only a 31-bit addition. */
2780 bool
2781 legitimate_la_operand_p (rtx op)
2783 struct s390_address addr;
2784 if (!s390_decompose_address (op, &addr))
2785 return false;
2787 return (TARGET_64BIT || addr.pointer);
2790 /* Return true if it is valid *and* preferable to use LA to
2791 compute the sum of OP1 and OP2. */
2793 bool
2794 preferred_la_operand_p (rtx op1, rtx op2)
2796 struct s390_address addr;
2798 if (op2 != const0_rtx)
2799 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2801 if (!s390_decompose_address (op1, &addr))
2802 return false;
2803 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
2804 return false;
2805 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
2806 return false;
2808 if (!TARGET_64BIT && !addr.pointer)
2809 return false;
2811 if (addr.pointer)
2812 return true;
2814 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2815 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2816 return true;
2818 return false;
2821 /* Emit a forced load-address operation to load SRC into DST.
2822 This will use the LOAD ADDRESS instruction even in situations
2823 where legitimate_la_operand_p (SRC) returns false. */
2825 void
2826 s390_load_address (rtx dst, rtx src)
2828 if (TARGET_64BIT)
2829 emit_move_insn (dst, src);
2830 else
2831 emit_insn (gen_force_la_31 (dst, src));
2834 /* Return a legitimate reference for ORIG (an address) using the
2835 register REG. If REG is 0, a new pseudo is generated.
2837 There are two types of references that must be handled:
2839 1. Global data references must load the address from the GOT, via
2840 the PIC reg. An insn is emitted to do this load, and the reg is
2841 returned.
2843 2. Static data references, constant pool addresses, and code labels
2844 compute the address as an offset from the GOT, whose base is in
2845 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2846 differentiate them from global data objects. The returned
2847 address is the PIC reg + an unspec constant.
2849 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2850 reg also appears in the address. */
2853 legitimize_pic_address (rtx orig, rtx reg)
2855 rtx addr = orig;
2856 rtx new = orig;
2857 rtx base;
2859 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
2861 if (GET_CODE (addr) == LABEL_REF
2862 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2864 /* This is a local symbol. */
2865 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2867 /* Access local symbols PC-relative via LARL.
2868 This is the same as in the non-PIC case, so it is
2869 handled automatically ... */
2871 else
2873 /* Access local symbols relative to the GOT. */
2875 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2877 if (reload_in_progress || reload_completed)
2878 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2880 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2881 addr = gen_rtx_CONST (Pmode, addr);
2882 addr = force_const_mem (Pmode, addr);
2883 emit_move_insn (temp, addr);
2885 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2886 if (reg != 0)
2888 s390_load_address (reg, new);
2889 new = reg;
2893 else if (GET_CODE (addr) == SYMBOL_REF)
2895 if (reg == 0)
2896 reg = gen_reg_rtx (Pmode);
2898 if (flag_pic == 1)
2900 /* Assume GOT offset < 4k. This is handled the same way
2901 in both 31- and 64-bit code (@GOT). */
2903 if (reload_in_progress || reload_completed)
2904 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2906 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2907 new = gen_rtx_CONST (Pmode, new);
2908 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2909 new = gen_const_mem (Pmode, new);
2910 emit_move_insn (reg, new);
2911 new = reg;
2913 else if (TARGET_CPU_ZARCH)
2915 /* If the GOT offset might be >= 4k, we determine the position
2916 of the GOT entry via a PC-relative LARL (@GOTENT). */
2918 rtx temp = gen_reg_rtx (Pmode);
2920 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2921 new = gen_rtx_CONST (Pmode, new);
2922 emit_move_insn (temp, new);
2924 new = gen_const_mem (Pmode, temp);
2925 emit_move_insn (reg, new);
2926 new = reg;
2928 else
2930 /* If the GOT offset might be >= 4k, we have to load it
2931 from the literal pool (@GOT). */
2933 rtx temp = gen_reg_rtx (Pmode);
2935 if (reload_in_progress || reload_completed)
2936 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2938 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2939 addr = gen_rtx_CONST (Pmode, addr);
2940 addr = force_const_mem (Pmode, addr);
2941 emit_move_insn (temp, addr);
2943 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2944 new = gen_const_mem (Pmode, new);
2945 emit_move_insn (reg, new);
2946 new = reg;
2949 else
2951 if (GET_CODE (addr) == CONST)
2953 addr = XEXP (addr, 0);
2954 if (GET_CODE (addr) == UNSPEC)
2956 gcc_assert (XVECLEN (addr, 0) == 1);
2957 switch (XINT (addr, 1))
2959 /* If someone moved a GOT-relative UNSPEC
2960 out of the literal pool, force them back in. */
2961 case UNSPEC_GOTOFF:
2962 case UNSPEC_PLTOFF:
2963 new = force_const_mem (Pmode, orig);
2964 break;
2966 /* @GOT is OK as is if small. */
2967 case UNSPEC_GOT:
2968 if (flag_pic == 2)
2969 new = force_const_mem (Pmode, orig);
2970 break;
2972 /* @GOTENT is OK as is. */
2973 case UNSPEC_GOTENT:
2974 break;
2976 /* @PLT is OK as is on 64-bit, must be converted to
2977 GOT-relative @PLTOFF on 31-bit. */
2978 case UNSPEC_PLT:
2979 if (!TARGET_CPU_ZARCH)
2981 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2983 if (reload_in_progress || reload_completed)
2984 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2986 addr = XVECEXP (addr, 0, 0);
2987 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2988 UNSPEC_PLTOFF);
2989 addr = gen_rtx_CONST (Pmode, addr);
2990 addr = force_const_mem (Pmode, addr);
2991 emit_move_insn (temp, addr);
2993 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2994 if (reg != 0)
2996 s390_load_address (reg, new);
2997 new = reg;
3000 break;
3002 /* Everything else cannot happen. */
3003 default:
3004 gcc_unreachable ();
3007 else
3008 gcc_assert (GET_CODE (addr) == PLUS);
3010 if (GET_CODE (addr) == PLUS)
3012 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3014 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3015 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3017 /* Check first to see if this is a constant offset
3018 from a local symbol reference. */
3019 if ((GET_CODE (op0) == LABEL_REF
3020 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3021 && GET_CODE (op1) == CONST_INT)
3023 if (TARGET_CPU_ZARCH
3024 && larl_operand (op0, VOIDmode)
3025 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3026 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3028 if (INTVAL (op1) & 1)
3030 /* LARL can't handle odd offsets, so emit a
3031 pair of LARL and LA. */
3032 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3034 if (!DISP_IN_RANGE (INTVAL (op1)))
3036 HOST_WIDE_INT even = INTVAL (op1) - 1;
3037 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3038 op0 = gen_rtx_CONST (Pmode, op0);
3039 op1 = const1_rtx;
3042 emit_move_insn (temp, op0);
3043 new = gen_rtx_PLUS (Pmode, temp, op1);
3045 if (reg != 0)
3047 s390_load_address (reg, new);
3048 new = reg;
3051 else
3053 /* If the offset is even, we can just use LARL.
3054 This will happen automatically. */
3057 else
3059 /* Access local symbols relative to the GOT. */
3061 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3063 if (reload_in_progress || reload_completed)
3064 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3066 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3067 UNSPEC_GOTOFF);
3068 addr = gen_rtx_PLUS (Pmode, addr, op1);
3069 addr = gen_rtx_CONST (Pmode, addr);
3070 addr = force_const_mem (Pmode, addr);
3071 emit_move_insn (temp, addr);
3073 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3074 if (reg != 0)
3076 s390_load_address (reg, new);
3077 new = reg;
3082 /* Now, check whether it is a GOT relative symbol plus offset
3083 that was pulled out of the literal pool. Force it back in. */
3085 else if (GET_CODE (op0) == UNSPEC
3086 && GET_CODE (op1) == CONST_INT
3087 && XINT (op0, 1) == UNSPEC_GOTOFF)
3089 gcc_assert (XVECLEN (op0, 0) == 1);
3091 new = force_const_mem (Pmode, orig);
3094 /* Otherwise, compute the sum. */
3095 else
3097 base = legitimize_pic_address (XEXP (addr, 0), reg);
3098 new = legitimize_pic_address (XEXP (addr, 1),
3099 base == reg ? NULL_RTX : reg);
3100 if (GET_CODE (new) == CONST_INT)
3101 new = plus_constant (base, INTVAL (new));
3102 else
3104 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3106 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3107 new = XEXP (new, 1);
3109 new = gen_rtx_PLUS (Pmode, base, new);
3112 if (GET_CODE (new) == CONST)
3113 new = XEXP (new, 0);
3114 new = force_operand (new, 0);
3118 return new;
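/* Rough shape of the code emitted for a global symbol, assuming
   z/Architecture; mnemonics are illustrative:

     flag_pic == 1:   lg   %r1, sym@GOT(%r12)        GOT slot below 4k
     flag_pic == 2:   larl %r1, sym@GOTENT           PC-relative GOT slot
                      lg   %r1, 0(%r1)

   Local symbols bypass the GOT entirely via larl where possible.  */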
3121 /* Load the thread pointer into a register. */
3124 s390_get_thread_pointer (void)
3126 rtx tp = gen_reg_rtx (Pmode);
3128 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3129 mark_reg_pointer (tp, BITS_PER_WORD);
3131 return tp;
3134 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3135 in s390_tls_symbol which always refers to __tls_get_offset.
3136 The returned offset is written to RESULT_REG and an USE rtx is
3137 generated for TLS_CALL. */
3139 static GTY(()) rtx s390_tls_symbol;
3141 static void
3142 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3144 rtx insn;
3146 gcc_assert (flag_pic);
3148 if (!s390_tls_symbol)
3149 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3151 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3152 gen_rtx_REG (Pmode, RETURN_REGNUM));
3154 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3155 CONST_OR_PURE_CALL_P (insn) = 1;
3158 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3159 this (thread-local) address. REG may be used as temporary. */
3161 static rtx
3162 legitimize_tls_address (rtx addr, rtx reg)
3164 rtx new, tls_call, temp, base, r2, insn;
3166 if (GET_CODE (addr) == SYMBOL_REF)
3167 switch (tls_symbolic_operand (addr))
3169 case TLS_MODEL_GLOBAL_DYNAMIC:
3170 start_sequence ();
3171 r2 = gen_rtx_REG (Pmode, 2);
3172 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3173 new = gen_rtx_CONST (Pmode, tls_call);
3174 new = force_const_mem (Pmode, new);
3175 emit_move_insn (r2, new);
3176 s390_emit_tls_call_insn (r2, tls_call);
3177 insn = get_insns ();
3178 end_sequence ();
3180 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3181 temp = gen_reg_rtx (Pmode);
3182 emit_libcall_block (insn, temp, r2, new);
3184 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3185 if (reg != 0)
3187 s390_load_address (reg, new);
3188 new = reg;
3190 break;
3192 case TLS_MODEL_LOCAL_DYNAMIC:
3193 start_sequence ();
3194 r2 = gen_rtx_REG (Pmode, 2);
3195 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3196 new = gen_rtx_CONST (Pmode, tls_call);
3197 new = force_const_mem (Pmode, new);
3198 emit_move_insn (r2, new);
3199 s390_emit_tls_call_insn (r2, tls_call);
3200 insn = get_insns ();
3201 end_sequence ();
3203 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3204 temp = gen_reg_rtx (Pmode);
3205 emit_libcall_block (insn, temp, r2, new);
3207 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3208 base = gen_reg_rtx (Pmode);
3209 s390_load_address (base, new);
3211 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3212 new = gen_rtx_CONST (Pmode, new);
3213 new = force_const_mem (Pmode, new);
3214 temp = gen_reg_rtx (Pmode);
3215 emit_move_insn (temp, new);
3217 new = gen_rtx_PLUS (Pmode, base, temp);
3218 if (reg != 0)
3220 s390_load_address (reg, new);
3221 new = reg;
3223 break;
3225 case TLS_MODEL_INITIAL_EXEC:
3226 if (flag_pic == 1)
3228 /* Assume GOT offset < 4k. This is handled the same way
3229 in both 31- and 64-bit code. */
3231 if (reload_in_progress || reload_completed)
3232 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3234 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3235 new = gen_rtx_CONST (Pmode, new);
3236 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3237 new = gen_const_mem (Pmode, new);
3238 temp = gen_reg_rtx (Pmode);
3239 emit_move_insn (temp, new);
3241 else if (TARGET_CPU_ZARCH)
3243 /* If the GOT offset might be >= 4k, we determine the position
3244 of the GOT entry via a PC-relative LARL. */
3246 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3247 new = gen_rtx_CONST (Pmode, new);
3248 temp = gen_reg_rtx (Pmode);
3249 emit_move_insn (temp, new);
3251 new = gen_const_mem (Pmode, temp);
3252 temp = gen_reg_rtx (Pmode);
3253 emit_move_insn (temp, new);
3255 else if (flag_pic)
3257 /* If the GOT offset might be >= 4k, we have to load it
3258 from the literal pool. */
3260 if (reload_in_progress || reload_completed)
3261 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3263 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3264 new = gen_rtx_CONST (Pmode, new);
3265 new = force_const_mem (Pmode, new);
3266 temp = gen_reg_rtx (Pmode);
3267 emit_move_insn (temp, new);
3269 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3270 new = gen_const_mem (Pmode, new);
3272 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3273 temp = gen_reg_rtx (Pmode);
3274 emit_insn (gen_rtx_SET (Pmode, temp, new));
3276 else
3278 /* In position-dependent code, load the absolute address of
3279 the GOT entry from the literal pool. */
3281 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3282 new = gen_rtx_CONST (Pmode, new);
3283 new = force_const_mem (Pmode, new);
3284 temp = gen_reg_rtx (Pmode);
3285 emit_move_insn (temp, new);
3287 new = temp;
3288 new = gen_const_mem (Pmode, new);
3289 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3290 temp = gen_reg_rtx (Pmode);
3291 emit_insn (gen_rtx_SET (Pmode, temp, new));
3294 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3295 if (reg != 0)
3297 s390_load_address (reg, new);
3298 new = reg;
3300 break;
3302 case TLS_MODEL_LOCAL_EXEC:
3303 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3304 new = gen_rtx_CONST (Pmode, new);
3305 new = force_const_mem (Pmode, new);
3306 temp = gen_reg_rtx (Pmode);
3307 emit_move_insn (temp, new);
3309 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3310 if (reg != 0)
3312 s390_load_address (reg, new);
3313 new = reg;
3315 break;
3317 default:
3318 gcc_unreachable ();
3321 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3323 switch (XINT (XEXP (addr, 0), 1))
3325 case UNSPEC_INDNTPOFF:
3326 gcc_assert (TARGET_CPU_ZARCH);
3327 new = addr;
3328 break;
3330 default:
3331 gcc_unreachable ();
3335 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3336 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3338 new = XEXP (XEXP (addr, 0), 0);
3339 if (GET_CODE (new) != SYMBOL_REF)
3340 new = gen_rtx_CONST (Pmode, new);
3342 new = legitimize_tls_address (new, reg);
3343 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3344 new = force_operand (new, 0);
3347 else
3348 gcc_unreachable (); /* for now ... */
3350 return new;
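/* Sketch of the initial-exec case on z/Architecture (illustrative):

     larl  %r1, x@INDNTPOFF       address of the GOT entry
     lg    %r1, 0(%r1)            offset of x from the thread pointer
     ...   plus the thread pointer obtained by s390_get_thread_pointer  */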
3353 /* Emit insns to move operands[1] into operands[0]. */
3355 void
3356 emit_symbolic_move (rtx *operands)
3358 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
3360 if (GET_CODE (operands[0]) == MEM)
3361 operands[1] = force_reg (Pmode, operands[1]);
3362 else if (TLS_SYMBOLIC_CONST (operands[1]))
3363 operands[1] = legitimize_tls_address (operands[1], temp);
3364 else if (flag_pic)
3365 operands[1] = legitimize_pic_address (operands[1], temp);
3368 /* Try machine-dependent ways of modifying an illegitimate address X
3369 to be legitimate. If we find one, return the new, valid address.
3371 OLDX is the address as it was before break_out_memory_refs was called.
3372 In some cases it is useful to look at this to decide what needs to be done.
3374 MODE is the mode of the operand pointed to by X.
3376 When -fpic is used, special handling is needed for symbolic references.
3377 See comments by legitimize_pic_address for details. */
3380 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3381 enum machine_mode mode ATTRIBUTE_UNUSED)
3383 rtx constant_term = const0_rtx;
3385 if (TLS_SYMBOLIC_CONST (x))
3387 x = legitimize_tls_address (x, 0);
3389 if (legitimate_address_p (mode, x, FALSE))
3390 return x;
3392 else if (GET_CODE (x) == PLUS
3393 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3394 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3396 return x;
3398 else if (flag_pic)
3400 if (SYMBOLIC_CONST (x)
3401 || (GET_CODE (x) == PLUS
3402 && (SYMBOLIC_CONST (XEXP (x, 0))
3403 || SYMBOLIC_CONST (XEXP (x, 1)))))
3404 x = legitimize_pic_address (x, 0);
3406 if (legitimate_address_p (mode, x, FALSE))
3407 return x;
3410 x = eliminate_constant_term (x, &constant_term);
3412 /* Optimize loading of large displacements by splitting them
3413 into the multiple of 4K and the rest; this allows the
3414 former to be CSE'd if possible.
3416 Don't do this if the displacement is added to a register
3417 pointing into the stack frame, as the offsets will
3418 change later anyway. */
3420 if (GET_CODE (constant_term) == CONST_INT
3421 && !TARGET_LONG_DISPLACEMENT
3422 && !DISP_IN_RANGE (INTVAL (constant_term))
3423 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3425 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3426 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3428 rtx temp = gen_reg_rtx (Pmode);
3429 rtx val = force_operand (GEN_INT (upper), temp);
3430 if (val != temp)
3431 emit_move_insn (temp, val);
3433 x = gen_rtx_PLUS (Pmode, x, temp);
3434 constant_term = GEN_INT (lower);
3437 if (GET_CODE (x) == PLUS)
3439 if (GET_CODE (XEXP (x, 0)) == REG)
3441 rtx temp = gen_reg_rtx (Pmode);
3442 rtx val = force_operand (XEXP (x, 1), temp);
3443 if (val != temp)
3444 emit_move_insn (temp, val);
3446 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3449 else if (GET_CODE (XEXP (x, 1)) == REG)
3451 rtx temp = gen_reg_rtx (Pmode);
3452 rtx val = force_operand (XEXP (x, 0), temp);
3453 if (val != temp)
3454 emit_move_insn (temp, val);
3456 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3460 if (constant_term != const0_rtx)
3461 x = gen_rtx_PLUS (Pmode, x, constant_term);
3463 return x;
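/* Worked example of the 4K split (illustrative):

     constant_term = 0x12345
     lower = 0x12345 & 0xfff = 0x345
     upper = 0x12345 ^ 0x345 = 0x12000

   The address becomes (x + 0x12000) + 0x345, and the 0x12000 part is a
   candidate for CSE across references.  */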
3466 /* Try a machine-dependent way of reloading an illegitimate address AD
3467 operand. If we find one, push the reload and return the new address.
3469 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3470 and TYPE is the reload type of the current reload. */
3472 rtx
3473 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3474 int opnum, int type)
3476 if (!optimize || TARGET_LONG_DISPLACEMENT)
3477 return NULL_RTX;
3479 if (GET_CODE (ad) == PLUS)
3481 rtx tem = simplify_binary_operation (PLUS, Pmode,
3482 XEXP (ad, 0), XEXP (ad, 1));
3483 if (tem)
3484 ad = tem;
3487 if (GET_CODE (ad) == PLUS
3488 && GET_CODE (XEXP (ad, 0)) == REG
3489 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3490 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3492 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3493 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3494 rtx cst, tem, new;
3496 cst = GEN_INT (upper);
3497 if (!legitimate_reload_constant_p (cst))
3498 cst = force_const_mem (Pmode, cst);
3500 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3501 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3503 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3504 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3505 opnum, (enum reload_type) type);
3506 return new;
3509 return NULL_RTX;
3512 /* Emit code to move LEN bytes from SRC to DST. */
3514 void
3515 s390_expand_movmem (rtx dst, rtx src, rtx len)
3517 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3519 if (INTVAL (len) > 0)
3520 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3523 else if (TARGET_MVCLE)
3525 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3528 else
3530 rtx dst_addr, src_addr, count, blocks, temp;
3531 rtx loop_start_label = gen_label_rtx ();
3532 rtx loop_end_label = gen_label_rtx ();
3533 rtx end_label = gen_label_rtx ();
3534 enum machine_mode mode;
3536 mode = GET_MODE (len);
3537 if (mode == VOIDmode)
3538 mode = Pmode;
3540 dst_addr = gen_reg_rtx (Pmode);
3541 src_addr = gen_reg_rtx (Pmode);
3542 count = gen_reg_rtx (mode);
3543 blocks = gen_reg_rtx (mode);
3545 convert_move (count, len, 1);
3546 emit_cmp_and_jump_insns (count, const0_rtx,
3547 EQ, NULL_RTX, mode, 1, end_label);
3549 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3550 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3551 dst = change_address (dst, VOIDmode, dst_addr);
3552 src = change_address (src, VOIDmode, src_addr);
3554 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3555 if (temp != count)
3556 emit_move_insn (count, temp);
3558 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3559 if (temp != blocks)
3560 emit_move_insn (blocks, temp);
3562 emit_cmp_and_jump_insns (blocks, const0_rtx,
3563 EQ, NULL_RTX, mode, 1, loop_end_label);
3565 emit_label (loop_start_label);
3567 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3568 s390_load_address (dst_addr,
3569 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3570 s390_load_address (src_addr,
3571 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3573 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3574 if (temp != blocks)
3575 emit_move_insn (blocks, temp);
3577 emit_cmp_and_jump_insns (blocks, const0_rtx,
3578 EQ, NULL_RTX, mode, 1, loop_end_label);
3580 emit_jump (loop_start_label);
3581 emit_label (loop_end_label);
3583 emit_insn (gen_movmem_short (dst, src,
3584 convert_to_mode (Pmode, count, 1)));
3585 emit_label (end_label);
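/* Example for the looping path with len = 700 (illustrative):
   count = 699 after the decrement, blocks = 699 >> 8 = 2, so two
   256-byte MVCs run in the loop and the final EXecuted MVC moves the
   remaining 699 % 256 + 1 = 188 bytes: 256 + 256 + 188 = 700.  */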
3589 /* Emit code to set LEN bytes at DST to VAL.
3590 Make use of clrmem if VAL is zero. */
3592 void
3593 s390_expand_setmem (rtx dst, rtx len, rtx val)
3595 gcc_assert (GET_CODE (len) != CONST_INT || INTVAL (len) > 0);
3596 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3598 if (GET_CODE (len) == CONST_INT && INTVAL (len) <= 257)
3600 if (val == const0_rtx && INTVAL (len) <= 256)
3601 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3602 else
3604 /* Initialize memory by storing the first byte. */
3605 emit_move_insn (adjust_address (dst, QImode, 0), val);
3607 if (INTVAL (len) > 1)
3609 /* Initiate 1 byte overlap move.
3610 The first byte of DST is propagated through DSTP1.
3611 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3612 DST is set to size 1 so the rest of the memory location
3613 does not count as source operand. */
3614 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3615 set_mem_size (dst, const1_rtx);
3617 emit_insn (gen_movmem_short (dstp1, dst,
3618 GEN_INT (INTVAL (len) - 2)));
3623 else if (TARGET_MVCLE)
3625 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3626 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
3629 else
3631 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
3632 rtx loop_start_label = gen_label_rtx ();
3633 rtx loop_end_label = gen_label_rtx ();
3634 rtx end_label = gen_label_rtx ();
3635 enum machine_mode mode;
3637 mode = GET_MODE (len);
3638 if (mode == VOIDmode)
3639 mode = Pmode;
3641 dst_addr = gen_reg_rtx (Pmode);
3642 src_addr = gen_reg_rtx (Pmode);
3643 count = gen_reg_rtx (mode);
3644 blocks = gen_reg_rtx (mode);
3646 convert_move (count, len, 1);
3647 emit_cmp_and_jump_insns (count, const0_rtx,
3648 EQ, NULL_RTX, mode, 1, end_label);
3650 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3651 dst = change_address (dst, VOIDmode, dst_addr);
3653 if (val == const0_rtx)
3654 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3655 else
3657 dstp1 = adjust_address (dst, VOIDmode, 1);
3658 set_mem_size (dst, const1_rtx);
3660 /* Initialize memory by storing the first byte. */
3661 emit_move_insn (adjust_address (dst, QImode, 0), val);
3663 /* If count is 1 we are done. */
3664 emit_cmp_and_jump_insns (count, const1_rtx,
3665 EQ, NULL_RTX, mode, 1, end_label);
3667 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3669 if (temp != count)
3670 emit_move_insn (count, temp);
3672 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3673 if (temp != blocks)
3674 emit_move_insn (blocks, temp);
3676 emit_cmp_and_jump_insns (blocks, const0_rtx,
3677 EQ, NULL_RTX, mode, 1, loop_end_label);
3679 emit_label (loop_start_label);
3681 if (val == const0_rtx)
3682 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3683 else
3684 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
3685 s390_load_address (dst_addr,
3686 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3688 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3689 if (temp != blocks)
3690 emit_move_insn (blocks, temp);
3692 emit_cmp_and_jump_insns (blocks, const0_rtx,
3693 EQ, NULL_RTX, mode, 1, loop_end_label);
3695 emit_jump (loop_start_label);
3696 emit_label (loop_end_label);
3698 if (val == const0_rtx)
3699 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3700 else
3701 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
3702 emit_label (end_label);
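/* The 1-byte-overlap move above is the classic S/390 memset idiom
   (illustrative): after storing VAL once, MVC dst+1(len-1),dst copies
   strictly left to right one byte at a time, so the first byte is
   propagated through the whole destination.  */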
3706 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3707 and return the result in TARGET. */
3709 void
3710 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3712 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3713 rtx tmp;
3715 /* As the result of CMPINT is inverted compared to what we need,
3716 we have to swap the operands. */
3717 tmp = op0; op0 = op1; op1 = tmp;
3719 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3721 if (INTVAL (len) > 0)
3723 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3724 emit_insn (gen_cmpint (target, ccreg));
3726 else
3727 emit_move_insn (target, const0_rtx);
3729 else if (TARGET_MVCLE)
3731 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3732 emit_insn (gen_cmpint (target, ccreg));
3734 else
3736 rtx addr0, addr1, count, blocks, temp;
3737 rtx loop_start_label = gen_label_rtx ();
3738 rtx loop_end_label = gen_label_rtx ();
3739 rtx end_label = gen_label_rtx ();
3740 enum machine_mode mode;
3742 mode = GET_MODE (len);
3743 if (mode == VOIDmode)
3744 mode = Pmode;
3746 addr0 = gen_reg_rtx (Pmode);
3747 addr1 = gen_reg_rtx (Pmode);
3748 count = gen_reg_rtx (mode);
3749 blocks = gen_reg_rtx (mode);
3751 convert_move (count, len, 1);
3752 emit_cmp_and_jump_insns (count, const0_rtx,
3753 EQ, NULL_RTX, mode, 1, end_label);
3755 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3756 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3757 op0 = change_address (op0, VOIDmode, addr0);
3758 op1 = change_address (op1, VOIDmode, addr1);
3760 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3761 if (temp != count)
3762 emit_move_insn (count, temp);
3764 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3765 if (temp != blocks)
3766 emit_move_insn (blocks, temp);
3768 emit_cmp_and_jump_insns (blocks, const0_rtx,
3769 EQ, NULL_RTX, mode, 1, loop_end_label);
3771 emit_label (loop_start_label);
3773 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3774 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3775 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3776 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3777 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3778 emit_jump_insn (temp);
3780 s390_load_address (addr0,
3781 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3782 s390_load_address (addr1,
3783 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3785 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3786 if (temp != blocks)
3787 emit_move_insn (blocks, temp);
3789 emit_cmp_and_jump_insns (blocks, const0_rtx,
3790 EQ, NULL_RTX, mode, 1, loop_end_label);
3792 emit_jump (loop_start_label);
3793 emit_label (loop_end_label);
3795 emit_insn (gen_cmpmem_short (op0, op1,
3796 convert_to_mode (Pmode, count, 1)));
3797 emit_label (end_label);
3799 emit_insn (gen_cmpint (target, ccreg));
3804 /* Expand conditional increment or decrement using alc/slb instructions.
3805 Should generate code setting DST to either SRC or SRC + INCREMENT,
3806 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3807 Returns true if successful, false otherwise.
3809 That makes it possible to implement some if-constructs without jumps e.g.:
3810 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3811 unsigned int a, b, c;
3812 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3813 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3814 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3815 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3817 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3818 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3819 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3820 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3821 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
3823 bool
3824 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3825 rtx dst, rtx src, rtx increment)
3827 enum machine_mode cmp_mode;
3828 enum machine_mode cc_mode;
3829 rtx op_res;
3830 rtx insn;
3831 rtvec p;
3832 int ret;
3834 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3835 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3836 cmp_mode = SImode;
3837 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3838 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3839 cmp_mode = DImode;
3840 else
3841 return false;
3843 /* Try ADD LOGICAL WITH CARRY. */
3844 if (increment == const1_rtx)
3846 /* Determine CC mode to use. */
3847 if (cmp_code == EQ || cmp_code == NE)
3849 if (cmp_op1 != const0_rtx)
3851 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3852 NULL_RTX, 0, OPTAB_WIDEN);
3853 cmp_op1 = const0_rtx;
3856 cmp_code = cmp_code == EQ ? LEU : GTU;
3859 if (cmp_code == LTU || cmp_code == LEU)
3861 rtx tem = cmp_op0;
3862 cmp_op0 = cmp_op1;
3863 cmp_op1 = tem;
3864 cmp_code = swap_condition (cmp_code);
3867 switch (cmp_code)
3869 case GTU:
3870 cc_mode = CCUmode;
3871 break;
3873 case GEU:
3874 cc_mode = CCL3mode;
3875 break;
3877 default:
3878 return false;
3881 /* Emit comparison instruction pattern. */
3882 if (!register_operand (cmp_op0, cmp_mode))
3883 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3885 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3886 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3887 /* We use insn_invalid_p here to add clobbers if required. */
3888 ret = insn_invalid_p (emit_insn (insn));
3889 gcc_assert (!ret);
3891 /* Emit ALC instruction pattern. */
3892 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3893 gen_rtx_REG (cc_mode, CC_REGNUM),
3894 const0_rtx);
3896 if (src != const0_rtx)
3898 if (!register_operand (src, GET_MODE (dst)))
3899 src = force_reg (GET_MODE (dst), src);
3901 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3902 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3905 p = rtvec_alloc (2);
3906 RTVEC_ELT (p, 0) =
3907 gen_rtx_SET (VOIDmode, dst, op_res);
3908 RTVEC_ELT (p, 1) =
3909 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3910 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3912 return true;
3915 /* Try SUBTRACT LOGICAL WITH BORROW. */
3916 if (increment == constm1_rtx)
3918 /* Determine CC mode to use. */
3919 if (cmp_code == EQ || cmp_code == NE)
3921 if (cmp_op1 != const0_rtx)
3923 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3924 NULL_RTX, 0, OPTAB_WIDEN);
3925 cmp_op1 = const0_rtx;
3928 cmp_code = cmp_code == EQ ? LEU : GTU;
3931 if (cmp_code == GTU || cmp_code == GEU)
3933 rtx tem = cmp_op0;
3934 cmp_op0 = cmp_op1;
3935 cmp_op1 = tem;
3936 cmp_code = swap_condition (cmp_code);
3939 switch (cmp_code)
3941 case LEU:
3942 cc_mode = CCUmode;
3943 break;
3945 case LTU:
3946 cc_mode = CCL3mode;
3947 break;
3949 default:
3950 return false;
3953 /* Emit comparison instruction pattern. */
3954 if (!register_operand (cmp_op0, cmp_mode))
3955 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3957 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3958 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3959 /* We use insn_invalid_p here to add clobbers if required. */
3960 ret = insn_invalid_p (emit_insn (insn));
3961 gcc_assert (!ret);
3963 /* Emit SLB instruction pattern. */
3964 if (!register_operand (src, GET_MODE (dst)))
3965 src = force_reg (GET_MODE (dst), src);
3967 op_res = gen_rtx_MINUS (GET_MODE (dst),
3968 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3969 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3970 gen_rtx_REG (cc_mode, CC_REGNUM),
3971 const0_rtx));
3972 p = rtvec_alloc (2);
3973 RTVEC_ELT (p, 0) =
3974 gen_rtx_SET (VOIDmode, dst, op_res);
3975 RTVEC_ELT (p, 1) =
3976 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3977 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3979 return true;
3982 return false;
3985 /* Expand code for the insv template. Return true if successful, false otherwise. */
3987 bool
3988 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
3990 int bitsize = INTVAL (op1);
3991 int bitpos = INTVAL (op2);
3993 /* We need byte alignment. */
3994 if (bitsize % BITS_PER_UNIT)
3995 return false;
3997 if (bitpos == 0
3998 && memory_operand (dest, VOIDmode)
3999 && (register_operand (src, word_mode)
4000 || const_int_operand (src, VOIDmode)))
4002 /* Emit standard pattern if possible. */
4003 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4004 if (GET_MODE_BITSIZE (mode) == bitsize)
4005 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4007 /* (set (ze (mem)) (const_int)). */
4008 else if (const_int_operand (src, VOIDmode))
4010 int size = bitsize / BITS_PER_UNIT;
4011 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4012 GET_MODE_SIZE (word_mode) - size);
4014 dest = adjust_address (dest, BLKmode, 0);
4015 set_mem_size (dest, GEN_INT (size));
4016 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4019 /* (set (ze (mem)) (reg)). */
4020 else if (register_operand (src, word_mode))
4022 if (bitsize <= GET_MODE_BITSIZE (SImode))
4023 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4024 const0_rtx), src);
4025 else
4027 /* Emit st,stcmh sequence. */
4028 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4029 int size = stcmh_width / BITS_PER_UNIT;
4031 emit_move_insn (adjust_address (dest, SImode, size),
4032 gen_lowpart (SImode, src));
4033 set_mem_size (dest, GEN_INT (size));
4034 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4035 (stcmh_width), const0_rtx),
4036 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4037 (GET_MODE_BITSIZE (SImode))));
4040 else
4041 return false;
4043 return true;
4046 /* (set (ze (reg)) (const_int)). */
4047 if (TARGET_ZARCH
4048 && register_operand (dest, word_mode)
4049 && (bitpos % 16) == 0
4050 && (bitsize % 16) == 0
4051 && const_int_operand (src, VOIDmode))
4053 HOST_WIDE_INT val = INTVAL (src);
4054 int regpos = bitpos + bitsize;
4056 while (regpos > bitpos)
4058 enum machine_mode putmode;
4059 int putsize;
4061 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4062 putmode = SImode;
4063 else
4064 putmode = HImode;
4066 putsize = GET_MODE_BITSIZE (putmode);
4067 regpos -= putsize;
4068 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4069 GEN_INT (putsize),
4070 GEN_INT (regpos)),
4071 gen_int_mode (val, putmode));
4072 val >>= putsize;
4074 gcc_assert (regpos == bitpos);
4075 return true;
4078 return false;
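/* Example (illustrative): inserting (const_int 0x1234abcd) with
   bitsize 32 at bitpos 0 into a DImode register emits one SImode
   insert when TARGET_EXTIMM is set, and two HImode inserts (one per
   halfword) otherwise.  */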
4081 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4082 register that holds VAL of mode MODE shifted by COUNT bits. */
4084 static inline rtx
4085 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4087 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4088 NULL_RTX, 1, OPTAB_DIRECT);
4089 return expand_simple_binop (SImode, ASHIFT, val, count,
4090 NULL_RTX, 1, OPTAB_DIRECT);
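/* E.g. for QImode VAL = 0x1ab and COUNT = 16, the result holds
   (0x1ab & 0xff) << 16 = 0x00ab0000 (illustrative).  */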
4093 /* Structure to hold the initial parameters for a compare_and_swap operation
4094 in HImode and QImode. */
4096 struct alignment_context
4098 rtx memsi; /* SI aligned memory location. */
4099 rtx shift; /* Bit offset with regard to lsb. */
4100 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4101 rtx modemaski; /* ~modemask */
4102 bool aligned; /* True if memory is aligned, false otherwise. */
4105 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4106 structure AC for transparently simplifying the access if the memory
4107 alignment is known to be at least 32 bits. MEM is the memory location for the actual operation
4108 and MODE its mode. */
4110 static void
4111 init_alignment_context (struct alignment_context *ac, rtx mem,
4112 enum machine_mode mode)
4114 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4115 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4117 if (ac->aligned)
4118 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4119 else
4121 /* Alignment is unknown. */
4122 rtx byteoffset, addr, align;
4124 /* Force the address into a register. */
4125 addr = force_reg (Pmode, XEXP (mem, 0));
4127 /* Align it to SImode. */
4128 align = expand_simple_binop (Pmode, AND, addr,
4129 GEN_INT (-GET_MODE_SIZE (SImode)),
4130 NULL_RTX, 1, OPTAB_DIRECT);
4131 /* Generate MEM. */
4132 ac->memsi = gen_rtx_MEM (SImode, align);
4133 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4134 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4135 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4137 /* Calculate shiftcount. */
4138 byteoffset = expand_simple_binop (Pmode, AND, addr,
4139 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4140 NULL_RTX, 1, OPTAB_DIRECT);
4141 /* As we already have some offset, evaluate the remaining distance. */
4142 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4143 NULL_RTX, 1, OPTAB_DIRECT);
4146 /* Shift is the byte count, but we need the bit count. */
4147 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4148 NULL_RTX, 1, OPTAB_DIRECT);
4149 /* Calculate masks. */
4150 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4151 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4152 NULL_RTX, 1, OPTAB_DIRECT);
4153 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
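/* Worked example (illustrative): an HImode MEM at address A with
   unknown alignment and A % 4 == 1:

     memsi      covers the word at A & -4
     byteoffset = A & 3 = 1
     shift      = ((4 - 2) - 1) * 8 = 8
     modemask   = 0xffff << 8 = 0x00ffff00

   i.e. on this big-endian target the halfword occupies bits 8..23 of
   the aligned word, counted from the least significant bit.  */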
4156 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4157 the memory location, CMP the old value to compare MEM with and NEW the value
4158 to set if CMP == MEM.
4159 CMP is never in memory for compare_and_swap_cc because
4160 expand_bool_compare_and_swap puts it into a register for later compare. */
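/* Roughly, the emitted sequence behaves like the following sketch; this
   is an approximation for exposition, not the literal expansion.  CS
   denotes the compare-and-swap, which yields the previous contents of
   the word:

     val = *memsi & ~modemask;
     for (;;)
       {
         cmpv = val | (cmp << shift);
         newv = val | (new << shift);
         res = CS (memsi, cmpv, newv);
         if (res == cmpv)
           break;                   (swap succeeded)
         if ((res & ~modemask) == val)
           break;                   (only the MODE part differed)
         val = res & ~modemask;     (bits outside MODE changed; retry)
       }
     target = res >> shift;  */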
4162 void
4163 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4165 struct alignment_context ac;
4166 rtx cmpv, newv, val, resv, cc;
4167 rtx res = gen_reg_rtx (SImode);
4168 rtx csloop = gen_label_rtx ();
4169 rtx csend = gen_label_rtx ();
4171 gcc_assert (register_operand (target, VOIDmode));
4172 gcc_assert (MEM_P (mem));
4174 init_alignment_context (&ac, mem, mode);
4176 /* Shift the values to the correct bit positions. */
4177 if (!(ac.aligned && MEM_P (cmp)))
4178 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4179 if (!(ac.aligned && MEM_P (new)))
4180 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4182 /* Load full word. Subsequent loads are performed by CS. */
4183 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4184 NULL_RTX, 1, OPTAB_DIRECT);
4186 /* Start CS loop. */
4187 emit_label (csloop);
4188 /* val = "<mem>00..0<mem>"
4189 * cmp = "00..0<cmp>00..0"
4190 * new = "00..0<new>00..0"
4191 */
4193 /* Patch cmp and new with val at correct position. */
4194 if (ac.aligned && MEM_P (cmp))
4196 cmpv = force_reg (SImode, val);
4197 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4199 else
4200 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4201 NULL_RTX, 1, OPTAB_DIRECT));
4202 if (ac.aligned && MEM_P (new))
4204 newv = force_reg (SImode, val);
4205 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4207 else
4208 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4209 NULL_RTX, 1, OPTAB_DIRECT));
4211 /* Jump to end if we're done (likely?). */
4212 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4213 cmpv, newv));
4215 /* Check for changes outside mode. */
4216 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4217 NULL_RTX, 1, OPTAB_DIRECT);
4218 cc = s390_emit_compare (NE, resv, val);
4219 emit_move_insn (val, resv);
4220 /* If so, loop back and retry. */
4221 s390_emit_jump (csloop, cc);
4223 emit_label (csend);
4225 /* Return the correct part of the bitfield. */
4226 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4227 NULL_RTX, 1, OPTAB_DIRECT), 1);
4230 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4231 and VAL the value to play with. If AFTER is true then store the value
4232 MEM holds after the operation; if AFTER is false then store the value MEM
4233 holds before the operation. If TARGET is zero then discard that value, else
4234 store it to TARGET. */
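/* Note on the MULT case below: MULT encodes the NAND operation.  Within
   MODE the new value is computed as ~cmp & val, while the bits outside
   MODE survive because VAL is first widened to "11..1<val>11..1". */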
4236 void
4237 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4238 rtx target, rtx mem, rtx val, bool after)
4240 struct alignment_context ac;
4241 rtx cmp;
4242 rtx new = gen_reg_rtx (SImode);
4243 rtx orig = gen_reg_rtx (SImode);
4244 rtx csloop = gen_label_rtx ();
4246 gcc_assert (!target || register_operand (target, VOIDmode));
4247 gcc_assert (MEM_P (mem));
4249 init_alignment_context (&ac, mem, mode);
4251 /* Shift val to the correct bit positions.
4252 Preserve "icm", but prevent "ex icm". */
4253 if (!(ac.aligned && code == SET && MEM_P (val)))
4254 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4256 /* Further preparation insns. */
4257 if (code == PLUS || code == MINUS)
4258 emit_move_insn (orig, val);
4259 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4260 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4261 NULL_RTX, 1, OPTAB_DIRECT);
4263 /* Load full word. Subsequent loads are performed by CS. */
4264 cmp = force_reg (SImode, ac.memsi);
4266 /* Start CS loop. */
4267 emit_label (csloop);
4268 emit_move_insn (new, cmp);
4270 /* Patch new with val at correct position. */
4271 switch (code)
4273 case PLUS:
4274 case MINUS:
4275 val = expand_simple_binop (SImode, code, new, orig,
4276 NULL_RTX, 1, OPTAB_DIRECT);
4277 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4278 NULL_RTX, 1, OPTAB_DIRECT);
4279 /* FALLTHRU */
4280 case SET:
4281 if (ac.aligned && MEM_P (val))
4282 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4283 else
4285 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4286 NULL_RTX, 1, OPTAB_DIRECT);
4287 new = expand_simple_binop (SImode, IOR, new, val,
4288 NULL_RTX, 1, OPTAB_DIRECT);
4290 break;
4291 case AND:
4292 case IOR:
4293 case XOR:
4294 new = expand_simple_binop (SImode, code, new, val,
4295 NULL_RTX, 1, OPTAB_DIRECT);
4296 break;
4297 case MULT: /* NAND */
4298 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4299 NULL_RTX, 1, OPTAB_DIRECT);
4300 new = expand_simple_binop (SImode, AND, new, val,
4301 NULL_RTX, 1, OPTAB_DIRECT);
4302 break;
4303 default:
4304 gcc_unreachable ();
4307 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4308 ac.memsi, cmp, new));
4310 /* Return the correct part of the bitfield. */
4311 if (target)
4312 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4313 after ? new : cmp, ac.shift,
4314 NULL_RTX, 1, OPTAB_DIRECT), 1);
4317 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4318 We need to emit DTP-relative relocations. */
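/* E.g. for SIZE == 4 and a symbol foo this emits "\t.long\tfoo@DTPOFF". */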
4320 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4322 static void
4323 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4325 switch (size)
4327 case 4:
4328 fputs ("\t.long\t", file);
4329 break;
4330 case 8:
4331 fputs ("\t.quad\t", file);
4332 break;
4333 default:
4334 gcc_unreachable ();
4336 output_addr_const (file, x);
4337 fputs ("@DTPOFF", file);
4340 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4341 /* Implement TARGET_MANGLE_FUNDAMENTAL_TYPE. */
4343 static const char *
4344 s390_mangle_fundamental_type (tree type)
4346 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4347 && TARGET_LONG_DOUBLE_128)
4348 return "g";
4350 /* For all other types, use normal C++ mangling. */
4351 return NULL;
4353 #endif
4355 /* In the name of slightly smaller debug output, and to cater to
4356 general assembler lossage, recognize various UNSPEC sequences
4357 and turn them back into a direct symbol reference. */
4359 static rtx
4360 s390_delegitimize_address (rtx orig_x)
4362 rtx x = orig_x, y;
4364 if (GET_CODE (x) != MEM)
4365 return orig_x;
4367 x = XEXP (x, 0);
4368 if (GET_CODE (x) == PLUS
4369 && GET_CODE (XEXP (x, 1)) == CONST
4370 && GET_CODE (XEXP (x, 0)) == REG
4371 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4373 y = XEXP (XEXP (x, 1), 0);
4374 if (GET_CODE (y) == UNSPEC
4375 && XINT (y, 1) == UNSPEC_GOT)
4376 return XVECEXP (y, 0, 0);
4377 return orig_x;
4380 if (GET_CODE (x) == CONST)
4382 y = XEXP (x, 0);
4383 if (GET_CODE (y) == UNSPEC
4384 && XINT (y, 1) == UNSPEC_GOTENT)
4385 return XVECEXP (y, 0, 0);
4386 return orig_x;
4389 return orig_x;
4392 /* Output operand OP to stdio stream FILE.
4393 OP is an address (register + offset) which is not used to address data;
4394 instead the rightmost bits are interpreted as the value. */
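/* E.g. the operand (plus (reg 1) (const_int 46)) is printed as
   "46(%r1)". */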
4396 static void
4397 print_shift_count_operand (FILE *file, rtx op)
4399 HOST_WIDE_INT offset;
4400 rtx base;
4402 /* Extract base register and offset. */
4403 if (!s390_decompose_shift_count (op, &base, &offset))
4404 gcc_unreachable ();
4406 /* Sanity check. */
4407 if (base)
4409 gcc_assert (GET_CODE (base) == REG);
4410 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4411 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4414 /* Offsets are restricted to twelve bits. */
4415 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4416 if (base)
4417 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4420 /* See 'get_some_local_dynamic_name'. */
4422 static int
4423 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4425 rtx x = *px;
4427 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4429 x = get_pool_constant (x);
4430 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4433 if (GET_CODE (x) == SYMBOL_REF
4434 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4436 cfun->machine->some_ld_name = XSTR (x, 0);
4437 return 1;
4440 return 0;
4443 /* Locate some local-dynamic symbol still in use by this function
4444 so that we can print its name in local-dynamic base patterns. */
4446 static const char *
4447 get_some_local_dynamic_name (void)
4449 rtx insn;
4451 if (cfun->machine->some_ld_name)
4452 return cfun->machine->some_ld_name;
4454 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4455 if (INSN_P (insn)
4456 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4457 return cfun->machine->some_ld_name;
4459 gcc_unreachable ();
4462 /* Output machine-dependent UNSPECs occurring in address constant X
4463 in assembler syntax to stdio stream FILE. Returns true if the
4464 constant X could be recognized, false otherwise. */
4466 bool
4467 s390_output_addr_const_extra (FILE *file, rtx x)
4469 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4470 switch (XINT (x, 1))
4472 case UNSPEC_GOTENT:
4473 output_addr_const (file, XVECEXP (x, 0, 0));
4474 fprintf (file, "@GOTENT");
4475 return true;
4476 case UNSPEC_GOT:
4477 output_addr_const (file, XVECEXP (x, 0, 0));
4478 fprintf (file, "@GOT");
4479 return true;
4480 case UNSPEC_GOTOFF:
4481 output_addr_const (file, XVECEXP (x, 0, 0));
4482 fprintf (file, "@GOTOFF");
4483 return true;
4484 case UNSPEC_PLT:
4485 output_addr_const (file, XVECEXP (x, 0, 0));
4486 fprintf (file, "@PLT");
4487 return true;
4488 case UNSPEC_PLTOFF:
4489 output_addr_const (file, XVECEXP (x, 0, 0));
4490 fprintf (file, "@PLTOFF");
4491 return true;
4492 case UNSPEC_TLSGD:
4493 output_addr_const (file, XVECEXP (x, 0, 0));
4494 fprintf (file, "@TLSGD");
4495 return true;
4496 case UNSPEC_TLSLDM:
4497 assemble_name (file, get_some_local_dynamic_name ());
4498 fprintf (file, "@TLSLDM");
4499 return true;
4500 case UNSPEC_DTPOFF:
4501 output_addr_const (file, XVECEXP (x, 0, 0));
4502 fprintf (file, "@DTPOFF");
4503 return true;
4504 case UNSPEC_NTPOFF:
4505 output_addr_const (file, XVECEXP (x, 0, 0));
4506 fprintf (file, "@NTPOFF");
4507 return true;
4508 case UNSPEC_GOTNTPOFF:
4509 output_addr_const (file, XVECEXP (x, 0, 0));
4510 fprintf (file, "@GOTNTPOFF");
4511 return true;
4512 case UNSPEC_INDNTPOFF:
4513 output_addr_const (file, XVECEXP (x, 0, 0));
4514 fprintf (file, "@INDNTPOFF");
4515 return true;
4518 return false;
4521 /* Output address operand ADDR in assembler syntax to
4522 stdio stream FILE. */
4524 void
4525 print_operand_address (FILE *file, rtx addr)
4527 struct s390_address ad;
4529 if (!s390_decompose_address (addr, &ad)
4530 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4531 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4532 output_operand_lossage ("cannot decompose address");
4534 if (ad.disp)
4535 output_addr_const (file, ad.disp);
4536 else
4537 fprintf (file, "0");
4539 if (ad.base && ad.indx)
4540 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4541 reg_names[REGNO (ad.base)]);
4542 else if (ad.base)
4543 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4546 /* Output operand X in assembler syntax to stdio stream FILE.
4547 CODE specifies the format flag. The following format flags
4548 are recognized:
4550 'C': print opcode suffix for branch condition.
4551 'D': print opcode suffix for inverse branch condition.
4552 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4553 'G': print the size of the operand in bytes.
4554 'O': print only the displacement of a memory reference.
4555 'R': print only the base register of a memory reference.
4556 'S': print S-type memory reference (base+displacement).
4557 'N': print the second word of a DImode operand.
4558 'M': print the second word of a TImode operand.
4559 'Y': print shift count operand.
4561 'b': print integer X as if it's an unsigned byte.
4562 'x': print integer X as if it's an unsigned halfword.
4563 'h': print integer X as if it's a signed halfword.
4564 'i': print the first nonzero HImode part of X.
4565 'j': print the first HImode part unequal to -1 of X.
4566 'k': print the first nonzero SImode part of X.
4567 'm': print the first SImode part unequal to -1 of X.
4568 'o': print integer X as if it's an unsigned 32bit word. */
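/* For instance, with X == (const_int 0x1fffe) the 'x' flag prints 65534
   while the 'h' flag prints -2, since 'h' sign-extends the low halfword:
   ((0xfffe ^ 0x8000) - 0x8000) == -2. */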
4570 void
4571 print_operand (FILE *file, rtx x, int code)
4573 switch (code)
4575 case 'C':
4576 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4577 return;
4579 case 'D':
4580 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4581 return;
4583 case 'J':
4584 if (GET_CODE (x) == SYMBOL_REF)
4586 fprintf (file, "%s", ":tls_load:");
4587 output_addr_const (file, x);
4589 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4591 fprintf (file, "%s", ":tls_gdcall:");
4592 output_addr_const (file, XVECEXP (x, 0, 0));
4594 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4596 fprintf (file, "%s", ":tls_ldcall:");
4597 assemble_name (file, get_some_local_dynamic_name ());
4599 else
4600 gcc_unreachable ();
4601 return;
4603 case 'G':
4604 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4605 return;
4607 case 'O':
4609 struct s390_address ad;
4610 int ret;
4612 gcc_assert (GET_CODE (x) == MEM);
4613 ret = s390_decompose_address (XEXP (x, 0), &ad);
4614 gcc_assert (ret);
4615 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4616 gcc_assert (!ad.indx);
4618 if (ad.disp)
4619 output_addr_const (file, ad.disp);
4620 else
4621 fprintf (file, "0");
4623 return;
4625 case 'R':
4627 struct s390_address ad;
4628 int ret;
4630 gcc_assert (GET_CODE (x) == MEM);
4631 ret = s390_decompose_address (XEXP (x, 0), &ad);
4632 gcc_assert (ret);
4633 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4634 gcc_assert (!ad.indx);
4636 if (ad.base)
4637 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4638 else
4639 fprintf (file, "0");
4641 return;
4643 case 'S':
4645 struct s390_address ad;
4646 int ret;
4648 gcc_assert (GET_CODE (x) == MEM);
4649 ret = s390_decompose_address (XEXP (x, 0), &ad);
4650 gcc_assert (ret);
4651 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4652 gcc_assert (!ad.indx);
4654 if (ad.disp)
4655 output_addr_const (file, ad.disp);
4656 else
4657 fprintf (file, "0");
4659 if (ad.base)
4660 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4662 return;
4664 case 'N':
4665 if (GET_CODE (x) == REG)
4666 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4667 else if (GET_CODE (x) == MEM)
4668 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4669 else
4670 gcc_unreachable ();
4671 break;
4673 case 'M':
4674 if (GET_CODE (x) == REG)
4675 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4676 else if (GET_CODE (x) == MEM)
4677 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4678 else
4679 gcc_unreachable ();
4680 break;
4682 case 'Y':
4683 print_shift_count_operand (file, x);
4684 return;
4687 switch (GET_CODE (x))
4689 case REG:
4690 fprintf (file, "%s", reg_names[REGNO (x)]);
4691 break;
4693 case MEM:
4694 output_address (XEXP (x, 0));
4695 break;
4697 case CONST:
4698 case CODE_LABEL:
4699 case LABEL_REF:
4700 case SYMBOL_REF:
4701 output_addr_const (file, x);
4702 break;
4704 case CONST_INT:
4705 if (code == 'b')
4706 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4707 else if (code == 'x')
4708 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4709 else if (code == 'h')
4710 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4711 else if (code == 'i')
4712 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4713 s390_extract_part (x, HImode, 0));
4714 else if (code == 'j')
4715 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4716 s390_extract_part (x, HImode, -1));
4717 else if (code == 'k')
4718 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4719 s390_extract_part (x, SImode, 0));
4720 else if (code == 'm')
4721 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4722 s390_extract_part (x, SImode, -1));
4723 else if (code == 'o')
4724 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
4725 else
4726 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4727 break;
4729 case CONST_DOUBLE:
4730 gcc_assert (GET_MODE (x) == VOIDmode);
4731 if (code == 'b')
4732 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4733 else if (code == 'x')
4734 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4735 else if (code == 'h')
4736 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4737 else
4738 gcc_unreachable ();
4739 break;
4741 default:
4742 fatal_insn ("UNKNOWN in print_operand !?", x);
4743 break;
4747 /* Target hook for assembling integer objects. We need to define it
4748 here to work around a bug in some versions of GAS, which couldn't
4749 handle values smaller than INT_MIN when printed in decimal. */
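/* E.g. (const_int -2147483649) is emitted as
   "\t.quad\t0xffffffff7fffffff" instead of in decimal. */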
4751 static bool
4752 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4754 if (size == 8 && aligned_p
4755 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4757 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4758 INTVAL (x));
4759 return true;
4761 return default_assemble_integer (x, size, aligned_p);
4764 /* Returns true if register REGNO is used for forming
4765 a memory address in expression X. */
4767 static bool
4768 reg_used_in_mem_p (int regno, rtx x)
4770 enum rtx_code code = GET_CODE (x);
4771 int i, j;
4772 const char *fmt;
4774 if (code == MEM)
4776 if (refers_to_regno_p (regno, regno+1,
4777 XEXP (x, 0), 0))
4778 return true;
4780 else if (code == SET
4781 && GET_CODE (SET_DEST (x)) == PC)
4783 if (refers_to_regno_p (regno, regno+1,
4784 SET_SRC (x), 0))
4785 return true;
4788 fmt = GET_RTX_FORMAT (code);
4789 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4791 if (fmt[i] == 'e'
4792 && reg_used_in_mem_p (regno, XEXP (x, i)))
4793 return true;
4795 else if (fmt[i] == 'E')
4796 for (j = 0; j < XVECLEN (x, i); j++)
4797 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4798 return true;
4800 return false;
4803 /* Returns true if expression DEP_RTX sets an address register
4804 used by instruction INSN to address memory. */
4806 static bool
4807 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4809 rtx target, pat;
4811 if (GET_CODE (dep_rtx) == INSN)
4812 dep_rtx = PATTERN (dep_rtx);
4814 if (GET_CODE (dep_rtx) == SET)
4816 target = SET_DEST (dep_rtx);
4817 if (GET_CODE (target) == STRICT_LOW_PART)
4818 target = XEXP (target, 0);
4819 while (GET_CODE (target) == SUBREG)
4820 target = SUBREG_REG (target);
4822 if (GET_CODE (target) == REG)
4824 int regno = REGNO (target);
4826 if (s390_safe_attr_type (insn) == TYPE_LA)
4828 pat = PATTERN (insn);
4829 if (GET_CODE (pat) == PARALLEL)
4831 gcc_assert (XVECLEN (pat, 0) == 2);
4832 pat = XVECEXP (pat, 0, 0);
4834 gcc_assert (GET_CODE (pat) == SET);
4835 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4837 else if (get_attr_atype (insn) == ATYPE_AGEN)
4838 return reg_used_in_mem_p (regno, PATTERN (insn));
4841 return false;
4844 /* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
4846 int
4847 s390_agen_dep_p (rtx dep_insn, rtx insn)
4849 rtx dep_rtx = PATTERN (dep_insn);
4850 int i;
4852 if (GET_CODE (dep_rtx) == SET
4853 && addr_generation_dependency_p (dep_rtx, insn))
4854 return 1;
4855 else if (GET_CODE (dep_rtx) == PARALLEL)
4857 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4859 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4860 return 1;
4863 return 0;
4866 /* A C statement (sans semicolon) to update the integer scheduling priority
4867 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4868 reduce the priority to execute INSN later. Do not define this macro if
4869 you do not need to adjust the scheduling priorities of insns.
4871 A STD instruction should be scheduled earlier,
4872 in order to use the bypass. */
4874 static int
4875 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4877 if (! INSN_P (insn))
4878 return priority;
4880 if (s390_tune != PROCESSOR_2084_Z990
4881 && s390_tune != PROCESSOR_2094_Z9_109)
4882 return priority;
4884 switch (s390_safe_attr_type (insn))
4886 case TYPE_FSTOREDF:
4887 case TYPE_FSTORESF:
4888 priority = priority << 3;
4889 break;
4890 case TYPE_STORE:
4891 case TYPE_STM:
4892 priority = priority << 1;
4893 break;
4894 default:
4895 break;
4897 return priority;
4900 /* The number of instructions that can be issued per cycle. */
4902 static int
4903 s390_issue_rate (void)
4905 if (s390_tune == PROCESSOR_2084_Z990
4906 || s390_tune == PROCESSOR_2094_Z9_109)
4907 return 3;
4908 return 1;
4911 static int
4912 s390_first_cycle_multipass_dfa_lookahead (void)
4914 return 4;
4918 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4919 Fix up MEMs as required. */
4921 static void
4922 annotate_constant_pool_refs (rtx *x)
4924 int i, j;
4925 const char *fmt;
4927 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4928 || !CONSTANT_POOL_ADDRESS_P (*x));
4930 /* Literal pool references can only occur inside a MEM ... */
4931 if (GET_CODE (*x) == MEM)
4933 rtx memref = XEXP (*x, 0);
4935 if (GET_CODE (memref) == SYMBOL_REF
4936 && CONSTANT_POOL_ADDRESS_P (memref))
4938 rtx base = cfun->machine->base_reg;
4939 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4940 UNSPEC_LTREF);
4942 *x = replace_equiv_address (*x, addr);
4943 return;
4946 if (GET_CODE (memref) == CONST
4947 && GET_CODE (XEXP (memref, 0)) == PLUS
4948 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4949 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4950 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4952 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4953 rtx sym = XEXP (XEXP (memref, 0), 0);
4954 rtx base = cfun->machine->base_reg;
4955 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4956 UNSPEC_LTREF);
4958 *x = replace_equiv_address (*x, plus_constant (addr, off));
4959 return;
4963 /* ... or a load-address type pattern. */
4964 if (GET_CODE (*x) == SET)
4966 rtx addrref = SET_SRC (*x);
4968 if (GET_CODE (addrref) == SYMBOL_REF
4969 && CONSTANT_POOL_ADDRESS_P (addrref))
4971 rtx base = cfun->machine->base_reg;
4972 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4973 UNSPEC_LTREF);
4975 SET_SRC (*x) = addr;
4976 return;
4979 if (GET_CODE (addrref) == CONST
4980 && GET_CODE (XEXP (addrref, 0)) == PLUS
4981 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4982 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4983 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4985 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4986 rtx sym = XEXP (XEXP (addrref, 0), 0);
4987 rtx base = cfun->machine->base_reg;
4988 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4989 UNSPEC_LTREF);
4991 SET_SRC (*x) = plus_constant (addr, off);
4992 return;
4996 /* Annotate LTREL_BASE as well. */
4997 if (GET_CODE (*x) == UNSPEC
4998 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5000 rtx base = cfun->machine->base_reg;
5001 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5002 UNSPEC_LTREL_BASE);
5003 return;
5006 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5007 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5009 if (fmt[i] == 'e')
5011 annotate_constant_pool_refs (&XEXP (*x, i));
5013 else if (fmt[i] == 'E')
5015 for (j = 0; j < XVECLEN (*x, i); j++)
5016 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5021 /* Split all branches that exceed the maximum distance.
5022 Returns true if this created a new literal pool entry. */
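/* The relative branch instructions use a signed 16-bit halfword offset,
   giving a range of roughly +-64KB; a branch beyond that is rewritten
   below to go through an address loaded into the return register. */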
5024 static int
5025 s390_split_branches (void)
5027 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5028 int new_literal = 0, ret;
5029 rtx insn, pat, tmp, target;
5030 rtx *label;
5032 /* We need correct insn addresses. */
5034 shorten_branches (get_insns ());
5036 /* Find all branches that exceed 64KB, and split them. */
5038 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5040 if (GET_CODE (insn) != JUMP_INSN)
5041 continue;
5043 pat = PATTERN (insn);
5044 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5045 pat = XVECEXP (pat, 0, 0);
5046 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5047 continue;
5049 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5051 label = &SET_SRC (pat);
5053 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5055 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5056 label = &XEXP (SET_SRC (pat), 1);
5057 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5058 label = &XEXP (SET_SRC (pat), 2);
5059 else
5060 continue;
5062 else
5063 continue;
5065 if (get_attr_length (insn) <= 4)
5066 continue;
5068 /* We are going to use the return register as a scratch register;
5069 make sure it will be saved/restored by the prologue/epilogue. */
5070 cfun_frame_layout.save_return_addr_p = 1;
5072 if (!flag_pic)
5074 new_literal = 1;
5075 tmp = force_const_mem (Pmode, *label);
5076 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5077 INSN_ADDRESSES_NEW (tmp, -1);
5078 annotate_constant_pool_refs (&PATTERN (tmp));
5080 target = temp_reg;
5082 else
5084 new_literal = 1;
5085 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5086 UNSPEC_LTREL_OFFSET);
5087 target = gen_rtx_CONST (Pmode, target);
5088 target = force_const_mem (Pmode, target);
5089 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5090 INSN_ADDRESSES_NEW (tmp, -1);
5091 annotate_constant_pool_refs (&PATTERN (tmp));
5093 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5094 cfun->machine->base_reg),
5095 UNSPEC_LTREL_BASE);
5096 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5099 ret = validate_change (insn, label, target, 0);
5100 gcc_assert (ret);
5103 return new_literal;
5107 /* Find an annotated literal pool symbol referenced in RTX X,
5108 and store it at REF. Will abort if X contains references to
5109 more than one such pool symbol; multiple references to the same
5110 symbol are allowed, however.
5112 The rtx pointed to by REF must be initialized to NULL_RTX
5113 by the caller before calling this routine. */
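/* Typical use, as in s390_mainpool_start below:

     rtx pool_ref = NULL_RTX;
     find_constant_pool_ref (PATTERN (insn), &pool_ref);
     if (pool_ref)
       ...  */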
5115 static void
5116 find_constant_pool_ref (rtx x, rtx *ref)
5118 int i, j;
5119 const char *fmt;
5121 /* Ignore LTREL_BASE references. */
5122 if (GET_CODE (x) == UNSPEC
5123 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5124 return;
5125 /* Likewise POOL_ENTRY insns. */
5126 if (GET_CODE (x) == UNSPEC_VOLATILE
5127 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5128 return;
5130 gcc_assert (GET_CODE (x) != SYMBOL_REF
5131 || !CONSTANT_POOL_ADDRESS_P (x));
5133 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5135 rtx sym = XVECEXP (x, 0, 0);
5136 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5137 && CONSTANT_POOL_ADDRESS_P (sym));
5139 if (*ref == NULL_RTX)
5140 *ref = sym;
5141 else
5142 gcc_assert (*ref == sym);
5144 return;
5147 fmt = GET_RTX_FORMAT (GET_CODE (x));
5148 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5150 if (fmt[i] == 'e')
5152 find_constant_pool_ref (XEXP (x, i), ref);
5154 else if (fmt[i] == 'E')
5156 for (j = 0; j < XVECLEN (x, i); j++)
5157 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5162 /* Replace every reference to the annotated literal pool
5163 symbol REF in X by its base plus OFFSET. */
5165 static void
5166 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5168 int i, j;
5169 const char *fmt;
5171 gcc_assert (*x != ref);
5173 if (GET_CODE (*x) == UNSPEC
5174 && XINT (*x, 1) == UNSPEC_LTREF
5175 && XVECEXP (*x, 0, 0) == ref)
5177 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5178 return;
5181 if (GET_CODE (*x) == PLUS
5182 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5183 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5184 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5185 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5187 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5188 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5189 return;
5192 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5193 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5195 if (fmt[i] == 'e')
5197 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5199 else if (fmt[i] == 'E')
5201 for (j = 0; j < XVECLEN (*x, i); j++)
5202 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5207 /* Check whether X contains an UNSPEC_LTREL_BASE.
5208 Return its constant pool symbol if found, NULL_RTX otherwise. */
5210 static rtx
5211 find_ltrel_base (rtx x)
5213 int i, j;
5214 const char *fmt;
5216 if (GET_CODE (x) == UNSPEC
5217 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5218 return XVECEXP (x, 0, 0);
5220 fmt = GET_RTX_FORMAT (GET_CODE (x));
5221 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5223 if (fmt[i] == 'e')
5225 rtx fnd = find_ltrel_base (XEXP (x, i));
5226 if (fnd)
5227 return fnd;
5229 else if (fmt[i] == 'E')
5231 for (j = 0; j < XVECLEN (x, i); j++)
5233 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5234 if (fnd)
5235 return fnd;
5240 return NULL_RTX;
5243 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5245 static void
5246 replace_ltrel_base (rtx *x)
5248 int i, j;
5249 const char *fmt;
5251 if (GET_CODE (*x) == UNSPEC
5252 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5254 *x = XVECEXP (*x, 0, 1);
5255 return;
5258 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5259 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5261 if (fmt[i] == 'e')
5263 replace_ltrel_base (&XEXP (*x, i));
5265 else if (fmt[i] == 'E')
5267 for (j = 0; j < XVECLEN (*x, i); j++)
5268 replace_ltrel_base (&XVECEXP (*x, i, j));
5274 /* We keep a list of constants which we have to add to internal
5275 constant tables in the middle of large functions. */
5277 #define NR_C_MODES 11
5278 enum machine_mode constant_modes[NR_C_MODES] =
5280 TFmode, TImode, TDmode,
5281 DFmode, DImode, DDmode,
5282 SFmode, SImode, SDmode,
5283 HImode,
5284 QImode
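/* The modes above are sorted by descending size; s390_dump_pool relies
   on this order to keep every pool entry naturally aligned. */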
5287 struct constant
5289 struct constant *next;
5290 rtx value;
5291 rtx label;
5294 struct constant_pool
5296 struct constant_pool *next;
5297 rtx first_insn;
5298 rtx pool_insn;
5299 bitmap insns;
5301 struct constant *constants[NR_C_MODES];
5302 struct constant *execute;
5303 rtx label;
5304 int size;
5307 /* Allocate new constant_pool structure. */
5309 static struct constant_pool *
5310 s390_alloc_pool (void)
5312 struct constant_pool *pool;
5313 int i;
5315 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5316 pool->next = NULL;
5317 for (i = 0; i < NR_C_MODES; i++)
5318 pool->constants[i] = NULL;
5320 pool->execute = NULL;
5321 pool->label = gen_label_rtx ();
5322 pool->first_insn = NULL_RTX;
5323 pool->pool_insn = NULL_RTX;
5324 pool->insns = BITMAP_ALLOC (NULL);
5325 pool->size = 0;
5327 return pool;
5330 /* Create new constant pool covering instructions starting at INSN
5331 and chain it to the end of POOL_LIST. */
5333 static struct constant_pool *
5334 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5336 struct constant_pool *pool, **prev;
5338 pool = s390_alloc_pool ();
5339 pool->first_insn = insn;
5341 for (prev = pool_list; *prev; prev = &(*prev)->next)
5343 *prev = pool;
5345 return pool;
5348 /* End range of instructions covered by POOL at INSN and emit
5349 placeholder insn representing the pool. */
5351 static void
5352 s390_end_pool (struct constant_pool *pool, rtx insn)
5354 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5356 if (!insn)
5357 insn = get_last_insn ();
5359 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5360 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5363 /* Add INSN to the list of insns covered by POOL. */
5365 static void
5366 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5368 bitmap_set_bit (pool->insns, INSN_UID (insn));
5371 /* Return pool out of POOL_LIST that covers INSN. */
5373 static struct constant_pool *
5374 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5376 struct constant_pool *pool;
5378 for (pool = pool_list; pool; pool = pool->next)
5379 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5380 break;
5382 return pool;
5385 /* Add constant VAL of mode MODE to the constant pool POOL. */
5387 static void
5388 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5390 struct constant *c;
5391 int i;
5393 for (i = 0; i < NR_C_MODES; i++)
5394 if (constant_modes[i] == mode)
5395 break;
5396 gcc_assert (i != NR_C_MODES);
5398 for (c = pool->constants[i]; c != NULL; c = c->next)
5399 if (rtx_equal_p (val, c->value))
5400 break;
5402 if (c == NULL)
5404 c = (struct constant *) xmalloc (sizeof *c);
5405 c->value = val;
5406 c->label = gen_label_rtx ();
5407 c->next = pool->constants[i];
5408 pool->constants[i] = c;
5409 pool->size += GET_MODE_SIZE (mode);
5413 /* Find constant VAL of mode MODE in the constant pool POOL.
5414 Return an RTX describing the distance from the start of
5415 the pool to the location of the new constant. */
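/* The returned RTX has the shape
   (const (minus (label_ref CONSTANT) (label_ref POOL_BASE))),
   i.e. a displacement relative to the pool base label. */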
5417 static rtx
5418 s390_find_constant (struct constant_pool *pool, rtx val,
5419 enum machine_mode mode)
5421 struct constant *c;
5422 rtx offset;
5423 int i;
5425 for (i = 0; i < NR_C_MODES; i++)
5426 if (constant_modes[i] == mode)
5427 break;
5428 gcc_assert (i != NR_C_MODES);
5430 for (c = pool->constants[i]; c != NULL; c = c->next)
5431 if (rtx_equal_p (val, c->value))
5432 break;
5434 gcc_assert (c);
5436 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5437 gen_rtx_LABEL_REF (Pmode, pool->label));
5438 offset = gen_rtx_CONST (Pmode, offset);
5439 return offset;
5442 /* Check whether INSN is an execute. Return the label_ref to its
5443 execute target template if so, NULL_RTX otherwise. */
5445 static rtx
5446 s390_execute_label (rtx insn)
5448 if (GET_CODE (insn) == INSN
5449 && GET_CODE (PATTERN (insn)) == PARALLEL
5450 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5451 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5452 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5454 return NULL_RTX;
5457 /* Add execute target for INSN to the constant pool POOL. */
5459 static void
5460 s390_add_execute (struct constant_pool *pool, rtx insn)
5462 struct constant *c;
5464 for (c = pool->execute; c != NULL; c = c->next)
5465 if (INSN_UID (insn) == INSN_UID (c->value))
5466 break;
5468 if (c == NULL)
5470 c = (struct constant *) xmalloc (sizeof *c);
5471 c->value = insn;
5472 c->label = gen_label_rtx ();
5473 c->next = pool->execute;
5474 pool->execute = c;
5475 pool->size += 6;
5479 /* Find execute target for INSN in the constant pool POOL.
5480 Return an RTX describing the distance from the start of
5481 the pool to the location of the execute target. */
5483 static rtx
5484 s390_find_execute (struct constant_pool *pool, rtx insn)
5486 struct constant *c;
5487 rtx offset;
5489 for (c = pool->execute; c != NULL; c = c->next)
5490 if (INSN_UID (insn) == INSN_UID (c->value))
5491 break;
5493 gcc_assert (c);
5495 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5496 gen_rtx_LABEL_REF (Pmode, pool->label));
5497 offset = gen_rtx_CONST (Pmode, offset);
5498 return offset;
5501 /* For an execute INSN, extract the execute target template. */
5503 static rtx
5504 s390_execute_target (rtx insn)
5506 rtx pattern = PATTERN (insn);
5507 gcc_assert (s390_execute_label (insn));
5509 if (XVECLEN (pattern, 0) == 2)
5511 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5513 else
5515 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5516 int i;
5518 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5519 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5521 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5524 return pattern;
5527 /* Indicate that INSN cannot be duplicated. This is the case for
5528 execute insns that carry a unique label. */
5530 static bool
5531 s390_cannot_copy_insn_p (rtx insn)
5533 rtx label = s390_execute_label (insn);
5534 return label && label != const0_rtx;
5537 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5538 do not emit the pool base label. */
5540 static void
5541 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5543 struct constant *c;
5544 rtx insn = pool->pool_insn;
5545 int i;
5547 /* Switch to rodata section. */
5548 if (TARGET_CPU_ZARCH)
5550 insn = emit_insn_after (gen_pool_section_start (), insn);
5551 INSN_ADDRESSES_NEW (insn, -1);
5554 /* Ensure minimum pool alignment. */
5555 if (TARGET_CPU_ZARCH)
5556 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5557 else
5558 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5559 INSN_ADDRESSES_NEW (insn, -1);
5561 /* Emit pool base label. */
5562 if (!remote_label)
5564 insn = emit_label_after (pool->label, insn);
5565 INSN_ADDRESSES_NEW (insn, -1);
5568 /* Dump constants in descending alignment requirement order,
5569 ensuring proper alignment for every constant. */
5570 for (i = 0; i < NR_C_MODES; i++)
5571 for (c = pool->constants[i]; c; c = c->next)
5573 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5574 rtx value = c->value;
5575 if (GET_CODE (value) == CONST
5576 && GET_CODE (XEXP (value, 0)) == UNSPEC
5577 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5578 && XVECLEN (XEXP (value, 0), 0) == 1)
5580 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5581 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5582 value = gen_rtx_CONST (VOIDmode, value);
5585 insn = emit_label_after (c->label, insn);
5586 INSN_ADDRESSES_NEW (insn, -1);
5588 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5589 gen_rtvec (1, value),
5590 UNSPECV_POOL_ENTRY);
5591 insn = emit_insn_after (value, insn);
5592 INSN_ADDRESSES_NEW (insn, -1);
5595 /* Ensure minimum alignment for instructions. */
5596 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5597 INSN_ADDRESSES_NEW (insn, -1);
5599 /* Output in-pool execute template insns. */
5600 for (c = pool->execute; c; c = c->next)
5602 insn = emit_label_after (c->label, insn);
5603 INSN_ADDRESSES_NEW (insn, -1);
5605 insn = emit_insn_after (s390_execute_target (c->value), insn);
5606 INSN_ADDRESSES_NEW (insn, -1);
5609 /* Switch back to previous section. */
5610 if (TARGET_CPU_ZARCH)
5612 insn = emit_insn_after (gen_pool_section_end (), insn);
5613 INSN_ADDRESSES_NEW (insn, -1);
5616 insn = emit_barrier_after (insn);
5617 INSN_ADDRESSES_NEW (insn, -1);
5619 /* Remove placeholder insn. */
5620 remove_insn (pool->pool_insn);
5623 /* Free all memory used by POOL. */
5625 static void
5626 s390_free_pool (struct constant_pool *pool)
5628 struct constant *c, *next;
5629 int i;
5631 for (i = 0; i < NR_C_MODES; i++)
5632 for (c = pool->constants[i]; c; c = next)
5634 next = c->next;
5635 free (c);
5638 for (c = pool->execute; c; c = next)
5640 next = c->next;
5641 free (c);
5644 BITMAP_FREE (pool->insns);
5645 free (pool);
5649 /* Collect main literal pool. Return NULL on overflow. */
5651 static struct constant_pool *
5652 s390_mainpool_start (void)
5654 struct constant_pool *pool;
5655 rtx insn;
5657 pool = s390_alloc_pool ();
5659 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5661 if (GET_CODE (insn) == INSN
5662 && GET_CODE (PATTERN (insn)) == SET
5663 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5664 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5666 gcc_assert (!pool->pool_insn);
5667 pool->pool_insn = insn;
5670 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5672 s390_add_execute (pool, insn);
5674 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5676 rtx pool_ref = NULL_RTX;
5677 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5678 if (pool_ref)
5680 rtx constant = get_pool_constant (pool_ref);
5681 enum machine_mode mode = get_pool_mode (pool_ref);
5682 s390_add_constant (pool, constant, mode);
5687 gcc_assert (pool->pool_insn || pool->size == 0);
5689 if (pool->size >= 4096)
5691 /* We're going to chunkify the pool, so remove the main
5692 pool placeholder insn. */
5693 remove_insn (pool->pool_insn);
5695 s390_free_pool (pool);
5696 pool = NULL;
5699 return pool;
5702 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5703 Modify the current function to output the pool constants as well as
5704 the pool register setup instruction. */
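/* Three layout strategies follow: on zSeries the pool is addressed via
   LARL and emitted after the function; on S/390 the base is set up with
   BASR and the pool placed at the end of the function if code plus pool
   fit in 4096 bytes, otherwise an inline pool is emitted and branched
   over. */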
5706 static void
5707 s390_mainpool_finish (struct constant_pool *pool)
5709 rtx base_reg = cfun->machine->base_reg;
5710 rtx insn;
5712 /* If the pool is empty, we're done. */
5713 if (pool->size == 0)
5715 /* We don't actually need a base register after all. */
5716 cfun->machine->base_reg = NULL_RTX;
5718 if (pool->pool_insn)
5719 remove_insn (pool->pool_insn);
5720 s390_free_pool (pool);
5721 return;
5724 /* We need correct insn addresses. */
5725 shorten_branches (get_insns ());
5727 /* On zSeries, we use a LARL to load the pool register. The pool is
5728 located in the .rodata section, so we emit it after the function. */
5729 if (TARGET_CPU_ZARCH)
5731 insn = gen_main_base_64 (base_reg, pool->label);
5732 insn = emit_insn_after (insn, pool->pool_insn);
5733 INSN_ADDRESSES_NEW (insn, -1);
5734 remove_insn (pool->pool_insn);
5736 insn = get_last_insn ();
5737 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5738 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5740 s390_dump_pool (pool, 0);
5743 /* On S/390, if the total size of the function's code plus literal pool
5744 does not exceed 4096 bytes, we use BASR to set up a function base
5745 pointer, and emit the literal pool at the end of the function. */
5746 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5747 + pool->size + 8 /* alignment slop */ < 4096)
5749 insn = gen_main_base_31_small (base_reg, pool->label);
5750 insn = emit_insn_after (insn, pool->pool_insn);
5751 INSN_ADDRESSES_NEW (insn, -1);
5752 remove_insn (pool->pool_insn);
5754 insn = emit_label_after (pool->label, insn);
5755 INSN_ADDRESSES_NEW (insn, -1);
5757 insn = get_last_insn ();
5758 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5759 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5761 s390_dump_pool (pool, 1);
5764 /* Otherwise, we emit an inline literal pool and use BASR to branch
5765 over it, setting up the pool register at the same time. */
5766 else
5768 rtx pool_end = gen_label_rtx ();
5770 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5771 insn = emit_insn_after (insn, pool->pool_insn);
5772 INSN_ADDRESSES_NEW (insn, -1);
5773 remove_insn (pool->pool_insn);
5775 insn = emit_label_after (pool->label, insn);
5776 INSN_ADDRESSES_NEW (insn, -1);
5778 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5779 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5781 insn = emit_label_after (pool_end, pool->pool_insn);
5782 INSN_ADDRESSES_NEW (insn, -1);
5784 s390_dump_pool (pool, 1);
5788 /* Replace all literal pool references. */
5790 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5792 if (INSN_P (insn))
5793 replace_ltrel_base (&PATTERN (insn));
5795 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5797 rtx addr, pool_ref = NULL_RTX;
5798 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5799 if (pool_ref)
5801 if (s390_execute_label (insn))
5802 addr = s390_find_execute (pool, insn);
5803 else
5804 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5805 get_pool_mode (pool_ref));
5807 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5808 INSN_CODE (insn) = -1;
5814 /* Free the pool. */
5815 s390_free_pool (pool);
5818 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5819 We have decided we cannot use this pool, so revert all changes
5820 to the current function that were done by s390_mainpool_start. */
5821 static void
5822 s390_mainpool_cancel (struct constant_pool *pool)
5824 /* We didn't actually change the instruction stream, so simply
5825 free the pool memory. */
5826 s390_free_pool (pool);
5830 /* Chunkify the literal pool. */
5832 #define S390_POOL_CHUNK_MIN 0xc00
5833 #define S390_POOL_CHUNK_MAX 0xe00
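/* The chunk bounds stay well below the 4096 bytes reachable from the
   pool base with a 12-bit displacement, presumably to leave slack for
   alignment padding and the base reload insns inserted later. */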
5835 static struct constant_pool *
5836 s390_chunkify_start (void)
5838 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5839 int extra_size = 0;
5840 bitmap far_labels;
5841 rtx pending_ltrel = NULL_RTX;
5842 rtx insn;
5844 rtx (*gen_reload_base) (rtx, rtx) =
5845 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5848 /* We need correct insn addresses. */
5850 shorten_branches (get_insns ());
5852 /* Scan all insns and move literals to pool chunks. */
5854 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5856 /* Check for pending LTREL_BASE. */
5857 if (INSN_P (insn))
5859 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5860 if (ltrel_base)
5862 gcc_assert (ltrel_base == pending_ltrel);
5863 pending_ltrel = NULL_RTX;
5867 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5869 if (!curr_pool)
5870 curr_pool = s390_start_pool (&pool_list, insn);
5872 s390_add_execute (curr_pool, insn);
5873 s390_add_pool_insn (curr_pool, insn);
5875 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5877 rtx pool_ref = NULL_RTX;
5878 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5879 if (pool_ref)
5881 rtx constant = get_pool_constant (pool_ref);
5882 enum machine_mode mode = get_pool_mode (pool_ref);
5884 if (!curr_pool)
5885 curr_pool = s390_start_pool (&pool_list, insn);
5887 s390_add_constant (curr_pool, constant, mode);
5888 s390_add_pool_insn (curr_pool, insn);
5890 /* Don't split the pool chunk between a LTREL_OFFSET load
5891 and the corresponding LTREL_BASE. */
5892 if (GET_CODE (constant) == CONST
5893 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5894 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5896 gcc_assert (!pending_ltrel);
5897 pending_ltrel = pool_ref;
5902 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5904 if (curr_pool)
5905 s390_add_pool_insn (curr_pool, insn);
5906 /* An LTREL_BASE must follow within the same basic block. */
5907 gcc_assert (!pending_ltrel);
5910 if (!curr_pool
5911 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5912 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5913 continue;
5915 if (TARGET_CPU_ZARCH)
5917 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5918 continue;
5920 s390_end_pool (curr_pool, NULL_RTX);
5921 curr_pool = NULL;
5923 else
5925 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5926 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5927 + extra_size;
5929 /* We will later have to insert base register reload insns.
5930 Those will have an effect on code size, which we need to
5931 consider here. This calculation makes rather pessimistic
5932 worst-case assumptions. */
5933 if (GET_CODE (insn) == CODE_LABEL)
5934 extra_size += 6;
5936 if (chunk_size < S390_POOL_CHUNK_MIN
5937 && curr_pool->size < S390_POOL_CHUNK_MIN)
5938 continue;
5940 /* Pool chunks can only be inserted after BARRIERs ... */
5941 if (GET_CODE (insn) == BARRIER)
5943 s390_end_pool (curr_pool, insn);
5944 curr_pool = NULL;
5945 extra_size = 0;
5948 /* ... so if we don't find one in time, create one. */
5949 else if ((chunk_size > S390_POOL_CHUNK_MAX
5950 || curr_pool->size > S390_POOL_CHUNK_MAX))
5952 rtx label, jump, barrier;
5954 /* We can insert the barrier only after a 'real' insn. */
5955 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5956 continue;
5957 if (get_attr_length (insn) == 0)
5958 continue;
5960 /* Don't separate LTREL_BASE from the corresponding
5961 LTREL_OFFSET load. */
5962 if (pending_ltrel)
5963 continue;
5965 label = gen_label_rtx ();
5966 jump = emit_jump_insn_after (gen_jump (label), insn);
5967 barrier = emit_barrier_after (jump);
5968 insn = emit_label_after (label, barrier);
5969 JUMP_LABEL (jump) = label;
5970 LABEL_NUSES (label) = 1;
5972 INSN_ADDRESSES_NEW (jump, -1);
5973 INSN_ADDRESSES_NEW (barrier, -1);
5974 INSN_ADDRESSES_NEW (insn, -1);
5976 s390_end_pool (curr_pool, barrier);
5977 curr_pool = NULL;
5978 extra_size = 0;
5983 if (curr_pool)
5984 s390_end_pool (curr_pool, NULL_RTX);
5985 gcc_assert (!pending_ltrel);
5987 /* Find all labels that are branched into
5988 from an insn belonging to a different chunk. */
5990 far_labels = BITMAP_ALLOC (NULL);
5992 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5994 /* Labels marked with LABEL_PRESERVE_P can be the target
5995 of non-local jumps, so we have to mark them.
5996 The same holds for named labels.
5998 Don't do that, however, if it is the label before
5999 a jump table. */
6001 if (GET_CODE (insn) == CODE_LABEL
6002 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6004 rtx vec_insn = next_real_insn (insn);
6005 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6006 PATTERN (vec_insn) : NULL_RTX;
6007 if (!vec_pat
6008 || !(GET_CODE (vec_pat) == ADDR_VEC
6009 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6010 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6013 /* If we have a direct jump (conditional or unconditional)
6014 or a casesi jump, check all potential targets. */
6015 else if (GET_CODE (insn) == JUMP_INSN)
6017 rtx pat = PATTERN (insn);
6018 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6019 pat = XVECEXP (pat, 0, 0);
6021 if (GET_CODE (pat) == SET)
6023 rtx label = JUMP_LABEL (insn);
6024 if (label)
6026 if (s390_find_pool (pool_list, label)
6027 != s390_find_pool (pool_list, insn))
6028 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6031 else if (GET_CODE (pat) == PARALLEL
6032 && XVECLEN (pat, 0) == 2
6033 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6034 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6035 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6037 /* Find the jump table used by this casesi jump. */
6038 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6039 rtx vec_insn = next_real_insn (vec_label);
6040 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6041 PATTERN (vec_insn) : NULL_RTX;
6042 if (vec_pat
6043 && (GET_CODE (vec_pat) == ADDR_VEC
6044 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6046 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6048 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6050 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6052 if (s390_find_pool (pool_list, label)
6053 != s390_find_pool (pool_list, insn))
6054 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6061 /* Insert base register reload insns before every pool. */
6063 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6065 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6066 curr_pool->label);
6067 rtx insn = curr_pool->first_insn;
6068 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6071 /* Insert base register reload insns at every far label. */
6073 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6074 if (GET_CODE (insn) == CODE_LABEL
6075 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6077 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6078 if (pool)
6080 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6081 pool->label);
6082 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6087 BITMAP_FREE (far_labels);
6090 /* Recompute insn addresses. */
6092 init_insn_lengths ();
6093 shorten_branches (get_insns ());
6095 return pool_list;
6098 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6099 After we have decided to use this list, finish implementing
6100 all changes to the current function as required. */
6102 static void
6103 s390_chunkify_finish (struct constant_pool *pool_list)
6105 struct constant_pool *curr_pool = NULL;
6106 rtx insn;
6109 /* Replace all literal pool references. */
6111 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6113 if (INSN_P (insn))
6114 replace_ltrel_base (&PATTERN (insn));
6116 curr_pool = s390_find_pool (pool_list, insn);
6117 if (!curr_pool)
6118 continue;
6120 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6122 rtx addr, pool_ref = NULL_RTX;
6123 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6124 if (pool_ref)
6126 if (s390_execute_label (insn))
6127 addr = s390_find_execute (curr_pool, insn);
6128 else
6129 addr = s390_find_constant (curr_pool,
6130 get_pool_constant (pool_ref),
6131 get_pool_mode (pool_ref));
6133 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6134 INSN_CODE (insn) = -1;
6139 /* Dump out all literal pools. */
6141 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6142 s390_dump_pool (curr_pool, 0);
6144 /* Free pool list. */
6146 while (pool_list)
6148 struct constant_pool *next = pool_list->next;
6149 s390_free_pool (pool_list);
6150 pool_list = next;
6154 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6155 We have decided we cannot use this list, so revert all changes
6156 to the current function that were done by s390_chunkify_start. */
6158 static void
6159 s390_chunkify_cancel (struct constant_pool *pool_list)
6161 struct constant_pool *curr_pool = NULL;
6162 rtx insn;
6164 /* Remove all pool placeholder insns. */
6166 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6168 /* Did we insert an extra barrier? Remove it. */
6169 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6170 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6171 rtx label = NEXT_INSN (curr_pool->pool_insn);
6173 if (jump && GET_CODE (jump) == JUMP_INSN
6174 && barrier && GET_CODE (barrier) == BARRIER
6175 && label && GET_CODE (label) == CODE_LABEL
6176 && GET_CODE (PATTERN (jump)) == SET
6177 && SET_DEST (PATTERN (jump)) == pc_rtx
6178 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6179 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6181 remove_insn (jump);
6182 remove_insn (barrier);
6183 remove_insn (label);
6186 remove_insn (curr_pool->pool_insn);
6189 /* Remove all base register reload insns. */
6191 for (insn = get_insns (); insn; )
6193 rtx next_insn = NEXT_INSN (insn);
6195 if (GET_CODE (insn) == INSN
6196 && GET_CODE (PATTERN (insn)) == SET
6197 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6198 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6199 remove_insn (insn);
6201 insn = next_insn;
6204 /* Free pool list. */
6206 while (pool_list)
6208 struct constant_pool *next = pool_list->next;
6209 s390_free_pool (pool_list);
6210 pool_list = next;
6215 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6217 void
6218 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6220 REAL_VALUE_TYPE r;
6222 switch (GET_MODE_CLASS (mode))
6224 case MODE_FLOAT:
6225 case MODE_DECIMAL_FLOAT:
6226 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6228 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6229 assemble_real (r, mode, align);
6230 break;
6232 case MODE_INT:
6233 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6234 break;
6236 default:
6237 gcc_unreachable ();
6242 /* Return an RTL expression representing the value of the return address
6243 for the frame COUNT steps up from the current frame. FRAME is the
6244 frame pointer of that frame. */
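/* With the default (non-packed) stack layout the slot is located at
   RETURN_REGNUM * UNITS_PER_WORD, e.g. 14 * 8 == 112 bytes into the
   frame on 64-bit targets, assuming RETURN_REGNUM is register 14. */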
6246 rtx
6247 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6249 int offset;
6250 rtx addr;
6252 /* Without backchain, we fail for all but the current frame. */
6254 if (!TARGET_BACKCHAIN && count > 0)
6255 return NULL_RTX;
6257 /* For the current frame, we need to make sure the initial
6258 value of RETURN_REGNUM is actually saved. */
6260 if (count == 0)
6262 /* On non-z architectures branch splitting could overwrite r14. */
6263 if (TARGET_CPU_ZARCH)
6264 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6265 else
6267 cfun_frame_layout.save_return_addr_p = true;
6268 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6272 if (TARGET_PACKED_STACK)
6273 offset = -2 * UNITS_PER_WORD;
6274 else
6275 offset = RETURN_REGNUM * UNITS_PER_WORD;
6277 addr = plus_constant (frame, offset);
6278 addr = memory_address (Pmode, addr);
6279 return gen_rtx_MEM (Pmode, addr);
6282 /* Return an RTL expression representing the back chain stored in
6283 the current stack frame. */
6285 rtx
6286 s390_back_chain_rtx (void)
6288 rtx chain;
6290 gcc_assert (TARGET_BACKCHAIN);
6292 if (TARGET_PACKED_STACK)
6293 chain = plus_constant (stack_pointer_rtx,
6294 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6295 else
6296 chain = stack_pointer_rtx;
6298 chain = gen_rtx_MEM (Pmode, chain);
6299 return chain;
6302 /* Find the first call-clobbered register unused in a function.
6303 This could be used as a base register in a leaf function
6304 or for holding the return address before the epilogue. */
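/* Note that the scan covers only GPRs 0-5, and that a return value of 0
   can mean either that GPR 0 is unused or that no register was free. */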
6306 static int
6307 find_unused_clobbered_reg (void)
6309 int i;
6310 for (i = 0; i < 6; i++)
6311 if (!regs_ever_live[i])
6312 return i;
6313 return 0;
6317 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6318 clobbered hard regs in SETREG. */
6320 static void
6321 s390_reg_clobbered_rtx (rtx setreg, rtx set_insn ATTRIBUTE_UNUSED, void *data)
6323 int *regs_ever_clobbered = (int *)data;
6324 unsigned int i, regno;
6325 enum machine_mode mode = GET_MODE (setreg);
6327 if (GET_CODE (setreg) == SUBREG)
6329 rtx inner = SUBREG_REG (setreg);
6330 if (!GENERAL_REG_P (inner))
6331 return;
6332 regno = subreg_regno (setreg);
6334 else if (GENERAL_REG_P (setreg))
6335 regno = REGNO (setreg);
6336 else
6337 return;
6339 for (i = regno;
6340 i < regno + HARD_REGNO_NREGS (regno, mode);
6341 i++)
6342 regs_ever_clobbered[i] = 1;
6345 /* Walks through all basic blocks of the current function looking
6346 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6347 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6348 each of those regs. */
6350 static void
6351 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6353 basic_block cur_bb;
6354 rtx cur_insn;
6355 unsigned int i;
6357 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6359 /* For non-leaf functions we have to consider all call clobbered regs to be
6360 clobbered. */
6361 if (!current_function_is_leaf)
6363 for (i = 0; i < 16; i++)
6364 regs_ever_clobbered[i] = call_really_used_regs[i];
6367 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6368 this work is done by liveness analysis (mark_regs_live_at_end).
6369 Special care is needed for functions containing landing pads. Landing pads
6370 may use the eh registers, but the code which sets these registers is not
6371 contained in that function. Hence s390_regs_ever_clobbered is not able to
6372 deal with this automatically. */
6373 if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
6374 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6375 if (current_function_calls_eh_return
6376 || (cfun->machine->has_landing_pad_p
6377 && regs_ever_live [EH_RETURN_DATA_REGNO (i)]))
6378 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6380 /* For nonlocal gotos all call-saved registers have to be saved.
6381 This flag is also set for the unwinding code in libgcc.
6382 See expand_builtin_unwind_init. For regs_ever_live this is done by
6383 reload. */
6384 if (current_function_has_nonlocal_label)
6385 for (i = 0; i < 16; i++)
6386 if (!call_really_used_regs[i])
6387 regs_ever_clobbered[i] = 1;
6389 FOR_EACH_BB (cur_bb)
6391 FOR_BB_INSNS (cur_bb, cur_insn)
6393 if (INSN_P (cur_insn))
6394 note_stores (PATTERN (cur_insn),
6395 s390_reg_clobbered_rtx,
6396 regs_ever_clobbered);
6401 /* Determine the frame area which actually has to be accessed
6402 in the function epilogue. The values are stored at the
6403 given pointers AREA_BOTTOM (address of the lowest used stack
6404 address) and AREA_TOP (address of the first item which does
6405 not belong to the stack frame). */
6407 static void
6408 s390_frame_area (int *area_bottom, int *area_top)
6410 int b, t;
6411 int i;
6413 b = INT_MAX;
6414 t = INT_MIN;
6416 if (cfun_frame_layout.first_restore_gpr != -1)
6418 b = (cfun_frame_layout.gprs_offset
6419 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6420 t = b + (cfun_frame_layout.last_restore_gpr
6421 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6424 if (TARGET_64BIT && cfun_save_high_fprs_p)
6426 b = MIN (b, cfun_frame_layout.f8_offset);
6427 t = MAX (t, (cfun_frame_layout.f8_offset
6428 + cfun_frame_layout.high_fprs * 8));
6431 if (!TARGET_64BIT)
6432 for (i = 2; i < 4; i++)
6433 if (cfun_fpr_bit_p (i))
6435 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6436 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6439 *area_bottom = b;
6440 *area_top = t;
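/* Worked example (added for illustration): on 64 bit
   (UNITS_PER_WORD == 8), restoring gprs 14-15 from a save area at
   gprs_offset yields

     b = gprs_offset + 14 * 8
     t = b + (15 - 14 + 1) * 8 = gprs_offset + 128

   i.e. exactly the two 8-byte slots of r14 and r15.  */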
6443 /* Fill cfun->machine with info about register usage of current function.
6444 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6446 static void
6447 s390_register_info (int clobbered_regs[])
6449 int i, j;
6451 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6452 cfun_frame_layout.fpr_bitmap = 0;
6453 cfun_frame_layout.high_fprs = 0;
6454 if (TARGET_64BIT)
6455 for (i = 24; i < 32; i++)
6456 if (regs_ever_live[i] && !global_regs[i])
6458 cfun_set_fpr_bit (i - 16);
6459 cfun_frame_layout.high_fprs++;
6462 /* Find first and last gpr to be saved. We trust regs_ever_live
6463 data, except that we don't save and restore global registers.
6465 Also, all registers with special meaning to the compiler need
6466 extra handling.  */
6468 s390_regs_ever_clobbered (clobbered_regs);
6470 for (i = 0; i < 16; i++)
6471 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6473 if (frame_pointer_needed)
6474 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6476 if (flag_pic)
6477 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6478 |= regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
6480 clobbered_regs[BASE_REGNUM]
6481 |= (cfun->machine->base_reg
6482 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
6484 clobbered_regs[RETURN_REGNUM]
6485 |= (!current_function_is_leaf
6486 || TARGET_TPF_PROFILING
6487 || cfun->machine->split_branches_pending_p
6488 || cfun_frame_layout.save_return_addr_p
6489 || current_function_calls_eh_return
6490 || current_function_stdarg);
6492 clobbered_regs[STACK_POINTER_REGNUM]
6493 |= (!current_function_is_leaf
6494 || TARGET_TPF_PROFILING
6495 || cfun_save_high_fprs_p
6496 || get_frame_size () > 0
6497 || current_function_calls_alloca
6498 || current_function_stdarg);
6500 for (i = 6; i < 16; i++)
6501 if (regs_ever_live[i] || clobbered_regs[i])
6502 break;
6503 for (j = 15; j > i; j--)
6504 if (regs_ever_live[j] || clobbered_regs[j])
6505 break;
6507 if (i == 16)
6509 /* Nothing to save/restore. */
6510 cfun_frame_layout.first_save_gpr_slot = -1;
6511 cfun_frame_layout.last_save_gpr_slot = -1;
6512 cfun_frame_layout.first_save_gpr = -1;
6513 cfun_frame_layout.first_restore_gpr = -1;
6514 cfun_frame_layout.last_save_gpr = -1;
6515 cfun_frame_layout.last_restore_gpr = -1;
6517 else
6519 /* Save slots for gprs from i to j. */
6520 cfun_frame_layout.first_save_gpr_slot = i;
6521 cfun_frame_layout.last_save_gpr_slot = j;
6523 for (i = cfun_frame_layout.first_save_gpr_slot;
6524 i < cfun_frame_layout.last_save_gpr_slot + 1;
6525 i++)
6526 if (clobbered_regs[i])
6527 break;
6529 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6530 if (clobbered_regs[j])
6531 break;
6533 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6535 /* Nothing to save/restore. */
6536 cfun_frame_layout.first_save_gpr = -1;
6537 cfun_frame_layout.first_restore_gpr = -1;
6538 cfun_frame_layout.last_save_gpr = -1;
6539 cfun_frame_layout.last_restore_gpr = -1;
6541 else
6543 /* Save / Restore from gpr i to j. */
6544 cfun_frame_layout.first_save_gpr = i;
6545 cfun_frame_layout.first_restore_gpr = i;
6546 cfun_frame_layout.last_save_gpr = j;
6547 cfun_frame_layout.last_restore_gpr = j;
6551 if (current_function_stdarg)
6553 /* Varargs functions need to save gprs 2 to 6. */
6554 if (cfun->va_list_gpr_size
6555 && current_function_args_info.gprs < GP_ARG_NUM_REG)
6557 int min_gpr = current_function_args_info.gprs;
6558 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6559 if (max_gpr > GP_ARG_NUM_REG)
6560 max_gpr = GP_ARG_NUM_REG;
6562 if (cfun_frame_layout.first_save_gpr == -1
6563 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
6565 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6566 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
6569 if (cfun_frame_layout.last_save_gpr == -1
6570 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
6572 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
6573 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
6577 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6578 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
6579 && current_function_args_info.fprs < FP_ARG_NUM_REG)
6581 int min_fpr = current_function_args_info.fprs;
6582 int max_fpr = min_fpr + cfun->va_list_fpr_size;
6583 if (max_fpr > FP_ARG_NUM_REG)
6584 max_fpr = FP_ARG_NUM_REG;
6586 /* ??? This is currently required to ensure proper location
6587 of the fpr save slots within the va_list save area. */
6588 if (TARGET_PACKED_STACK)
6589 min_fpr = 0;
6591 for (i = min_fpr; i < max_fpr; i++)
6592 cfun_set_fpr_bit (i);
6596 if (!TARGET_64BIT)
6597 for (i = 2; i < 4; i++)
6598 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6599 cfun_set_fpr_bit (i);
6602 /* Fill cfun->machine with info about frame of current function. */
6604 static void
6605 s390_frame_info (void)
6607 int i;
6609 cfun_frame_layout.frame_size = get_frame_size ();
6610 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6611 fatal_error ("total size of local variables exceeds architecture limit");
6613 if (!TARGET_PACKED_STACK)
6615 cfun_frame_layout.backchain_offset = 0;
6616 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6617 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6618 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6619 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
6620 * UNITS_PER_WORD);
6622 else if (TARGET_BACKCHAIN) /* kernel stack layout */
6624 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6625 - UNITS_PER_WORD);
6626 cfun_frame_layout.gprs_offset
6627 = (cfun_frame_layout.backchain_offset
6628 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
6629 * UNITS_PER_WORD);
6631 if (TARGET_64BIT)
6633 cfun_frame_layout.f4_offset
6634 = (cfun_frame_layout.gprs_offset
6635 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6637 cfun_frame_layout.f0_offset
6638 = (cfun_frame_layout.f4_offset
6639 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6641 else
6643 /* On 31 bit we have to take care of the alignment of the
6644 floating point regs to provide the fastest access.  */
6645 cfun_frame_layout.f0_offset
6646 = ((cfun_frame_layout.gprs_offset
6647 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6648 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6650 cfun_frame_layout.f4_offset
6651 = (cfun_frame_layout.f0_offset
6652 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6655 else /* no backchain */
6657 cfun_frame_layout.f4_offset
6658 = (STACK_POINTER_OFFSET
6659 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6661 cfun_frame_layout.f0_offset
6662 = (cfun_frame_layout.f4_offset
6663 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6665 cfun_frame_layout.gprs_offset
6666 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6669 if (current_function_is_leaf
6670 && !TARGET_TPF_PROFILING
6671 && cfun_frame_layout.frame_size == 0
6672 && !cfun_save_high_fprs_p
6673 && !current_function_calls_alloca
6674 && !current_function_stdarg)
6675 return;
6677 if (!TARGET_PACKED_STACK)
6678 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
6679 + current_function_outgoing_args_size
6680 + cfun_frame_layout.high_fprs * 8);
6681 else
6683 if (TARGET_BACKCHAIN)
6684 cfun_frame_layout.frame_size += UNITS_PER_WORD;
6686 /* No alignment trouble here because f8-f15 are only saved under
6687 64 bit. */
6688 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6689 cfun_frame_layout.f4_offset),
6690 cfun_frame_layout.gprs_offset)
6691 - cfun_frame_layout.high_fprs * 8);
6693 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6695 for (i = 0; i < 8; i++)
6696 if (cfun_fpr_bit_p (i))
6697 cfun_frame_layout.frame_size += 8;
6699 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6701 /* If an odd number of gprs has to be saved under 31 bit, we have to
6702 adjust the frame size to sustain 8-byte alignment of stack frames.  */
6703 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6704 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6705 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6707 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
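/* Hedged sketch (added; not in the original file): the masking a few
   lines above is the usual round-up-to-a-power-of-two idiom.  A
   self-contained equivalent, assuming BOUNDARY is a power of two:

     static HOST_WIDE_INT
     round_up_to (HOST_WIDE_INT size, HOST_WIDE_INT boundary)
     {
       return (size + boundary - 1) & ~(boundary - 1);
     }

   For example, round_up_to (92, 8) == 96, which is how a 31-bit frame
   with an odd number of 4-byte gpr slots gets padded to keep stack
   frames 8-byte aligned.  */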
6711 /* Generate frame layout. Fills in register and frame data for the current
6712 function in cfun->machine. This routine can be called multiple times;
6713 it will re-do the complete frame layout every time. */
6715 static void
6716 s390_init_frame_layout (void)
6718 HOST_WIDE_INT frame_size;
6719 int base_used;
6720 int clobbered_regs[16];
6722 /* On S/390 machines, we may need to perform branch splitting, which
6723 will require both base and return address register. We have no
6724 choice but to assume we're going to need them until right at the
6725 end of the machine dependent reorg phase. */
6726 if (!TARGET_CPU_ZARCH)
6727 cfun->machine->split_branches_pending_p = true;
6729 do
6731 frame_size = cfun_frame_layout.frame_size;
6733 /* Try to predict whether we'll need the base register. */
6734 base_used = cfun->machine->split_branches_pending_p
6735 || current_function_uses_const_pool
6736 || (!DISP_IN_RANGE (frame_size)
6737 && !CONST_OK_FOR_K (frame_size));
6739 /* Decide which register to use as literal pool base. In small
6740 leaf functions, try to use an unused call-clobbered register
6741 as base register to avoid save/restore overhead. */
6742 if (!base_used)
6743 cfun->machine->base_reg = NULL_RTX;
6744 else if (current_function_is_leaf && !regs_ever_live[5])
6745 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6746 else
6747 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6749 s390_register_info (clobbered_regs);
6750 s390_frame_info ();
6752 while (frame_size != cfun_frame_layout.frame_size);
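/* Note added for clarity (an inference from the loop above, not
   original text): the layout is recomputed until the frame size
   reaches a fixed point, since a frame size outside the displacement
   range forces a literal pool base register, and allocating that
   register in turn changes the register save area and thus the frame
   size again.  */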
6755 /* Update frame layout. Recompute actual register save data based on
6756 current info and update regs_ever_live for the special registers.
6757 May be called multiple times, but may never cause *more* registers
6758 to be saved than s390_init_frame_layout allocated room for. */
6760 static void
6761 s390_update_frame_layout (void)
6763 int clobbered_regs[16];
6765 s390_register_info (clobbered_regs);
6767 regs_ever_live[BASE_REGNUM] = clobbered_regs[BASE_REGNUM];
6768 regs_ever_live[RETURN_REGNUM] = clobbered_regs[RETURN_REGNUM];
6769 regs_ever_live[STACK_POINTER_REGNUM] = clobbered_regs[STACK_POINTER_REGNUM];
6771 if (cfun->machine->base_reg)
6772 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6775 /* Return true if it is legal to put a value with MODE into REGNO. */
6777 bool
6778 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
6780 switch (REGNO_REG_CLASS (regno))
6782 case FP_REGS:
6783 if (REGNO_PAIR_OK (regno, mode))
6785 if (mode == SImode || mode == DImode)
6786 return true;
6788 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
6789 return true;
6791 break;
6792 case ADDR_REGS:
6793 if (FRAME_REGNO_P (regno) && mode == Pmode)
6794 return true;
6796 /* fallthrough */
6797 case GENERAL_REGS:
6798 if (REGNO_PAIR_OK (regno, mode))
6800 if (TARGET_64BIT
6801 || (mode != TFmode && mode != TCmode && mode != TDmode))
6802 return true;
6804 break;
6805 case CC_REGS:
6806 if (GET_MODE_CLASS (mode) == MODE_CC)
6807 return true;
6808 break;
6809 case ACCESS_REGS:
6810 if (REGNO_PAIR_OK (regno, mode))
6812 if (mode == SImode || mode == Pmode)
6813 return true;
6815 break;
6816 default:
6817 return false;
6820 return false;
6823 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6825 bool
6826 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
6828 /* Once we've decided upon a register to use as base register, it must
6829 no longer be used for any other purpose. */
6830 if (cfun->machine->base_reg)
6831 if (REGNO (cfun->machine->base_reg) == old_reg
6832 || REGNO (cfun->machine->base_reg) == new_reg)
6833 return false;
6835 return true;
6838 /* Maximum number of registers to represent a value of mode MODE
6839 in a register of class CLASS. */
6841 int
6842 s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
6844 switch (class)
6846 case FP_REGS:
6847 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6848 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
6849 else
6850 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
6851 case ACCESS_REGS:
6852 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
6853 default:
6854 break;
6856 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
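/* Worked examples (added for illustration): in FP_REGS a TFmode value
   (16 bytes) needs (16 + 8 - 1) / 8 == 2 registers, and a DCmode
   value needs 2 * ((16 / 2 + 8 - 1) / 8) == 2 as well; in
   GENERAL_REGS on 31 bit (UNITS_PER_WORD == 4) a DImode value
   occupies (8 + 4 - 1) / 4 == 2 gprs.  */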
6859 /* Return true if register FROM can be eliminated via register TO. */
6861 bool
6862 s390_can_eliminate (int from, int to)
6864 /* On zSeries machines, we have not marked the base register as fixed.
6865 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
6866 If a function requires the base register, we say here that this
6867 elimination cannot be performed. This will cause reload to free
6868 up the base register (as if it were fixed). On the other hand,
6869 if the current function does *not* require the base register, we
6870 say here the elimination succeeds, which in turn allows reload
6871 to allocate the base register for any other purpose. */
6872 if (from == BASE_REGNUM && to == BASE_REGNUM)
6874 if (TARGET_CPU_ZARCH)
6876 s390_init_frame_layout ();
6877 return cfun->machine->base_reg == NULL_RTX;
6880 return false;
6883 /* Everything else must point into the stack frame. */
6884 gcc_assert (to == STACK_POINTER_REGNUM
6885 || to == HARD_FRAME_POINTER_REGNUM);
6887 gcc_assert (from == FRAME_POINTER_REGNUM
6888 || from == ARG_POINTER_REGNUM
6889 || from == RETURN_ADDRESS_POINTER_REGNUM);
6891 /* Make sure we actually saved the return address. */
6892 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6893 if (!current_function_calls_eh_return
6894 && !current_function_stdarg
6895 && !cfun_frame_layout.save_return_addr_p)
6896 return false;
6898 return true;
6901 /* Return offset between register FROM and TO initially after prolog. */
6903 HOST_WIDE_INT
6904 s390_initial_elimination_offset (int from, int to)
6906 HOST_WIDE_INT offset;
6907 int index;
6909 /* ??? Why are we called for non-eliminable pairs? */
6910 if (!s390_can_eliminate (from, to))
6911 return 0;
6913 switch (from)
6915 case FRAME_POINTER_REGNUM:
6916 offset = (get_frame_size()
6917 + STACK_POINTER_OFFSET
6918 + current_function_outgoing_args_size);
6919 break;
6921 case ARG_POINTER_REGNUM:
6922 s390_init_frame_layout ();
6923 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6924 break;
6926 case RETURN_ADDRESS_POINTER_REGNUM:
6927 s390_init_frame_layout ();
6928 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
6929 gcc_assert (index >= 0);
6930 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6931 offset += index * UNITS_PER_WORD;
6932 break;
6934 case BASE_REGNUM:
6935 offset = 0;
6936 break;
6938 default:
6939 gcc_unreachable ();
6942 return offset;
6945 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6946 to register BASE. Return generated insn. */
6948 static rtx
6949 save_fpr (rtx base, int offset, int regnum)
6951 rtx addr;
6952 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6954 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
6955 set_mem_alias_set (addr, get_varargs_alias_set ());
6956 else
6957 set_mem_alias_set (addr, get_frame_alias_set ());
6959 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6962 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6963 to register BASE. Return generated insn. */
6965 static rtx
6966 restore_fpr (rtx base, int offset, int regnum)
6968 rtx addr;
6969 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6970 set_mem_alias_set (addr, get_frame_alias_set ());
6972 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6975 /* Generate insn to save registers FIRST to LAST into
6976 the register save area located at offset OFFSET
6977 relative to register BASE. */
6979 static rtx
6980 save_gprs (rtx base, int offset, int first, int last)
6982 rtx addr, insn, note;
6983 int i;
6985 addr = plus_constant (base, offset);
6986 addr = gen_rtx_MEM (Pmode, addr);
6988 set_mem_alias_set (addr, get_frame_alias_set ());
6990 /* Special-case single register. */
6991 if (first == last)
6993 if (TARGET_64BIT)
6994 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6995 else
6996 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6998 RTX_FRAME_RELATED_P (insn) = 1;
6999 return insn;
7003 insn = gen_store_multiple (addr,
7004 gen_rtx_REG (Pmode, first),
7005 GEN_INT (last - first + 1));
7007 if (first <= 6 && current_function_stdarg)
7008 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7010 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7012 if (first + i <= 6)
7013 set_mem_alias_set (mem, get_varargs_alias_set ());
7016 /* We need to set the FRAME_RELATED flag on all SETs
7017 inside the store-multiple pattern.
7019 However, we must not emit DWARF records for registers 2..5
7020 if they are stored for use by variable arguments ...
7022 ??? Unfortunately, it is not enough to simply not set the
7023 FRAME_RELATED flags for those SETs, because the first SET
7024 of the PARALLEL is always treated as if it had the flag
7025 set, even if it does not. Therefore we emit a new pattern
7026 without those registers as REG_FRAME_RELATED_EXPR note. */
7028 if (first >= 6)
7030 rtx pat = PATTERN (insn);
7032 for (i = 0; i < XVECLEN (pat, 0); i++)
7033 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7034 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7036 RTX_FRAME_RELATED_P (insn) = 1;
7038 else if (last >= 6)
7040 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
7041 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7042 gen_rtx_REG (Pmode, 6),
7043 GEN_INT (last - 6 + 1));
7044 note = PATTERN (note);
7046 REG_NOTES (insn) =
7047 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7048 note, REG_NOTES (insn));
7050 for (i = 0; i < XVECLEN (note, 0); i++)
7051 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7052 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7054 RTX_FRAME_RELATED_P (insn) = 1;
7057 return insn;
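/* Illustrative note (hedged; not original text): for more than one
   register the pattern built above is a single store-multiple, which
   would typically assemble to STM on 31 bit or STMG on 64 bit, e.g.

     stmg    %r14,%r15,112(%r15)

   to save gprs 14-15 at offset 112 from the stack pointer.  */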
7060 /* Generate insn to restore registers FIRST to LAST from
7061 the register save area located at offset OFFSET
7062 relative to register BASE. */
7064 static rtx
7065 restore_gprs (rtx base, int offset, int first, int last)
7067 rtx addr, insn;
7069 addr = plus_constant (base, offset);
7070 addr = gen_rtx_MEM (Pmode, addr);
7071 set_mem_alias_set (addr, get_frame_alias_set ());
7073 /* Special-case single register. */
7074 if (first == last)
7076 if (TARGET_64BIT)
7077 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7078 else
7079 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7081 return insn;
7084 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7085 addr,
7086 GEN_INT (last - first + 1));
7087 return insn;
7090 /* Return insn sequence to load the GOT register. */
7092 static GTY(()) rtx got_symbol;
7093 rtx
7094 s390_load_got (void)
7096 rtx insns;
7098 if (!got_symbol)
7100 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7101 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7104 start_sequence ();
7106 if (TARGET_CPU_ZARCH)
7108 emit_move_insn (pic_offset_table_rtx, got_symbol);
7110 else
7112 rtx offset;
7114 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7115 UNSPEC_LTREL_OFFSET);
7116 offset = gen_rtx_CONST (Pmode, offset);
7117 offset = force_const_mem (Pmode, offset);
7119 emit_move_insn (pic_offset_table_rtx, offset);
7121 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7122 UNSPEC_LTREL_BASE);
7123 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7125 emit_move_insn (pic_offset_table_rtx, offset);
7128 insns = get_insns ();
7129 end_sequence ();
7130 return insns;
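/* Note (added for clarity): on zarch the GOT pointer is loaded with a
   direct PC-relative load (LARL) of _GLOBAL_OFFSET_TABLE_, while on
   31-bit ESA the offset is materialized through the literal pool and
   added to the literal pool base via UNSPEC_LTREL_BASE, as expanded
   above.  */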
7133 /* Expand the prologue into a bunch of separate insns. */
7135 void
7136 s390_emit_prologue (void)
7138 rtx insn, addr;
7139 rtx temp_reg;
7140 int i;
7141 int offset;
7142 int next_fpr = 0;
7144 /* Complete frame layout. */
7146 s390_update_frame_layout ();
7148 /* Annotate all constant pool references to let the scheduler know
7149 they implicitly use the base register. */
7151 push_topmost_sequence ();
7153 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7154 if (INSN_P (insn))
7155 annotate_constant_pool_refs (&PATTERN (insn));
7157 pop_topmost_sequence ();
7159 /* Choose best register to use for temp use within prologue.
7160 See below for why TPF must use register 1.  */
7162 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7163 && !current_function_is_leaf
7164 && !TARGET_TPF_PROFILING)
7165 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7166 else
7167 temp_reg = gen_rtx_REG (Pmode, 1);
7169 /* Save call saved gprs. */
7170 if (cfun_frame_layout.first_save_gpr != -1)
7172 insn = save_gprs (stack_pointer_rtx,
7173 cfun_frame_layout.gprs_offset +
7174 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7175 - cfun_frame_layout.first_save_gpr_slot),
7176 cfun_frame_layout.first_save_gpr,
7177 cfun_frame_layout.last_save_gpr);
7178 emit_insn (insn);
7181 /* Dummy insn to mark literal pool slot. */
7183 if (cfun->machine->base_reg)
7184 emit_insn (gen_main_pool (cfun->machine->base_reg));
7186 offset = cfun_frame_layout.f0_offset;
7188 /* Save f0 and f2. */
7189 for (i = 0; i < 2; i++)
7191 if (cfun_fpr_bit_p (i))
7193 save_fpr (stack_pointer_rtx, offset, i + 16);
7194 offset += 8;
7196 else if (!TARGET_PACKED_STACK)
7197 offset += 8;
7200 /* Save f4 and f6. */
7201 offset = cfun_frame_layout.f4_offset;
7202 for (i = 2; i < 4; i++)
7204 if (cfun_fpr_bit_p (i))
7206 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7207 offset += 8;
7209 /* If f4 and f6 are call clobbered, they are saved due to stdargs and
7210 are therefore not frame related.  */
7211 if (!call_really_used_regs[i + 16])
7212 RTX_FRAME_RELATED_P (insn) = 1;
7214 else if (!TARGET_PACKED_STACK)
7215 offset += 8;
7218 if (TARGET_PACKED_STACK
7219 && cfun_save_high_fprs_p
7220 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7222 offset = (cfun_frame_layout.f8_offset
7223 + (cfun_frame_layout.high_fprs - 1) * 8);
7225 for (i = 15; i > 7 && offset >= 0; i--)
7226 if (cfun_fpr_bit_p (i))
7228 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7230 RTX_FRAME_RELATED_P (insn) = 1;
7231 offset -= 8;
7233 if (offset >= cfun_frame_layout.f8_offset)
7234 next_fpr = i + 16;
7237 if (!TARGET_PACKED_STACK)
7238 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7240 /* Decrement stack pointer. */
7242 if (cfun_frame_layout.frame_size > 0)
7244 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7246 if (s390_stack_size)
7248 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7249 & ~(s390_stack_guard - 1));
7250 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7251 GEN_INT (stack_check_mask));
7253 if (TARGET_64BIT)
7254 gen_cmpdi (t, const0_rtx);
7255 else
7256 gen_cmpsi (t, const0_rtx);
7258 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7259 gen_rtx_REG (CCmode,
7260 CC_REGNUM),
7261 const0_rtx),
7262 const0_rtx));
7265 if (s390_warn_framesize > 0
7266 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7267 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7268 current_function_name (), cfun_frame_layout.frame_size);
7270 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7271 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7273 /* Save incoming stack pointer into temp reg. */
7274 if (TARGET_BACKCHAIN || next_fpr)
7275 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7277 /* Subtract frame size from stack pointer. */
7279 if (DISP_IN_RANGE (INTVAL (frame_off)))
7281 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7282 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7283 frame_off));
7284 insn = emit_insn (insn);
7286 else
7288 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7289 frame_off = force_const_mem (Pmode, frame_off);
7291 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7292 annotate_constant_pool_refs (&PATTERN (insn));
7295 RTX_FRAME_RELATED_P (insn) = 1;
7296 REG_NOTES (insn) =
7297 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7298 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7299 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7300 GEN_INT (-cfun_frame_layout.frame_size))),
7301 REG_NOTES (insn));
7303 /* Set backchain. */
7305 if (TARGET_BACKCHAIN)
7307 if (cfun_frame_layout.backchain_offset)
7308 addr = gen_rtx_MEM (Pmode,
7309 plus_constant (stack_pointer_rtx,
7310 cfun_frame_layout.backchain_offset));
7311 else
7312 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7313 set_mem_alias_set (addr, get_frame_alias_set ());
7314 insn = emit_insn (gen_move_insn (addr, temp_reg));
7317 /* If we support asynchronous exceptions (e.g. for Java),
7318 we need to make sure the backchain pointer is set up
7319 before any possibly trapping memory access. */
7321 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7323 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7324 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
7328 /* Save fprs 8 - 15 (64 bit ABI). */
7330 if (cfun_save_high_fprs_p && next_fpr)
7332 insn = emit_insn (gen_add2_insn (temp_reg,
7333 GEN_INT (cfun_frame_layout.f8_offset)));
7335 offset = 0;
7337 for (i = 24; i <= next_fpr; i++)
7338 if (cfun_fpr_bit_p (i - 16))
7340 rtx addr = plus_constant (stack_pointer_rtx,
7341 cfun_frame_layout.frame_size
7342 + cfun_frame_layout.f8_offset
7343 + offset);
7345 insn = save_fpr (temp_reg, offset, i);
7346 offset += 8;
7347 RTX_FRAME_RELATED_P (insn) = 1;
7348 REG_NOTES (insn) =
7349 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7350 gen_rtx_SET (VOIDmode,
7351 gen_rtx_MEM (DFmode, addr),
7352 gen_rtx_REG (DFmode, i)),
7353 REG_NOTES (insn));
7357 /* Set frame pointer, if needed. */
7359 if (frame_pointer_needed)
7361 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7362 RTX_FRAME_RELATED_P (insn) = 1;
7365 /* Set up got pointer, if needed. */
7367 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7369 rtx insns = s390_load_got ();
7371 for (insn = insns; insn; insn = NEXT_INSN (insn))
7373 annotate_constant_pool_refs (&PATTERN (insn));
7375 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
7376 REG_NOTES (insn));
7379 emit_insn (insns);
7382 if (TARGET_TPF_PROFILING)
7384 /* Generate a BAS instruction to serve as a function
7385 entry intercept to facilitate the use of tracing
7386 algorithms located at the branch target. */
7387 emit_insn (gen_prologue_tpf ());
7389 /* Emit a blockage here so that all code
7390 lies between the profiling mechanisms. */
7391 emit_insn (gen_blockage ());
7395 /* Expand the epilogue into a bunch of separate insns. */
7397 void
7398 s390_emit_epilogue (bool sibcall)
7400 rtx frame_pointer, return_reg;
7401 int area_bottom, area_top, offset = 0;
7402 int next_offset;
7403 rtvec p;
7404 int i;
7406 if (TARGET_TPF_PROFILING)
7409 /* Generate a BAS instruction to serve as a function
7410 exit intercept to facilitate the use of tracing
7411 algorithms located at the branch target. */
7413 /* Emit a blockage here so that all code
7414 lies between the profiling mechanisms. */
7415 emit_insn (gen_blockage ());
7417 emit_insn (gen_epilogue_tpf ());
7420 /* Check whether to use frame or stack pointer for restore. */
7422 frame_pointer = (frame_pointer_needed
7423 ? hard_frame_pointer_rtx : stack_pointer_rtx);
7425 s390_frame_area (&area_bottom, &area_top);
7427 /* Check whether we can access the register save area.
7428 If not, increment the frame pointer as required. */
7430 if (area_top <= area_bottom)
7432 /* Nothing to restore. */
7434 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7435 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7437 /* Area is in range. */
7438 offset = cfun_frame_layout.frame_size;
7440 else
7442 rtx insn, frame_off;
7444 offset = area_bottom < 0 ? -area_bottom : 0;
7445 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7447 if (DISP_IN_RANGE (INTVAL (frame_off)))
7449 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7450 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7451 insn = emit_insn (insn);
7453 else
7455 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7456 frame_off = force_const_mem (Pmode, frame_off);
7458 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7459 annotate_constant_pool_refs (&PATTERN (insn));
7463 /* Restore call saved fprs. */
7465 if (TARGET_64BIT)
7467 if (cfun_save_high_fprs_p)
7469 next_offset = cfun_frame_layout.f8_offset;
7470 for (i = 24; i < 32; i++)
7472 if (cfun_fpr_bit_p (i - 16))
7474 restore_fpr (frame_pointer,
7475 offset + next_offset, i);
7476 next_offset += 8;
7482 else
7484 next_offset = cfun_frame_layout.f4_offset;
7485 for (i = 18; i < 20; i++)
7487 if (cfun_fpr_bit_p (i - 16))
7489 restore_fpr (frame_pointer,
7490 offset + next_offset, i);
7491 next_offset += 8;
7493 else if (!TARGET_PACKED_STACK)
7494 next_offset += 8;
7499 /* Return register. */
7501 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7503 /* Restore call saved gprs. */
7505 if (cfun_frame_layout.first_restore_gpr != -1)
7507 rtx insn, addr;
7508 int i;
7510 /* Check for global registers and store them back to the stack
7511 slots from which they will be restored, so they stay unchanged.  */
7513 for (i = cfun_frame_layout.first_restore_gpr;
7514 i <= cfun_frame_layout.last_restore_gpr;
7515 i++)
7517 /* These registers are special and need to be
7518 restored in any case. */
7519 if (i == STACK_POINTER_REGNUM
7520 || i == RETURN_REGNUM
7521 || i == BASE_REGNUM
7522 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7523 continue;
7525 if (global_regs[i])
7527 addr = plus_constant (frame_pointer,
7528 offset + cfun_frame_layout.gprs_offset
7529 + (i - cfun_frame_layout.first_save_gpr_slot)
7530 * UNITS_PER_WORD);
7531 addr = gen_rtx_MEM (Pmode, addr);
7532 set_mem_alias_set (addr, get_frame_alias_set ());
7533 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7537 if (! sibcall)
7539 /* Fetch the return address from the stack before the load multiple;
7540 this helps scheduling.  */
7542 if (cfun_frame_layout.save_return_addr_p
7543 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7544 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7546 int return_regnum = find_unused_clobbered_reg ();
7547 if (!return_regnum)
7548 return_regnum = 4;
7549 return_reg = gen_rtx_REG (Pmode, return_regnum);
7551 addr = plus_constant (frame_pointer,
7552 offset + cfun_frame_layout.gprs_offset
7553 + (RETURN_REGNUM
7554 - cfun_frame_layout.first_save_gpr_slot)
7555 * UNITS_PER_WORD);
7556 addr = gen_rtx_MEM (Pmode, addr);
7557 set_mem_alias_set (addr, get_frame_alias_set ());
7558 emit_move_insn (return_reg, addr);
7562 insn = restore_gprs (frame_pointer,
7563 offset + cfun_frame_layout.gprs_offset
7564 + (cfun_frame_layout.first_restore_gpr
7565 - cfun_frame_layout.first_save_gpr_slot)
7566 * UNITS_PER_WORD,
7567 cfun_frame_layout.first_restore_gpr,
7568 cfun_frame_layout.last_restore_gpr);
7569 emit_insn (insn);
7572 if (! sibcall)
7575 /* Return to caller. */
7577 p = rtvec_alloc (2);
7579 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7580 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7581 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7586 /* Return the size in bytes of a function argument of
7587 type TYPE and/or mode MODE. At least one of TYPE or
7588 MODE must be specified. */
7590 static int
7591 s390_function_arg_size (enum machine_mode mode, tree type)
7593 if (type)
7594 return int_size_in_bytes (type);
7596 /* No type info available for some library calls ... */
7597 if (mode != BLKmode)
7598 return GET_MODE_SIZE (mode);
7600 /* If we have neither type nor mode, abort.  */
7601 gcc_unreachable ();
7604 /* Return true if a function argument of type TYPE and mode MODE
7605 is to be passed in a floating-point register, if available. */
7607 static bool
7608 s390_function_arg_float (enum machine_mode mode, tree type)
7610 int size = s390_function_arg_size (mode, type);
7611 if (size > 8)
7612 return false;
7614 /* Soft-float changes the ABI: no floating-point registers are used. */
7615 if (TARGET_SOFT_FLOAT)
7616 return false;
7618 /* No type info available for some library calls ... */
7619 if (!type)
7620 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
7622 /* The ABI says that record types with a single member are treated
7623 just like that member would be. */
7624 while (TREE_CODE (type) == RECORD_TYPE)
7626 tree field, single = NULL_TREE;
7628 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7630 if (TREE_CODE (field) != FIELD_DECL)
7631 continue;
7633 if (single == NULL_TREE)
7634 single = TREE_TYPE (field);
7635 else
7636 return false;
7639 if (single == NULL_TREE)
7640 return false;
7641 else
7642 type = single;
7645 return TREE_CODE (type) == REAL_TYPE;
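/* Example (added for illustration): by the single-member rule above,

     struct wrap { double d; };

   is passed in a floating-point register exactly like a bare double,
   whereas

     struct pair { float a, b; };

   has two fields, is rejected here, and is handled as an
   integer/aggregate argument instead.  */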
7648 /* Return true if a function argument of type TYPE and mode MODE
7649 is to be passed in an integer register, or a pair of integer
7650 registers, if available. */
7652 static bool
7653 s390_function_arg_integer (enum machine_mode mode, tree type)
7655 int size = s390_function_arg_size (mode, type);
7656 if (size > 8)
7657 return false;
7659 /* No type info available for some library calls ... */
7660 if (!type)
7661 return GET_MODE_CLASS (mode) == MODE_INT
7662 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
7664 /* We accept small integral (and similar) types. */
7665 if (INTEGRAL_TYPE_P (type)
7666 || POINTER_TYPE_P (type)
7667 || TREE_CODE (type) == OFFSET_TYPE
7668 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7669 return true;
7671 /* We also accept structs of size 1, 2, 4, 8 that are not
7672 passed in floating-point registers. */
7673 if (AGGREGATE_TYPE_P (type)
7674 && exact_log2 (size) >= 0
7675 && !s390_function_arg_float (mode, type))
7676 return true;
7678 return false;
7681 /* Return 1 if a function argument of type TYPE and mode MODE
7682 is to be passed by reference. The ABI specifies that only
7683 structures of size 1, 2, 4, or 8 bytes are passed by value,
7684 all other structures (and complex numbers) are passed by
7685 reference. */
7687 static bool
7688 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7689 enum machine_mode mode, tree type,
7690 bool named ATTRIBUTE_UNUSED)
7692 int size = s390_function_arg_size (mode, type);
7693 if (size > 8)
7694 return true;
7696 if (type)
7698 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7699 return 1;
7701 if (TREE_CODE (type) == COMPLEX_TYPE
7702 || TREE_CODE (type) == VECTOR_TYPE)
7703 return 1;
7706 return 0;
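/* Examples (added for illustration): a struct of 3 bytes has no exact
   power-of-two size and is passed by reference, while structs of size
   1, 2, 4, or 8 are passed by value; any _Complex or vector value and
   anything larger than 8 bytes likewise goes by reference.  */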
7709 /* Update the data in CUM to advance over an argument of mode MODE and
7710 data type TYPE. (TYPE is null for libcalls where that information
7711 may not be available.)  The boolean NAMED specifies whether the
7712 argument is a named argument (as opposed to an unnamed argument
7713 matching an ellipsis). */
7715 void
7716 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7717 tree type, int named ATTRIBUTE_UNUSED)
7719 if (s390_function_arg_float (mode, type))
7721 cum->fprs += 1;
7723 else if (s390_function_arg_integer (mode, type))
7725 int size = s390_function_arg_size (mode, type);
7726 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7728 else
7729 gcc_unreachable ();
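/* Worked example (added for illustration): on 31 bit, advancing past
   a DImode argument adds (8 + 4 - 1) / 4 == 2 to cum->gprs, matching
   the register pair that s390_function_arg below would assign.  */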
7732 /* Define where to put the arguments to a function.
7733 Value is zero to push the argument on the stack,
7734 or a hard register in which to store the argument.
7736 MODE is the argument's machine mode.
7737 TYPE is the data type of the argument (as a tree).
7738 This is null for libcalls where that information may
7739 not be available.
7740 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7741 the preceding args and about the function being called.
7742 NAMED is nonzero if this argument is a named parameter
7743 (otherwise it is an extra parameter matching an ellipsis).
7745 On S/390, we use general purpose registers 2 through 6 to
7746 pass integer, pointer, and certain structure arguments, and
7747 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7748 to pass floating point arguments. All remaining arguments
7749 are pushed to the stack. */
7751 rtx
7752 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7753 int named ATTRIBUTE_UNUSED)
7755 if (s390_function_arg_float (mode, type))
7757 if (cum->fprs + 1 > FP_ARG_NUM_REG)
7758 return 0;
7759 else
7760 return gen_rtx_REG (mode, cum->fprs + 16);
7762 else if (s390_function_arg_integer (mode, type))
7764 int size = s390_function_arg_size (mode, type);
7765 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7767 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
7768 return 0;
7769 else
7770 return gen_rtx_REG (mode, cum->gprs + 2);
7773 /* After the real arguments, expand_call calls us once again
7774 with a void_type_node type. Whatever we return here is
7775 passed as operand 2 to the call expanders.
7777 We don't need this feature ... */
7778 else if (type == void_type_node)
7779 return const0_rtx;
7781 gcc_unreachable ();
7784 /* Return true if return values of type TYPE should be returned
7785 in a memory buffer whose address is passed by the caller as
7786 hidden first argument. */
7788 static bool
7789 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7791 /* We accept small integral (and similar) types. */
7792 if (INTEGRAL_TYPE_P (type)
7793 || POINTER_TYPE_P (type)
7794 || TREE_CODE (type) == OFFSET_TYPE
7795 || TREE_CODE (type) == REAL_TYPE)
7796 return int_size_in_bytes (type) > 8;
7798 /* Aggregates and similar constructs are always returned
7799 in memory. */
7800 if (AGGREGATE_TYPE_P (type)
7801 || TREE_CODE (type) == COMPLEX_TYPE
7802 || TREE_CODE (type) == VECTOR_TYPE)
7803 return true;
7805 /* ??? We get called on all sorts of random stuff from
7806 aggregate_value_p. We can't abort, but it's not clear
7807 what's safe to return. Pretend it's a struct I guess. */
7808 return true;
7811 /* Define where to return a (scalar) value of type TYPE.
7812 If TYPE is null, define where to return a (scalar)
7813 value of mode MODE from a libcall. */
7815 rtx
7816 s390_function_value (tree type, enum machine_mode mode)
7818 if (type)
7820 int unsignedp = TYPE_UNSIGNED (type);
7821 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7824 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
7825 gcc_assert (GET_MODE_SIZE (mode) <= 8);
7827 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
7828 return gen_rtx_REG (mode, 16);
7829 else
7830 return gen_rtx_REG (mode, 2);
7834 /* Create and return the va_list datatype.
7836 On S/390, va_list is an array type equivalent to
7838 typedef struct __va_list_tag
7840 long __gpr;
7841 long __fpr;
7842 void *__overflow_arg_area;
7843 void *__reg_save_area;
7844 } va_list[1];
7846 where __gpr and __fpr hold the number of general purpose
7847 or floating point arguments used up to now, respectively,
7848 __overflow_arg_area points to the stack location of the
7849 next argument passed on the stack, and __reg_save_area
7850 always points to the start of the register area in the
7851 call frame of the current function. The function prologue
7852 saves all registers used for argument passing into this
7853 area if the function uses variable arguments. */
7855 static tree
7856 s390_build_builtin_va_list (void)
7858 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7860 record = lang_hooks.types.make_type (RECORD_TYPE);
7862 type_decl =
7863 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7865 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7866 long_integer_type_node);
7867 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7868 long_integer_type_node);
7869 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7870 ptr_type_node);
7871 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7872 ptr_type_node);
7874 va_list_gpr_counter_field = f_gpr;
7875 va_list_fpr_counter_field = f_fpr;
7877 DECL_FIELD_CONTEXT (f_gpr) = record;
7878 DECL_FIELD_CONTEXT (f_fpr) = record;
7879 DECL_FIELD_CONTEXT (f_ovf) = record;
7880 DECL_FIELD_CONTEXT (f_sav) = record;
7882 TREE_CHAIN (record) = type_decl;
7883 TYPE_NAME (record) = type_decl;
7884 TYPE_FIELDS (record) = f_gpr;
7885 TREE_CHAIN (f_gpr) = f_fpr;
7886 TREE_CHAIN (f_fpr) = f_ovf;
7887 TREE_CHAIN (f_ovf) = f_sav;
7889 layout_type (record);
7891 /* The correct type is an array type of one element. */
7892 return build_array_type (record, build_index_type (size_zero_node));
7895 /* Implement va_start by filling the va_list structure VALIST.
7896 STDARG_P is always true, and ignored.
7897 NEXTARG points to the first anonymous stack argument.
7899 The following global variables are used to initialize
7900 the va_list structure:
7902 current_function_args_info:
7903 holds number of gprs and fprs used for named arguments.
7904 current_function_arg_offset_rtx:
7905 holds the offset of the first anonymous stack argument
7906 (relative to the virtual arg pointer). */
7908 void
7909 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7911 HOST_WIDE_INT n_gpr, n_fpr;
7912 int off;
7913 tree f_gpr, f_fpr, f_ovf, f_sav;
7914 tree gpr, fpr, ovf, sav, t;
7916 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7917 f_fpr = TREE_CHAIN (f_gpr);
7918 f_ovf = TREE_CHAIN (f_fpr);
7919 f_sav = TREE_CHAIN (f_ovf);
7921 valist = build_va_arg_indirect_ref (valist);
7922 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7923 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7924 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7925 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7927 /* Count number of gp and fp argument registers used. */
7929 n_gpr = current_function_args_info.gprs;
7930 n_fpr = current_function_args_info.fprs;
7932 if (cfun->va_list_gpr_size)
7934 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
7935 build_int_cst (NULL_TREE, n_gpr));
7936 TREE_SIDE_EFFECTS (t) = 1;
7937 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7940 if (cfun->va_list_fpr_size)
7942 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
7943 build_int_cst (NULL_TREE, n_fpr));
7944 TREE_SIDE_EFFECTS (t) = 1;
7945 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7948 /* Find the overflow area. */
7949 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
7950 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
7952 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7954 off = INTVAL (current_function_arg_offset_rtx);
7955 off = off < 0 ? 0 : off;
7956 if (TARGET_DEBUG_ARG)
7957 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7958 (int)n_gpr, (int)n_fpr, off);
7960 t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7962 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
7963 TREE_SIDE_EFFECTS (t) = 1;
7964 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7967 /* Find the register save area. */
7968 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
7969 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
7971 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7972 t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
7973 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7975 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
7976 TREE_SIDE_EFFECTS (t) = 1;
7977 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7981 /* Implement va_arg by updating the va_list structure
7982 VALIST as required to retrieve an argument of type
7983 TYPE, and returning that argument.
7985 Generates code equivalent to:
7987 if (integral value) {
7988 if (size <= 4 && args.gpr < 5 ||
7989 size > 4 && args.gpr < 4 )
7990 ret = args.reg_save_area[args.gpr+8]
7991 else
7992 ret = *args.overflow_arg_area++;
7993 } else if (float value) {
7994 if (args.fpr < 2)
7995 ret = args.reg_save_area[args.fpr+64]
7996 else
7997 ret = *args.overflow_arg_area++;
7998 } else if (aggregate value) {
7999 if (args.gpr < 5)
8000 ret = *args.reg_save_area[args.gpr]
8001 else
8002 ret = **args.overflow_arg_area++;
8003 } */
8005 static tree
8006 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8007 tree *post_p ATTRIBUTE_UNUSED)
8009 tree f_gpr, f_fpr, f_ovf, f_sav;
8010 tree gpr, fpr, ovf, sav, reg, t, u;
8011 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8012 tree lab_false, lab_over, addr;
8014 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8015 f_fpr = TREE_CHAIN (f_gpr);
8016 f_ovf = TREE_CHAIN (f_fpr);
8017 f_sav = TREE_CHAIN (f_ovf);
8019 valist = build_va_arg_indirect_ref (valist);
8020 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8021 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8022 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8023 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8025 size = int_size_in_bytes (type);
8027 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8029 if (TARGET_DEBUG_ARG)
8031 fprintf (stderr, "va_arg: aggregate type");
8032 debug_tree (type);
8035 /* Aggregates are passed by reference. */
8036 indirect_p = 1;
8037 reg = gpr;
8038 n_reg = 1;
8040 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8041 will be added by s390_frame_info, because for va_args an even
8042 number of gprs always has to be saved (r15-r2 = 14 regs).  */
8043 sav_ofs = 2 * UNITS_PER_WORD;
8044 sav_scale = UNITS_PER_WORD;
8045 size = UNITS_PER_WORD;
8046 max_reg = GP_ARG_NUM_REG - n_reg;
8048 else if (s390_function_arg_float (TYPE_MODE (type), type))
8050 if (TARGET_DEBUG_ARG)
8052 fprintf (stderr, "va_arg: float type");
8053 debug_tree (type);
8056 /* FP args go in FP registers, if present. */
8057 indirect_p = 0;
8058 reg = fpr;
8059 n_reg = 1;
8060 sav_ofs = 16 * UNITS_PER_WORD;
8061 sav_scale = 8;
8062 max_reg = FP_ARG_NUM_REG - n_reg;
8064 else
8066 if (TARGET_DEBUG_ARG)
8068 fprintf (stderr, "va_arg: other type");
8069 debug_tree (type);
8072 /* Otherwise into GP registers. */
8073 indirect_p = 0;
8074 reg = gpr;
8075 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8077 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8078 will be added by s390_frame_info, because for va_args an even
8079 number of gprs always has to be saved (r15-r2 = 14 regs).  */
8080 sav_ofs = 2 * UNITS_PER_WORD;
8082 if (size < UNITS_PER_WORD)
8083 sav_ofs += UNITS_PER_WORD - size;
8085 sav_scale = UNITS_PER_WORD;
8086 max_reg = GP_ARG_NUM_REG - n_reg;
8089 /* Pull the value out of the saved registers ... */
8091 lab_false = create_artificial_label ();
8092 lab_over = create_artificial_label ();
8093 addr = create_tmp_var (ptr_type_node, "addr");
8094 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
8096 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8097 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8098 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8099 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8100 gimplify_and_add (t, pre_p);
8102 t = build2 (PLUS_EXPR, ptr_type_node, sav,
8103 fold_convert (ptr_type_node, size_int (sav_ofs)));
8104 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8105 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8106 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
8108 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8109 gimplify_and_add (t, pre_p);
8111 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8112 gimplify_and_add (t, pre_p);
8114 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8115 append_to_statement_list (t, pre_p);
8118 /* ... Otherwise out of the overflow area. */
8120 t = ovf;
8121 if (size < UNITS_PER_WORD)
8122 t = build2 (PLUS_EXPR, ptr_type_node, t,
8123 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
8125 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8127 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8128 gimplify_and_add (u, pre_p);
8130 t = build2 (PLUS_EXPR, ptr_type_node, t,
8131 fold_convert (ptr_type_node, size_int (size)));
8132 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
8133 gimplify_and_add (t, pre_p);
8135 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8136 append_to_statement_list (t, pre_p);
8139 /* Increment register save count. */
8141 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8142 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8143 gimplify_and_add (u, pre_p);
8145 if (indirect_p)
8147 t = build_pointer_type (build_pointer_type (type));
8148 addr = fold_convert (t, addr);
8149 addr = build_va_arg_indirect_ref (addr);
8151 else
8153 t = build_pointer_type (type);
8154 addr = fold_convert (t, addr);
8157 return build_va_arg_indirect_ref (addr);
8161 /* Builtins. */
8163 enum s390_builtin
8165 S390_BUILTIN_THREAD_POINTER,
8166 S390_BUILTIN_SET_THREAD_POINTER,
8168 S390_BUILTIN_max
8171 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8172 CODE_FOR_get_tp_64,
8173 CODE_FOR_set_tp_64
8176 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8177 CODE_FOR_get_tp_31,
8178 CODE_FOR_set_tp_31
8181 static void
8182 s390_init_builtins (void)
8184 tree ftype;
8186 ftype = build_function_type (ptr_type_node, void_list_node);
8187 add_builtin_function ("__builtin_thread_pointer", ftype,
8188 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8189 NULL, NULL_TREE);
8191 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8192 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8193 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8194 NULL, NULL_TREE);
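/* Usage sketch (added; not original text): user code reaches these
   machine-specific builtins directly, e.g.

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   which s390_expand_builtin below maps to the get_tp/set_tp insn
   patterns matching the current ABI.  */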
8197 /* Expand an expression EXP that calls a built-in function,
8198 with result going to TARGET if that's convenient
8199 (and in mode MODE if that's convenient).
8200 SUBTARGET may be used as the target for computing one of EXP's operands.
8201 IGNORE is nonzero if the value is to be ignored. */
8203 static rtx
8204 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8205 enum machine_mode mode ATTRIBUTE_UNUSED,
8206 int ignore ATTRIBUTE_UNUSED)
8208 #define MAX_ARGS 2
8210 unsigned int const *code_for_builtin =
8211 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8213 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8214 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8215 enum insn_code icode;
8216 rtx op[MAX_ARGS], pat;
8217 int arity;
8218 bool nonvoid;
8219 tree arg;
8220 call_expr_arg_iterator iter;
8222 if (fcode >= S390_BUILTIN_max)
8223 internal_error ("bad builtin fcode");
8224 icode = code_for_builtin[fcode];
8225 if (icode == 0)
8226 internal_error ("bad builtin fcode");
8228 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8230 arity = 0;
8231 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8233 const struct insn_operand_data *insn_op;
8235 if (arg == error_mark_node)
8236 return NULL_RTX;
8237 if (arity > MAX_ARGS)
8238 return NULL_RTX;
8240 insn_op = &insn_data[icode].operand[arity + nonvoid];
8242 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8244 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8245 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8246 arity++;
8249 if (nonvoid)
8251 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8252 if (!target
8253 || GET_MODE (target) != tmode
8254 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8255 target = gen_reg_rtx (tmode);
8258 switch (arity)
8260 case 0:
8261 pat = GEN_FCN (icode) (target);
8262 break;
8263 case 1:
8264 if (nonvoid)
8265 pat = GEN_FCN (icode) (target, op[0]);
8266 else
8267 pat = GEN_FCN (icode) (op[0]);
8268 break;
8269 case 2:
8270 pat = GEN_FCN (icode) (target, op[0], op[1]);
8271 break;
8272 default:
8273 gcc_unreachable ();
8275 if (!pat)
8276 return NULL_RTX;
8277 emit_insn (pat);
8279 if (nonvoid)
8280 return target;
8281 else
8282 return const0_rtx;
8286 /* Output assembly code for the trampoline template to
8287 stdio stream FILE.
8289 On S/390, we use gpr 1 internally in the trampoline code;
8290 gpr 0 is used to hold the static chain. */
8292 void
8293 s390_trampoline_template (FILE *file)
8295 rtx op[2];
8296 op[0] = gen_rtx_REG (Pmode, 0);
8297 op[1] = gen_rtx_REG (Pmode, 1);
8299 if (TARGET_64BIT)
8301 output_asm_insn ("basr\t%1,0", op);
8302 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8303 output_asm_insn ("br\t%1", op);
8304 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8306 else
8308 output_asm_insn ("basr\t%1,0", op);
8309 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8310 output_asm_insn ("br\t%1", op);
8311 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
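/* Explanatory sketch (an interpretation added here, not original
   text): the "basr %r1,0" above puts the address of the following
   instruction into gpr 1, so the load-multiple fetches the static
   chain into gpr 0 and the target address into gpr 1 from the data
   words that s390_initialize_trampoline stores after the code, and
   "br %r1" then jumps to the target.  */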
8315 /* Emit RTL insns to initialize the variable parts of a trampoline.
8316 FNADDR is an RTX for the address of the function's pure code.
8317 CXT is an RTX for the static chain value for the function. */
8319 void
8320 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8322 emit_move_insn (gen_rtx_MEM (Pmode,
8323 memory_address (Pmode,
8324 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8325 emit_move_insn (gen_rtx_MEM (Pmode,
8326 memory_address (Pmode,
8327 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
/* Return rtx for 64-bit constant formed from the 32-bit subwords
   LOW and HIGH, independent of the host word size.  */

rtx
s390_gen_rtx_const_DI (int high, int low)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  HOST_WIDE_INT val;
  val = (HOST_WIDE_INT)high;
  val <<= 32;
  /* Zero-extend LOW so a negative subword cannot smear sign bits
     into the high word.  */
  val |= (HOST_WIDE_INT)(unsigned int)low;

  return GEN_INT (val);
#else
#if HOST_BITS_PER_WIDE_INT >= 32
  return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
#else
  gcc_unreachable ();
#endif
#endif
}
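
/* A minimal usage sketch (illustrative only, kept out of the build):
   the same call yields the same DImode constant on 32-bit and 64-bit
   hosts, which is the point of the helper above.  */
#if 0
rtx example = s390_gen_rtx_const_DI (0x00000001, 0x80000000);
/* ... represents the 64-bit value 0x0000000180000000.  */
#endif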
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
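
/* For reference, the 64-bit sequence emitted above (a sketch; %r14 is
   RETURN_REGNUM and UNITS_PER_WORD is 8, hence the displacement):

       stg   %r14,8(%r15)       # save the return address
       larl  %r1,.LP0           # %r1 = address of this profile label
       brasl %r14,_mcount@PLT   # call the profiler (@PLT only if PIC)
       lg    %r14,8(%r15)       # restore the return address

   The 31-bit variants instead load _mcount's address (or its
   GOT-relative offset when PIC) from an inline literal pool that is
   established with bras.  */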
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  /* If a variable has a forced alignment to < 2 bytes, mark it with
     SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand.  */
  if (TREE_CODE (decl) == VAR_DECL
      && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
}
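
/* Illustrative example: a declaration such as

       static char tag __attribute__ ((aligned (1)));

   has DECL_USER_ALIGN set and a DECL_ALIGN of 8 bits (< 16), so its
   SYMBOL_REF is marked here.  LARL can only produce even addresses,
   so the address of such a symbol must be loaded from the literal
   pool instead.  */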
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used as a temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Set up literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_K (delta)
           && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("aghi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("agfi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("lgfi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Set up base pointer if required.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (delta)
              && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("ahi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("afi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_J (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("iilf\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-setup the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
}
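
/* Illustrative only: a C++ hierarchy that makes the front end request
   such a thunk, e.g.

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { virtual void g (); };

   Calling C::g through a B* enters a thunk that adjusts the this
   pointer by DELTA before jumping to the real C::g; with a virtual
   base, the extra vtable-resident adjustment (VCALL_OFFSET) is
   applied as well.  */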
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
/* Checks whether the given ARGUMENT_LIST would use a call-saved
   register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree argument_list)
{
  CUMULATIVE_ARGS cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg;

  INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);

  while (argument_list)
    {
      parameter = TREE_VALUE (argument_list);
      argument_list = TREE_CHAIN (argument_list);

      gcc_assert (parameter);

      /* For an undeclared variable passed as parameter we will get
         an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
        return true;

      type = TREE_TYPE (parameter);
      gcc_assert (type);

      mode = TYPE_MODE (type);
      gcc_assert (mode);

      if (pass_by_reference (&cum, mode, type, true))
        {
          mode = Pmode;
          type = build_pointer_type (type);
        }

      parm_rtx = s390_function_arg (&cum, mode, type, 0);

      s390_function_arg_advance (&cum, mode, type, 0);

      if (parm_rtx && REG_P (parm_rtx))
        {
          for (reg = 0;
               reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
               reg++)
            if (! call_used_regs[reg + REGNO (parm_rtx)])
              return true;
        }
    }
  return false;
}
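
/* Illustrative only: with the s390 ABI passing the first integer
   arguments in %r2 through %r6, a call such as

       foo (a, b, c, d, e);

   places the fifth integer argument in %r6.  Since %r6 is call-saved,
   the predicate above returns true and the sibcall optimization is
   refused: the sibcall epilogue restores %r6 before the jump, which
   would clobber the outgoing argument.  */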
/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but
     unfortunately call-saved.  This makes functions needing this
     register for arguments not suitable for sibcalls.  */
  if (TREE_OPERAND (exp, 1)
      && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
    return false;

  return true;
}
/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}
/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          addr_location = gen_rtx_UNSPEC (Pmode,
                                          gen_rtvec (1, addr_location),
                                          UNSPEC_PLT);
          addr_location = gen_rtx_CONST (Pmode, addr_location);
          plt_call = true;
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);

      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
    }
  return insn;
}
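
/* For reference, the RTL shape produced for an ordinary value-returning
   call with RESULT_REG = %r2 and RETADDR_REG = %r14 (a sketch, not an
   exact dump):

       (parallel
         [(set (reg:DI 2)
               (call (mem:QI (symbol_ref:DI "foo")) (const_int 0)))
          (clobber (reg:DI 14))])

   For a sibling call RETADDR_REG is NULL_RTX, so no clobber (and no
   tls USE) is attached and the bare CALL, or the SET of it, is
   emitted.  */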
/* Implement CONDITIONAL_REGISTER_USAGE.  */

void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}
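
/* Summarized for the reader (register numbers follow this port's
   REGISTER_NAMES): under -fpic %r12 becomes the fixed GOT pointer; on
   z/Architecture the literal-pool base and return-address registers
   are released for general allocation; hard regs 24-31 (%f8-%f15) in
   64-bit mode, or 18-19 (%f4/%f6) in 31-bit mode, become call-saved;
   and with -msoft-float all FPRs are removed from allocation
   entirely.  */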
/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;

void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}
/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
          || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
              && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_save_gpr != -1
              && (cfun_frame_layout.first_save_gpr < first
                  || cfun_frame_layout.last_save_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_save_gpr != -1)
            {
              new_insn = save_gprs (base,
                                    off + (cfun_frame_layout.first_save_gpr
                                           - first) * UNITS_PER_WORD,
                                    cfun_frame_layout.first_save_gpr,
                                    cfun_frame_layout.last_save_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_save_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_SRC (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_restore_gpr != -1
              && (cfun_frame_layout.first_restore_gpr < first
                  || cfun_frame_layout.last_restore_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_restore_gpr != -1)
            {
              new_insn = restore_gprs (base,
                                       off + (cfun_frame_layout.first_restore_gpr
                                              - first) * UNITS_PER_WORD,
                                       cfun_frame_layout.first_restore_gpr,
                                       cfun_frame_layout.last_restore_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_restore_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_DEST (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }
    }
}
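
/* Illustrative example of the rewrite above: if the prologue
   conservatively saved %r6-%r15 but the final frame layout shows that
   only %r13-%r15 are actually needed, the store-multiple

       stmg  %r6,%r15,48(%r15)

   is replaced by

       stmg  %r13,%r15,104(%r15)

   where the new displacement is 48 + (13 - 6) * UNITS_PER_WORD.  */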
/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* From here on decomposed literal pool addresses must be accepted.  */
  cfun->machine->decomposed_literal_pool_addresses_ok_p = true;

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if (pool_overflow)
        s390_chunkify_finish (pool);
      else
        s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          label = s390_execute_label (insn);
          if (!label)
            continue;

          gcc_assert (label != const0_rtx);

          target = emit_label (XEXP (label, 0));
          INSN_ADDRESSES_NEW (target, -1);

          target = emit_insn (s390_execute_target (insn));
          INSN_ADDRESSES_NEW (target, -1);
        }
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();
}
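
/* A worked instance of the fixpoint loop above (illustrative):
   suppose the pool exceeds 4096 bytes and a conditional branch sits
   70K away from its target.  Pass 1 chunkifies the pool; the branch
   moves even further away and must be split, which adds its target
   address to the pool, so the chunk list is cancelled and recomputed.
   Pass 2 re-chunkifies with the new entry and finds no remaining
   out-of-range branch, so both conditions hold and the loop exits.  */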
/* Initialize GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION s390_handle_option

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_rtx_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE s390_mangle_fundamental_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"